From 822a7b4c8df34a288a4c6461868a30b042115f9a Mon Sep 17 00:00:00 2001 From: "smithery-ai[bot]" <194235850+smithery-ai[bot]@users.noreply.github.com> Date: Sun, 13 Apr 2025 22:23:48 +0000 Subject: [PATCH 001/565] Add Dockerfile --- Dockerfile | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 Dockerfile diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..58302a68 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,35 @@ +# Generated by https://smithery.ai. See: https://smithery.ai/docs/config#dockerfile +FROM python:3.12-slim + +# Install Chrome dependencies +RUN apt-get update && apt-get install -y \ + wget \ + unzip \ + libnss3 \ + libgconf-2-4 \ + libxi6 \ + libgdk-pixbuf2.0-0 \ + libxrandr2 \ + ca-certificates \ + fonts-liberation \ + libappindicator3-1 \ + libasound2 \ + libatk-bridge2.0-0 \ + libatk1.0-0 \ + libgtk-3-0 \ + && rm -rf /var/lib/apt/lists/* + +# Set work directory +WORKDIR /app + +# Copy the project files +COPY . /app + +# Upgrade pip and install build dependencies +RUN pip install --upgrade pip \ + && pip install --no-cache-dir . + +# Expose any ports if necessary (MCP likely communicates via stdio so no port exposure) + +# Set default command to run the MCP server +CMD ["python", "main.py", "--no-setup"] From c08578c0e8695c900b66c748a9ded97d14b00032 Mon Sep 17 00:00:00 2001 From: "smithery-ai[bot]" <194235850+smithery-ai[bot]@users.noreply.github.com> Date: Sun, 13 Apr 2025 22:23:49 +0000 Subject: [PATCH 002/565] Add Smithery configuration --- smithery.yaml | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 smithery.yaml diff --git a/smithery.yaml b/smithery.yaml new file mode 100644 index 00000000..337fe9a8 --- /dev/null +++ b/smithery.yaml @@ -0,0 +1,33 @@ +# Smithery configuration file: https://smithery.ai/docs/config#smitheryyaml + +startCommand: + type: stdio + configSchema: + # JSON Schema defining the configuration options for the MCP. 
+ type: object + properties: + LINKEDIN_EMAIL: + type: string + description: Email for LinkedIn login + LINKEDIN_PASSWORD: + type: string + description: Password for LinkedIn login + CHROMEDRIVER: + type: string + description: Path to the ChromeDriver binary. Optional if ChromeDriver is in PATH. + commandFunction: + # A JS function that produces the CLI command based on the given config to start the MCP on stdio. + |- + (config) => ({ + command: 'python', + args: ['main.py', '--no-setup'], + env: { + LINKEDIN_EMAIL: config.LINKEDIN_EMAIL || '', + LINKEDIN_PASSWORD: config.LINKEDIN_PASSWORD || '', + CHROMEDRIVER: config.CHROMEDRIVER || '' + } + }) + exampleConfig: + LINKEDIN_EMAIL: example.user@example.com + LINKEDIN_PASSWORD: yourLinkedInPassword + CHROMEDRIVER: /usr/local/bin/chromedriver From 33a8b6554ae1836969ab583876a41ea60d0151f0 Mon Sep 17 00:00:00 2001 From: "smithery-ai[bot]" <194235850+smithery-ai[bot]@users.noreply.github.com> Date: Sun, 13 Apr 2025 22:23:50 +0000 Subject: [PATCH 003/565] Update README --- README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/README.md b/README.md index e3434efc..b9c1d018 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ # LinkedIn MCP Server +[![smithery badge](https://smithery.ai/badge/@stickerdaniel/linkedin-mcp-server)](https://smithery.ai/server/@stickerdaniel/linkedin-mcp-server) + A Model Context Protocol (MCP) server that enables interaction with LinkedIn through Claude and other AI assistants. This server allows you to scrape LinkedIn profiles, companies, jobs, and perform job searches. 
## ๐Ÿ“‹ Features @@ -10,6 +12,14 @@ A Model Context Protocol (MCP) server that enables interaction with LinkedIn thr ## ๐Ÿ”ง Installation +### Installing via Smithery + +To install LinkedIn MCP Server for Claude Desktop automatically via [Smithery](https://smithery.ai/server/@stickerdaniel/linkedin-mcp-server): + +```bash +npx -y @smithery/cli install @stickerdaniel/linkedin-mcp-server --client claude +``` + ### Prerequisites - Python 3.8 or higher From 8c42c4724020cfec31e61953607b26d9397841a3 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Tue, 22 Apr 2025 18:58:30 -0400 Subject: [PATCH 004/565] Update README.md --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index b9c1d018..494aedc3 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,10 @@ A Model Context Protocol (MCP) server that enables interaction with LinkedIn through Claude and other AI assistants. This server allows you to scrape LinkedIn profiles, companies, jobs, and perform job searches. 
+ +https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 + + ## ๐Ÿ“‹ Features - **Profile Scraping**: Get detailed information from LinkedIn profiles From 924fb129757088295828e18c871c1d2b9b5ffead Mon Sep 17 00:00:00 2001 From: Hritik Raj Date: Sun, 27 Apr 2025 15:18:03 +0530 Subject: [PATCH 005/565] moved main inside src Signed-off-by: Hritik Raj --- main.py => src/main.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename main.py => src/main.py (100%) diff --git a/main.py b/src/main.py similarity index 100% rename from main.py rename to src/main.py From 8da4c5e1e3ce35956b1f0fd94c61a91ea8e807a9 Mon Sep 17 00:00:00 2001 From: Hritik Raj Date: Sun, 27 Apr 2025 16:42:00 +0530 Subject: [PATCH 006/565] added SSE integration Signed-off-by: Hritik Raj --- src/main.py | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/src/main.py b/src/main.py index 6d06a6b7..a5497ccb 100644 --- a/src/main.py +++ b/src/main.py @@ -7,14 +7,19 @@ import sys import logging +import uvicorn from typing import NoReturn +from fastapi import FastAPI from linkedin_mcp_server.arguments import parse_arguments from linkedin_mcp_server.cli import print_claude_config from linkedin_mcp_server.drivers.chrome import initialize_driver from linkedin_mcp_server.server import create_mcp_server, shutdown_handler +# Initialize FastAPI app +app = FastAPI() + def main() -> None: """Initialize and run the LinkedIn MCP server.""" print("๐Ÿ”— LinkedIn MCP Server ๐Ÿ”—") @@ -42,8 +47,22 @@ def main() -> None: # Create and run the MCP server mcp = create_mcp_server() - print("\n๐Ÿš€ Running LinkedIn MCP server...") - mcp.run(transport="stdio") + + # Ask the user which mode they want + print("\nChoose transport mode:") + print("1. stdio (default)") + print("2. 
sse (Server-Sent Events)") + + choice = input("Enter 1 or 2 [1]: ").strip() or "1" + + if choice == "2": + # Run the FastAPI SSE server + print("\n๐Ÿš€ Running LinkedIn MCP server (SSE mode)...") + uvicorn.run(app, host="0.0.0.0", port=8000) + else: + # Run using stdio + print("\n๐Ÿš€ Running LinkedIn MCP server (STDIO mode)...") + mcp.run(transport="stdio") def exit_gracefully(exit_code: int = 0) -> NoReturn: From 1436ef2a90ac27f9551cf035c40e3d1801593549 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Mon, 28 Apr 2025 16:57:19 -0400 Subject: [PATCH 007/565] Update README.md --- README.md | 8 -------- 1 file changed, 8 deletions(-) diff --git a/README.md b/README.md index 494aedc3..3759357e 100644 --- a/README.md +++ b/README.md @@ -16,14 +16,6 @@ https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 ## ๐Ÿ”ง Installation -### Installing via Smithery - -To install LinkedIn MCP Server for Claude Desktop automatically via [Smithery](https://smithery.ai/server/@stickerdaniel/linkedin-mcp-server): - -```bash -npx -y @smithery/cli install @stickerdaniel/linkedin-mcp-server --client claude -``` - ### Prerequisites - Python 3.8 or higher From 48471ed34c581a097c37be61e320a10caf8a5a56 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 28 Apr 2025 17:23:41 -0400 Subject: [PATCH 008/565] fix(installer): Update tip message for ChromeDriver installation instructions --- src/linkedin_mcp_server/drivers/chrome.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/linkedin_mcp_server/drivers/chrome.py b/src/linkedin_mcp_server/drivers/chrome.py index 306f3437..39890f2f 100644 --- a/src/linkedin_mcp_server/drivers/chrome.py +++ b/src/linkedin_mcp_server/drivers/chrome.py @@ -260,7 +260,7 @@ def initialize_driver(headless: bool = True, lazy_init: bool = False) -> None: print("โš ๏ธ ChromeDriver not found in common locations.") print("โšก Continuing with automatic detection...") 
print( - "๐Ÿ’ก Tip: For better results, install ChromeDriver and set the CHROMEDRIVER environment variable" + "๐Ÿ’ก Tip: install ChromeDriver and set the CHROMEDRIVER environment variable" ) # Create driver and log in From 61affb54f5d5185b562de1767618463e231a74b1 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 28 Apr 2025 17:37:21 -0400 Subject: [PATCH 009/565] fix(docs): Add missing command to install package in installation instructions --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 3759357e..58763360 100644 --- a/README.md +++ b/README.md @@ -54,6 +54,7 @@ Using `uv`: ```bash uv add "mcp[cli]" selenium httpx inquirer pyperclip uv add "git+https://github.com/stickerdaniel/linkedin_scraper.git" +uv pip install -e . ``` ### Step 4: Install ChromeDriver From fc853ed2f6c79567f20e0e573021cce46a5f2aa3 Mon Sep 17 00:00:00 2001 From: Hritik Raj Date: Tue, 29 Apr 2025 10:05:58 +0530 Subject: [PATCH 010/565] added inquirer integration Signed-off-by: Hritik Raj --- src/main.py => main.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) rename src/main.py => main.py (83%) diff --git a/src/main.py b/main.py similarity index 83% rename from src/main.py rename to main.py index a5497ccb..a2dc0009 100644 --- a/src/main.py +++ b/main.py @@ -8,6 +8,7 @@ import sys import logging import uvicorn +import inquirer from typing import NoReturn from fastapi import FastAPI @@ -48,14 +49,21 @@ def main() -> None: # Create and run the MCP server mcp = create_mcp_server() - # Ask the user which mode they want - print("\nChoose transport mode:") - print("1. stdio (default)") - print("2. 
sse (Server-Sent Events)") - - choice = input("Enter 1 or 2 [1]: ").strip() or "1" - - if choice == "2": + questions = [ + inquirer.List( + "transport", + message="Choose transport mode", + choices=[ + ("stdio (Default CLI mode)", "stdio"), + ("sse (Server-Sent Events HTTP mode)", "sse") + ], + default="stdio" + ) + ] + answers = inquirer.prompt(questions) + transport_choice = answers["transport"] + + if transport_choice == "sse": # Run the FastAPI SSE server print("\n๐Ÿš€ Running LinkedIn MCP server (SSE mode)...") uvicorn.run(app, host="0.0.0.0", port=8000) From c6958919f9bb3b2f609466d9bb6223a420945bb5 Mon Sep 17 00:00:00 2001 From: Hritik Raj Date: Thu, 1 May 2025 11:43:44 +0530 Subject: [PATCH 011/565] Added setup functionality Signed-off-by: Hritik Raj --- main.py | 38 +++++++++++++++++++++++--------------- 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/main.py b/main.py index a2dc0009..cde1722e 100644 --- a/main.py +++ b/main.py @@ -21,6 +21,22 @@ # Initialize FastAPI app app = FastAPI() +def choose_transport_interactive() -> str: + """Prompt user for transport mode using inquirer.""" + questions = [ + inquirer.List( + "transport", + message="Choose transport mode", + choices=[ + ("stdio (Default CLI mode)", "stdio"), + ("sse (Server-Sent Events HTTP mode)", "sse") + ], + default="stdio" + ) + ] + answers = inquirer.prompt(questions) + return answers["transport"] + def main() -> None: """Initialize and run the LinkedIn MCP server.""" print("๐Ÿ”— LinkedIn MCP Server ๐Ÿ”—") @@ -49,22 +65,14 @@ def main() -> None: # Create and run the MCP server mcp = create_mcp_server() - questions = [ - inquirer.List( - "transport", - message="Choose transport mode", - choices=[ - ("stdio (Default CLI mode)", "stdio"), - ("sse (Server-Sent Events HTTP mode)", "sse") - ], - default="stdio" - ) - ] - answers = inquirer.prompt(questions) - transport_choice = answers["transport"] + # Decide transport + if args.setup: + transport = choose_transport_interactive() + 
else: + transport = "stdio" # Default to stdio without prompt - if transport_choice == "sse": - # Run the FastAPI SSE server + # Start server + if transport == "sse": print("\n๐Ÿš€ Running LinkedIn MCP server (SSE mode)...") uvicorn.run(app, host="0.0.0.0", port=8000) else: From cba2748c803c9daeefca9a699b8a8e04bde09439 Mon Sep 17 00:00:00 2001 From: Hritik Raj Date: Thu, 1 May 2025 11:46:37 +0530 Subject: [PATCH 012/565] Added setup argument Signed-off-by: Hritik Raj --- src/linkedin_mcp_server/arguments.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/linkedin_mcp_server/arguments.py b/src/linkedin_mcp_server/arguments.py index 96f2ca85..4db277ef 100644 --- a/src/linkedin_mcp_server/arguments.py +++ b/src/linkedin_mcp_server/arguments.py @@ -54,6 +54,12 @@ def parse_arguments() -> ServerArguments: help="Initialize Chrome driver and login immediately (not recommended for most users)", ) + parser.add_argument( + "--setup", + action="store_true", + help="Enable interactive setup mode" + ) + args = parser.parse_args() return ServerArguments( From 349e05e92cd5784355218042e2d02725bb679dbf Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 2 May 2025 14:37:21 -0400 Subject: [PATCH 013/565] a few tweaks --- main.py | 3 +- pyproject.toml | 4 + src/linkedin_mcp_server/arguments.py | 8 +- src/linkedin_mcp_server/drivers/chrome.py | 2 +- src/linkedin_mcp_server/py.typed | 0 uv.lock | 766 +++++++++++++++------- 6 files changed, 520 insertions(+), 263 deletions(-) create mode 100644 src/linkedin_mcp_server/py.typed diff --git a/main.py b/main.py index cde1722e..1688f2f9 100644 --- a/main.py +++ b/main.py @@ -8,9 +8,8 @@ import sys import logging import uvicorn -import inquirer +import inquirer # type: ignore # third-party package without type stubs from typing import NoReturn - from fastapi import FastAPI from linkedin_mcp_server.arguments import parse_arguments from linkedin_mcp_server.cli import print_claude_config diff --git a/pyproject.toml 
b/pyproject.toml index 8e14fe07..0e1d535f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,6 +5,7 @@ description = "Add your description here" readme = "README.md" requires-python = ">=3.12" dependencies = [ + "fastapi[standard]>=0.115.12", "httpx>=0.28.1", "inquirer>=3.4.0", "linkedin-scraper", @@ -16,3 +17,6 @@ dependencies = [ [tool.uv.sources] linkedin-scraper = { git = "https://github.com/stickerdaniel/linkedin_scraper.git" } + +[tool.setuptools.package-data] +linkedin_mcp_server = ["py.typed"] diff --git a/src/linkedin_mcp_server/arguments.py b/src/linkedin_mcp_server/arguments.py index 4db277ef..01dc14c9 100644 --- a/src/linkedin_mcp_server/arguments.py +++ b/src/linkedin_mcp_server/arguments.py @@ -45,7 +45,7 @@ def parse_arguments() -> ServerArguments: parser.add_argument( "--no-setup", action="store_true", - help="Skip printing configuration information", + help="Skip printing configuration information and interactive setup", ) parser.add_argument( @@ -54,12 +54,6 @@ def parse_arguments() -> ServerArguments: help="Initialize Chrome driver and login immediately (not recommended for most users)", ) - parser.add_argument( - "--setup", - action="store_true", - help="Enable interactive setup mode" - ) - args = parser.parse_args() return ServerArguments( diff --git a/src/linkedin_mcp_server/drivers/chrome.py b/src/linkedin_mcp_server/drivers/chrome.py index 39890f2f..7bb28086 100644 --- a/src/linkedin_mcp_server/drivers/chrome.py +++ b/src/linkedin_mcp_server/drivers/chrome.py @@ -10,7 +10,7 @@ import sys import logging from pathlib import Path -import inquirer +import inquirer # type: ignore from selenium import webdriver from selenium.webdriver.chrome.options import Options from selenium.webdriver.chrome.service import Service diff --git a/src/linkedin_mcp_server/py.typed b/src/linkedin_mcp_server/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/uv.lock b/uv.lock index dcb60157..d698ecb6 100644 --- a/uv.lock +++ b/uv.lock @@ -1,23 
+1,23 @@ version = 1 -revision = 1 +revision = 2 requires-python = ">=3.12" [[package]] name = "annotated-types" version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload_time = "2024-05-20T21:33:25.928Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload_time = "2024-05-20T21:33:24.1Z" }, ] [[package]] name = "ansicon" version = "1.89.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b6/e2/1c866404ddbd280efedff4a9f15abfe943cb83cde6e895022370f3a61f85/ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1", size = 67312 } +sdist = { url = "https://files.pythonhosted.org/packages/b6/e2/1c866404ddbd280efedff4a9f15abfe943cb83cde6e895022370f3a61f85/ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1", size = 67312, upload_time = "2019-04-29T20:23:57.314Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/75/f9/f1c10e223c7b56a38109a3f2eb4e7fe9a757ea3ed3a166754fb30f65e466/ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec", size = 63675 }, + { url = "https://files.pythonhosted.org/packages/75/f9/f1c10e223c7b56a38109a3f2eb4e7fe9a757ea3ed3a166754fb30f65e466/ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec", size = 63675, upload_time = "2019-04-29T20:23:53.83Z" }, ] [[package]] @@ -29,18 +29,18 @@ dependencies = [ { name = "sniffio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949 } +sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload_time = "2025-03-17T00:02:54.77Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916 }, + { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload_time = "2025-03-17T00:02:52.713Z" }, ] [[package]] name = "attrs" version = "25.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032 } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload_time = "2025-03-13T11:10:22.779Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815 }, + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload_time = "2025-03-13T11:10:21.14Z" }, ] [[package]] @@ -52,18 +52,18 @@ dependencies = [ { name = "six" }, { name = "wcwidth" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/25/ae/92e9968ad23205389ec6bd82e2d4fca3817f1cdef34e10aa8d529ef8b1d7/blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680", size = 6655612 } +sdist = { url = "https://files.pythonhosted.org/packages/25/ae/92e9968ad23205389ec6bd82e2d4fca3817f1cdef34e10aa8d529ef8b1d7/blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680", size = 6655612, upload_time = "2023-02-04T02:25:45.886Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/98/584f211c3a4bb38f2871fa937ee0cc83c130de50c955d6c7e2334dbf4acb/blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058", size = 58372 }, + { url = 
"https://files.pythonhosted.org/packages/76/98/584f211c3a4bb38f2871fa937ee0cc83c130de50c955d6c7e2334dbf4acb/blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058", size = 58372, upload_time = "2023-02-04T02:25:43.093Z" }, ] [[package]] name = "certifi" version = "2025.1.31" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577 } +sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577, upload_time = "2025-01-31T02:16:47.166Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393 }, + { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393, upload_time = "2025-01-31T02:16:45.015Z" }, ] [[package]] @@ -73,56 +73,56 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pycparser" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } +sdist = { url = 
"https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload_time = "2024-09-04T20:45:21.852Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, - { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, - { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, - { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload_time = "2024-09-04T20:44:26.208Z" }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload_time = "2024-09-04T20:44:27.578Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload_time = "2024-09-04T20:44:43.733Z" }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload_time = "2024-09-04T20:44:45.309Z" }, ] [[package]] name = "cfgv" version = "3.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114 } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload_time = "2023-08-12T20:38:17.776Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249 }, + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload_time = "2023-08-12T20:38:16.269Z" }, ] [[package]] name = "charset-normalizer" version = "3.4.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 }, - { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 }, - { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 }, - { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 }, - { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 }, - { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 }, - { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 }, - { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 }, - { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 }, - { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 }, - { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 }, - { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 }, - { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 }, - { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 }, - { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 }, - { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 }, - { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 }, - { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 }, - { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 }, - { url = 
"https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 }, - { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 }, - { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 }, - { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 }, - { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 }, - { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 }, - { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 }, - { url = 
"https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 }, +sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188, upload_time = "2024-12-24T18:12:35.43Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105, upload_time = "2024-12-24T18:10:38.83Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404, upload_time = "2024-12-24T18:10:44.272Z" }, + { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423, upload_time = "2024-12-24T18:10:45.492Z" }, + { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184, upload_time = "2024-12-24T18:10:47.898Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268, upload_time = "2024-12-24T18:10:50.589Z" }, + { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601, upload_time = "2024-12-24T18:10:52.541Z" }, + { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098, upload_time = "2024-12-24T18:10:53.789Z" }, + { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520, upload_time = "2024-12-24T18:10:55.048Z" }, + { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852, upload_time = "2024-12-24T18:10:57.647Z" }, + { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488, upload_time = "2024-12-24T18:10:59.43Z" 
}, + { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192, upload_time = "2024-12-24T18:11:00.676Z" }, + { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550, upload_time = "2024-12-24T18:11:01.952Z" }, + { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785, upload_time = "2024-12-24T18:11:03.142Z" }, + { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698, upload_time = "2024-12-24T18:11:05.834Z" }, + { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162, upload_time = "2024-12-24T18:11:07.064Z" }, + { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263, upload_time = "2024-12-24T18:11:08.374Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966, upload_time = "2024-12-24T18:11:09.831Z" }, + { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992, upload_time = "2024-12-24T18:11:12.03Z" }, + { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162, upload_time = "2024-12-24T18:11:13.372Z" }, + { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972, upload_time = "2024-12-24T18:11:14.628Z" }, + { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095, upload_time = "2024-12-24T18:11:17.672Z" }, + { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668, upload_time = 
"2024-12-24T18:11:18.989Z" }, + { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073, upload_time = "2024-12-24T18:11:21.507Z" }, + { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732, upload_time = "2024-12-24T18:11:22.774Z" }, + { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391, upload_time = "2024-12-24T18:11:24.139Z" }, + { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702, upload_time = "2024-12-24T18:11:26.535Z" }, + { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767, upload_time = "2024-12-24T18:12:32.852Z" }, ] [[package]] @@ -132,27 +132,36 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 
226593 } +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload_time = "2024-12-21T18:38:44.339Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload_time = "2024-12-21T18:38:41.666Z" }, ] [[package]] name = "colorama" version = "0.4.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload_time = "2022-10-25T02:36:22.414Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, 
upload_time = "2022-10-25T02:36:20.889Z" }, ] [[package]] name = "distlib" version = "0.3.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923 } +sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923, upload_time = "2024-10-09T18:35:47.551Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973 }, + { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973, upload_time = "2024-10-09T18:35:44.272Z" }, +] + +[[package]] +name = "dnspython" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload_time = "2024-10-05T20:14:59.362Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload_time = "2024-10-05T20:14:57.687Z" }, ] [[package]] @@ -163,27 +172,83 @@ dependencies = [ { 
name = "runs" }, { name = "xmod" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2a/92/734a4ab345914259cb6146fd36512608ea42be16195375c379046f33283d/editor-1.6.6.tar.gz", hash = "sha256:bb6989e872638cd119db9a4fce284cd8e13c553886a1c044c6b8d8a160c871f8", size = 3197 } +sdist = { url = "https://files.pythonhosted.org/packages/2a/92/734a4ab345914259cb6146fd36512608ea42be16195375c379046f33283d/editor-1.6.6.tar.gz", hash = "sha256:bb6989e872638cd119db9a4fce284cd8e13c553886a1c044c6b8d8a160c871f8", size = 3197, upload_time = "2024-01-25T10:44:59.909Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/c2/4bc8cd09b14e28ce3f406a8b05761bed0d785d1ca8c2a5c6684d884c66a2/editor-1.6.6-py3-none-any.whl", hash = "sha256:e818e6913f26c2a81eadef503a2741d7cca7f235d20e217274a009ecd5a74abf", size = 4017 }, + { url = "https://files.pythonhosted.org/packages/1b/c2/4bc8cd09b14e28ce3f406a8b05761bed0d785d1ca8c2a5c6684d884c66a2/editor-1.6.6-py3-none-any.whl", hash = "sha256:e818e6913f26c2a81eadef503a2741d7cca7f235d20e217274a009ecd5a74abf", size = 4017, upload_time = "2024-01-25T10:44:58.66Z" }, +] + +[[package]] +name = "email-validator" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967, upload_time = "2024-06-20T11:30:30.034Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521, upload_time = "2024-06-20T11:30:28.248Z" }, +] + +[[package]] +name = "fastapi" +version = "0.115.12" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f4/55/ae499352d82338331ca1e28c7f4a63bfd09479b16395dce38cf50a39e2c2/fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681", size = 295236, upload_time = "2025-03-23T22:55:43.822Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/50/b3/b51f09c2ba432a576fe63758bddc81f78f0c6309d9e5c10d194313bf021e/fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d", size = 95164, upload_time = "2025-03-23T22:55:42.101Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "email-validator" }, + { name = "fastapi-cli", extra = ["standard"] }, + { name = "httpx" }, + { name = "jinja2" }, + { name = "python-multipart" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[[package]] +name = "fastapi-cli" +version = "0.0.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "rich-toolkit" }, + { name = "typer" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fe/73/82a5831fbbf8ed75905bacf5b2d9d3dfd6f04d6968b29fe6f72a5ae9ceb1/fastapi_cli-0.0.7.tar.gz", hash = "sha256:02b3b65956f526412515907a0793c9094abd4bfb5457b389f645b0ea6ba3605e", size = 16753, upload_time = "2024-12-15T14:28:10.028Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/e6/5daefc851b514ce2287d8f5d358ae4341089185f78f3217a69d0ce3a390c/fastapi_cli-0.0.7-py3-none-any.whl", hash = "sha256:d549368ff584b2804336c61f192d86ddea080c11255f375959627911944804f4", size = 10705, upload_time = "2024-12-15T14:28:06.18Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "uvicorn", extra = ["standard"] }, ] [[package]] name = "filelock" version = "3.18.0" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075 } +sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload_time = "2025-03-14T07:11:40.47Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215 }, + { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload_time = "2025-03-14T07:11:39.145Z" }, ] [[package]] name = "h11" version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } +sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418, upload_time = "2022-09-25T15:40:01.519Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, 
+ { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259, upload_time = "2022-09-25T15:39:59.68Z" }, ] [[package]] @@ -194,9 +259,31 @@ dependencies = [ { name = "certifi" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/45/ad3e1b4d448f22c0cff4f5692f5ed0666658578e358b8d58a19846048059/httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad", size = 85385 } +sdist = { url = "https://files.pythonhosted.org/packages/9f/45/ad3e1b4d448f22c0cff4f5692f5ed0666658578e358b8d58a19846048059/httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad", size = 85385, upload_time = "2025-04-11T14:42:46.661Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/8d/f052b1e336bb2c1fc7ed1aaed898aa570c0b61a09707b108979d9fc6e308/httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be", size = 78732, upload_time = "2025-04-11T14:42:44.896Z" }, +] + +[[package]] +name = "httptools" +version = "0.6.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload_time = "2024-10-16T19:45:08.902Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/8d/f052b1e336bb2c1fc7ed1aaed898aa570c0b61a09707b108979d9fc6e308/httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be", size = 78732 }, + { url = 
"https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683, upload_time = "2024-10-16T19:44:30.175Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337, upload_time = "2024-10-16T19:44:31.786Z" }, + { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796, upload_time = "2024-10-16T19:44:32.825Z" }, + { url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837, upload_time = "2024-10-16T19:44:33.974Z" }, + { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289, upload_time = "2024-10-16T19:44:35.111Z" }, + { url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779, upload_time = "2024-10-16T19:44:36.253Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634, upload_time = "2024-10-16T19:44:37.357Z" }, + { url = "https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214, upload_time = "2024-10-16T19:44:38.738Z" }, + { url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431, upload_time = "2024-10-16T19:44:39.818Z" }, + { url = "https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121, upload_time = "2024-10-16T19:44:41.189Z" }, + { url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805, upload_time = "2024-10-16T19:44:42.384Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858, upload_time = "2024-10-16T19:44:43.959Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042, upload_time = "2024-10-16T19:44:45.071Z" }, + { url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682, upload_time = "2024-10-16T19:44:46.46Z" }, ] [[package]] @@ -209,36 +296,36 @@ dependencies = [ { name = "httpcore" }, { name = "idna" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload_time = "2024-12-06T15:37:23.222Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload_time = "2024-12-06T15:37:21.509Z" }, ] [[package]] name = "httpx-sse" version = "0.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624 } +sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624, upload_time = "2023-12-22T08:01:21.083Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819 }, + { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819, upload_time = "2023-12-22T08:01:19.89Z" }, ] [[package]] name = "identify" version = "2.6.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9b/98/a71ab060daec766acc30fb47dfca219d03de34a70d616a79a38c6066c5bf/identify-2.6.9.tar.gz", hash = "sha256:d40dfe3142a1421d8518e3d3985ef5ac42890683e32306ad614a29490abeb6bf", size = 99249 } +sdist = { url = "https://files.pythonhosted.org/packages/9b/98/a71ab060daec766acc30fb47dfca219d03de34a70d616a79a38c6066c5bf/identify-2.6.9.tar.gz", hash = "sha256:d40dfe3142a1421d8518e3d3985ef5ac42890683e32306ad614a29490abeb6bf", size = 99249, upload_time = "2025-03-08T15:54:13.632Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/ce/0845144ed1f0e25db5e7a79c2354c1da4b5ce392b8966449d5db8dca18f1/identify-2.6.9-py2.py3-none-any.whl", hash = "sha256:c98b4322da415a8e5a70ff6e51fbc2d2932c015532d77e9f8537b4ba7813b150", size = 99101 }, + { url = 
"https://files.pythonhosted.org/packages/07/ce/0845144ed1f0e25db5e7a79c2354c1da4b5ce392b8966449d5db8dca18f1/identify-2.6.9-py2.py3-none-any.whl", hash = "sha256:c98b4322da415a8e5a70ff6e51fbc2d2932c015532d77e9f8537b4ba7813b150", size = 99101, upload_time = "2025-03-08T15:54:12.026Z" }, ] [[package]] name = "idna" version = "3.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload_time = "2024-09-15T18:07:39.745Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload_time = "2024-09-15T18:07:37.964Z" }, ] [[package]] @@ -250,9 +337,21 @@ dependencies = [ { name = "editor" }, { name = "readchar" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f3/06/ef91eb8f3feafb736aa33dcb278fc9555d17861aa571b684715d095db24d/inquirer-3.4.0.tar.gz", hash = "sha256:8edc99c076386ee2d2204e5e3653c2488244e82cb197b2d498b3c1b5ffb25d0b", size = 14472 } +sdist = { url = "https://files.pythonhosted.org/packages/f3/06/ef91eb8f3feafb736aa33dcb278fc9555d17861aa571b684715d095db24d/inquirer-3.4.0.tar.gz", hash = 
"sha256:8edc99c076386ee2d2204e5e3653c2488244e82cb197b2d498b3c1b5ffb25d0b", size = 14472, upload_time = "2024-08-12T12:03:43.83Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/b2/be907c8c0f8303bc4b10089f5470014c3bf3521e9b8d3decf3037fd94725/inquirer-3.4.0-py3-none-any.whl", hash = "sha256:bb0ec93c833e4ce7b51b98b1644b0a4d2bb39755c39787f6a504e4fee7a11b60", size = 18077, upload_time = "2024-08-12T12:03:41.589Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload_time = "2025-03-05T20:05:02.478Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/b2/be907c8c0f8303bc4b10089f5470014c3bf3521e9b8d3decf3037fd94725/inquirer-3.4.0-py3-none-any.whl", hash = "sha256:bb0ec93c833e4ce7b51b98b1644b0a4d2bb39755c39787f6a504e4fee7a11b60", size = 18077 }, + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload_time = "2025-03-05T20:05:00.369Z" }, ] [[package]] @@ -262,9 +361,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ansicon", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/20/d0/59b2b80e7a52d255f9e0ad040d2e826342d05580c4b1d7d7747cfb8db731/jinxed-1.3.0.tar.gz", hash = "sha256:1593124b18a41b7a3da3b078471442e51dbad3d77b4d4f2b0c26ab6f7d660dbf", size = 80981 } +sdist = { url = "https://files.pythonhosted.org/packages/20/d0/59b2b80e7a52d255f9e0ad040d2e826342d05580c4b1d7d7747cfb8db731/jinxed-1.3.0.tar.gz", hash = 
"sha256:1593124b18a41b7a3da3b078471442e51dbad3d77b4d4f2b0c26ab6f7d660dbf", size = 80981, upload_time = "2024-07-31T22:39:18.854Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/27/e3/0e0014d6ab159d48189e92044ace13b1e1fe9aa3024ba9f4e8cf172aa7c2/jinxed-1.3.0-py2.py3-none-any.whl", hash = "sha256:b993189f39dc2d7504d802152671535b06d380b26d78070559551cbf92df4fc5", size = 33085 }, + { url = "https://files.pythonhosted.org/packages/27/e3/0e0014d6ab159d48189e92044ace13b1e1fe9aa3024ba9f4e8cf172aa7c2/jinxed-1.3.0-py2.py3-none-any.whl", hash = "sha256:b993189f39dc2d7504d802152671535b06d380b26d78070559551cbf92df4fc5", size = 33085, upload_time = "2024-07-31T22:39:17.426Z" }, ] [[package]] @@ -272,6 +371,7 @@ name = "linkedin-mcp-server" version = "0.1.0" source = { virtual = "." } dependencies = [ + { name = "fastapi", extra = ["standard"] }, { name = "httpx" }, { name = "inquirer" }, { name = "linkedin-scraper" }, @@ -283,6 +383,7 @@ dependencies = [ [package.metadata] requires-dist = [ + { name = "fastapi", extras = ["standard"], specifier = ">=0.115.12" }, { name = "httpx", specifier = ">=0.28.1" }, { name = "inquirer", specifier = ">=3.4.0" }, { name = "linkedin-scraper", git = "https://github.com/stickerdaniel/linkedin_scraper.git" }, @@ -306,42 +407,42 @@ dependencies = [ name = "lxml" version = "5.3.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/80/61/d3dc048cd6c7be6fe45b80cedcbdd4326ba4d550375f266d9f4246d0f4bc/lxml-5.3.2.tar.gz", hash = "sha256:773947d0ed809ddad824b7b14467e1a481b8976e87278ac4a730c2f7c7fcddc1", size = 3679948 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/7e/c749257a7fabc712c4df57927b0f703507f316e9f2c7e3219f8f76d36145/lxml-5.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:16b3897691ec0316a1aa3c6585f61c8b7978475587c5b16fc1d2c28d283dc1b0", size = 8193212 }, - { url = 
"https://files.pythonhosted.org/packages/a8/50/17e985ba162c9f1ca119f4445004b58f9e5ef559ded599b16755e9bfa260/lxml-5.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a8d4b34a0eeaf6e73169dcfd653c8d47f25f09d806c010daf074fba2db5e2d3f", size = 4451439 }, - { url = "https://files.pythonhosted.org/packages/c2/b5/4960ba0fcca6ce394ed4a2f89ee13083e7fcbe9641a91166e8e9792fedb1/lxml-5.3.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cd7a959396da425022e1e4214895b5cfe7de7035a043bcc2d11303792b67554", size = 5052146 }, - { url = "https://files.pythonhosted.org/packages/5f/d1/184b04481a5d1f5758916de087430752a7b229bddbd6c1d23405078c72bd/lxml-5.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cac5eaeec3549c5df7f8f97a5a6db6963b91639389cdd735d5a806370847732b", size = 4789082 }, - { url = "https://files.pythonhosted.org/packages/7d/75/1a19749d373e9a3d08861addccdf50c92b628c67074b22b8f3c61997cf5a/lxml-5.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29b5f7d77334877c2146e7bb8b94e4df980325fab0a8af4d524e5d43cd6f789d", size = 5312300 }, - { url = "https://files.pythonhosted.org/packages/fb/00/9d165d4060d3f347e63b219fcea5c6a3f9193e9e2868c6801e18e5379725/lxml-5.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13f3495cfec24e3d63fffd342cc8141355d1d26ee766ad388775f5c8c5ec3932", size = 4836655 }, - { url = "https://files.pythonhosted.org/packages/b8/e9/06720a33cc155966448a19677f079100517b6629a872382d22ebd25e48aa/lxml-5.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e70ad4c9658beeff99856926fd3ee5fde8b519b92c693f856007177c36eb2e30", size = 4961795 }, - { url = "https://files.pythonhosted.org/packages/2d/57/4540efab2673de2904746b37ef7f74385329afd4643ed92abcc9ec6e00ca/lxml-5.3.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:507085365783abd7879fa0a6fa55eddf4bdd06591b17a2418403bb3aff8a267d", size 
= 4779791 }, - { url = "https://files.pythonhosted.org/packages/99/ad/6056edf6c9f4fa1d41e6fbdae52c733a4a257fd0d7feccfa26ae051bb46f/lxml-5.3.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:5bb304f67cbf5dfa07edad904732782cbf693286b9cd85af27059c5779131050", size = 5346807 }, - { url = "https://files.pythonhosted.org/packages/a1/fa/5be91fc91a18f3f705ea5533bc2210b25d738c6b615bf1c91e71a9b2f26b/lxml-5.3.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:3d84f5c093645c21c29a4e972b84cb7cf682f707f8706484a5a0c7ff13d7a988", size = 4909213 }, - { url = "https://files.pythonhosted.org/packages/f3/74/71bb96a3b5ae36b74e0402f4fa319df5559a8538577f8c57c50f1b57dc15/lxml-5.3.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:bdc13911db524bd63f37b0103af014b7161427ada41f1b0b3c9b5b5a9c1ca927", size = 4987694 }, - { url = "https://files.pythonhosted.org/packages/08/c2/3953a68b0861b2f97234b1838769269478ccf872d8ea7a26e911238220ad/lxml-5.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ec944539543f66ebc060ae180d47e86aca0188bda9cbfadff47d86b0dc057dc", size = 4862865 }, - { url = "https://files.pythonhosted.org/packages/e0/9a/52e48f7cfd5a5e61f44a77e679880580dfb4f077af52d6ed5dd97e3356fe/lxml-5.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:59d437cc8a7f838282df5a199cf26f97ef08f1c0fbec6e84bd6f5cc2b7913f6e", size = 5423383 }, - { url = "https://files.pythonhosted.org/packages/17/67/42fe1d489e4dcc0b264bef361aef0b929fbb2b5378702471a3043bc6982c/lxml-5.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e275961adbd32e15672e14e0cc976a982075208224ce06d149c92cb43db5b93", size = 5286864 }, - { url = "https://files.pythonhosted.org/packages/29/e4/03b1d040ee3aaf2bd4e1c2061de2eae1178fe9a460d3efc1ea7ef66f6011/lxml-5.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:038aeb6937aa404480c2966b7f26f1440a14005cb0702078c173c028eca72c31", size = 5056819 }, - { url = 
"https://files.pythonhosted.org/packages/83/b3/e2ec8a6378e4d87da3af9de7c862bcea7ca624fc1a74b794180c82e30123/lxml-5.3.2-cp312-cp312-win32.whl", hash = "sha256:3c2c8d0fa3277147bff180e3590be67597e17d365ce94beb2efa3138a2131f71", size = 3486177 }, - { url = "https://files.pythonhosted.org/packages/d5/8a/6a08254b0bab2da9573735725caab8302a2a1c9b3818533b41568ca489be/lxml-5.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:77809fcd97dfda3f399102db1794f7280737b69830cd5c961ac87b3c5c05662d", size = 3817134 }, - { url = "https://files.pythonhosted.org/packages/19/fe/904fd1b0ba4f42ed5a144fcfff7b8913181892a6aa7aeb361ee783d441f8/lxml-5.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:77626571fb5270ceb36134765f25b665b896243529eefe840974269b083e090d", size = 8173598 }, - { url = "https://files.pythonhosted.org/packages/97/e8/5e332877b3ce4e2840507b35d6dbe1cc33b17678ece945ba48d2962f8c06/lxml-5.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:78a533375dc7aa16d0da44af3cf6e96035e484c8c6b2b2445541a5d4d3d289ee", size = 4441586 }, - { url = "https://files.pythonhosted.org/packages/de/f4/8fe2e6d8721803182fbce2325712e98f22dbc478126070e62731ec6d54a0/lxml-5.3.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6f62b2404b3f3f0744bbcabb0381c5fe186fa2a9a67ecca3603480f4846c585", size = 5038447 }, - { url = "https://files.pythonhosted.org/packages/a6/ac/fa63f86a1a4b1ba8b03599ad9e2f5212fa813223ac60bfe1155390d1cc0c/lxml-5.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ea918da00091194526d40c30c4996971f09dacab032607581f8d8872db34fbf", size = 4783583 }, - { url = "https://files.pythonhosted.org/packages/1a/7a/08898541296a02c868d4acc11f31a5839d80f5b21d4a96f11d4c0fbed15e/lxml-5.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c35326f94702a7264aa0eea826a79547d3396a41ae87a70511b9f6e9667ad31c", size = 5305684 }, - { url = 
"https://files.pythonhosted.org/packages/0b/be/9a6d80b467771b90be762b968985d3de09e0d5886092238da65dac9c1f75/lxml-5.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3bef90af21d31c4544bc917f51e04f94ae11b43156356aff243cdd84802cbf2", size = 4830797 }, - { url = "https://files.pythonhosted.org/packages/8d/1c/493632959f83519802637f7db3be0113b6e8a4e501b31411fbf410735a75/lxml-5.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52fa7ba11a495b7cbce51573c73f638f1dcff7b3ee23697467dc063f75352a69", size = 4950302 }, - { url = "https://files.pythonhosted.org/packages/c7/13/01aa3b92a6b93253b90c061c7527261b792f5ae7724b420cded733bfd5d6/lxml-5.3.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ad131e2c4d2c3803e736bb69063382334e03648de2a6b8f56a878d700d4b557d", size = 4775247 }, - { url = "https://files.pythonhosted.org/packages/60/4a/baeb09fbf5c84809e119c9cf8e2e94acec326a9b45563bf5ae45a234973b/lxml-5.3.2-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:00a4463ca409ceacd20490a893a7e08deec7870840eff33dc3093067b559ce3e", size = 5338824 }, - { url = "https://files.pythonhosted.org/packages/69/c7/a05850f169ad783ed09740ac895e158b06d25fce4b13887a8ac92a84d61c/lxml-5.3.2-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:87e8d78205331cace2b73ac8249294c24ae3cba98220687b5b8ec5971a2267f1", size = 4899079 }, - { url = "https://files.pythonhosted.org/packages/de/48/18ca583aba5235582db0e933ed1af6540226ee9ca16c2ee2d6f504fcc34a/lxml-5.3.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bf6389133bb255e530a4f2f553f41c4dd795b1fbb6f797aea1eff308f1e11606", size = 4978041 }, - { url = "https://files.pythonhosted.org/packages/b6/55/6968ddc88554209d1dba0dca196360c629b3dfe083bc32a3370f9523a0c4/lxml-5.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b3709fc752b42fb6b6ffa2ba0a5b9871646d97d011d8f08f4d5b3ee61c7f3b2b", size = 4859761 }, - { url = 
"https://files.pythonhosted.org/packages/2e/52/d2d3baa1e0b7d04a729613160f1562f466fb1a0e45085a33acb0d6981a2b/lxml-5.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:abc795703d0de5d83943a4badd770fbe3d1ca16ee4ff3783d7caffc252f309ae", size = 5418209 }, - { url = "https://files.pythonhosted.org/packages/d3/50/6005b297ba5f858a113d6e81ccdb3a558b95a615772e7412d1f1cbdf22d7/lxml-5.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98050830bb6510159f65d9ad1b8aca27f07c01bb3884ba95f17319ccedc4bcf9", size = 5274231 }, - { url = "https://files.pythonhosted.org/packages/fb/33/6f40c09a5f7d7e7fcb85ef75072e53eba3fbadbf23e4991ca069ab2b1abb/lxml-5.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6ba465a91acc419c5682f8b06bcc84a424a7aa5c91c220241c6fd31de2a72bc6", size = 5051899 }, - { url = "https://files.pythonhosted.org/packages/8b/3a/673bc5c0d5fb6596ee2963dd016fdaefaed2c57ede82c7634c08cbda86c1/lxml-5.3.2-cp313-cp313-win32.whl", hash = "sha256:56a1d56d60ea1ec940f949d7a309e0bff05243f9bd337f585721605670abb1c1", size = 3485315 }, - { url = "https://files.pythonhosted.org/packages/8c/be/cab8dd33b0dbe3af5b5d4d24137218f79ea75d540f74eb7d8581195639e0/lxml-5.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:1a580dc232c33d2ad87d02c8a3069d47abbcdce974b9c9cc82a79ff603065dbe", size = 3814639 }, +sdist = { url = "https://files.pythonhosted.org/packages/80/61/d3dc048cd6c7be6fe45b80cedcbdd4326ba4d550375f266d9f4246d0f4bc/lxml-5.3.2.tar.gz", hash = "sha256:773947d0ed809ddad824b7b14467e1a481b8976e87278ac4a730c2f7c7fcddc1", size = 3679948, upload_time = "2025-04-05T18:31:58.757Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/7e/c749257a7fabc712c4df57927b0f703507f316e9f2c7e3219f8f76d36145/lxml-5.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:16b3897691ec0316a1aa3c6585f61c8b7978475587c5b16fc1d2c28d283dc1b0", size = 8193212, upload_time = "2025-04-05T18:26:42.692Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/50/17e985ba162c9f1ca119f4445004b58f9e5ef559ded599b16755e9bfa260/lxml-5.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a8d4b34a0eeaf6e73169dcfd653c8d47f25f09d806c010daf074fba2db5e2d3f", size = 4451439, upload_time = "2025-04-05T18:26:46.468Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b5/4960ba0fcca6ce394ed4a2f89ee13083e7fcbe9641a91166e8e9792fedb1/lxml-5.3.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cd7a959396da425022e1e4214895b5cfe7de7035a043bcc2d11303792b67554", size = 5052146, upload_time = "2025-04-05T18:26:49.737Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d1/184b04481a5d1f5758916de087430752a7b229bddbd6c1d23405078c72bd/lxml-5.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cac5eaeec3549c5df7f8f97a5a6db6963b91639389cdd735d5a806370847732b", size = 4789082, upload_time = "2025-04-05T18:26:52.295Z" }, + { url = "https://files.pythonhosted.org/packages/7d/75/1a19749d373e9a3d08861addccdf50c92b628c67074b22b8f3c61997cf5a/lxml-5.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29b5f7d77334877c2146e7bb8b94e4df980325fab0a8af4d524e5d43cd6f789d", size = 5312300, upload_time = "2025-04-05T18:26:54.923Z" }, + { url = "https://files.pythonhosted.org/packages/fb/00/9d165d4060d3f347e63b219fcea5c6a3f9193e9e2868c6801e18e5379725/lxml-5.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13f3495cfec24e3d63fffd342cc8141355d1d26ee766ad388775f5c8c5ec3932", size = 4836655, upload_time = "2025-04-05T18:26:57.488Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e9/06720a33cc155966448a19677f079100517b6629a872382d22ebd25e48aa/lxml-5.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e70ad4c9658beeff99856926fd3ee5fde8b519b92c693f856007177c36eb2e30", size = 4961795, upload_time = "2025-04-05T18:27:00.126Z" }, + { 
url = "https://files.pythonhosted.org/packages/2d/57/4540efab2673de2904746b37ef7f74385329afd4643ed92abcc9ec6e00ca/lxml-5.3.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:507085365783abd7879fa0a6fa55eddf4bdd06591b17a2418403bb3aff8a267d", size = 4779791, upload_time = "2025-04-05T18:27:03.061Z" }, + { url = "https://files.pythonhosted.org/packages/99/ad/6056edf6c9f4fa1d41e6fbdae52c733a4a257fd0d7feccfa26ae051bb46f/lxml-5.3.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:5bb304f67cbf5dfa07edad904732782cbf693286b9cd85af27059c5779131050", size = 5346807, upload_time = "2025-04-05T18:27:05.877Z" }, + { url = "https://files.pythonhosted.org/packages/a1/fa/5be91fc91a18f3f705ea5533bc2210b25d738c6b615bf1c91e71a9b2f26b/lxml-5.3.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:3d84f5c093645c21c29a4e972b84cb7cf682f707f8706484a5a0c7ff13d7a988", size = 4909213, upload_time = "2025-04-05T18:27:08.588Z" }, + { url = "https://files.pythonhosted.org/packages/f3/74/71bb96a3b5ae36b74e0402f4fa319df5559a8538577f8c57c50f1b57dc15/lxml-5.3.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:bdc13911db524bd63f37b0103af014b7161427ada41f1b0b3c9b5b5a9c1ca927", size = 4987694, upload_time = "2025-04-05T18:27:11.66Z" }, + { url = "https://files.pythonhosted.org/packages/08/c2/3953a68b0861b2f97234b1838769269478ccf872d8ea7a26e911238220ad/lxml-5.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ec944539543f66ebc060ae180d47e86aca0188bda9cbfadff47d86b0dc057dc", size = 4862865, upload_time = "2025-04-05T18:27:14.194Z" }, + { url = "https://files.pythonhosted.org/packages/e0/9a/52e48f7cfd5a5e61f44a77e679880580dfb4f077af52d6ed5dd97e3356fe/lxml-5.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:59d437cc8a7f838282df5a199cf26f97ef08f1c0fbec6e84bd6f5cc2b7913f6e", size = 5423383, upload_time = "2025-04-05T18:27:16.988Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/67/42fe1d489e4dcc0b264bef361aef0b929fbb2b5378702471a3043bc6982c/lxml-5.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e275961adbd32e15672e14e0cc976a982075208224ce06d149c92cb43db5b93", size = 5286864, upload_time = "2025-04-05T18:27:19.703Z" }, + { url = "https://files.pythonhosted.org/packages/29/e4/03b1d040ee3aaf2bd4e1c2061de2eae1178fe9a460d3efc1ea7ef66f6011/lxml-5.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:038aeb6937aa404480c2966b7f26f1440a14005cb0702078c173c028eca72c31", size = 5056819, upload_time = "2025-04-05T18:27:22.814Z" }, + { url = "https://files.pythonhosted.org/packages/83/b3/e2ec8a6378e4d87da3af9de7c862bcea7ca624fc1a74b794180c82e30123/lxml-5.3.2-cp312-cp312-win32.whl", hash = "sha256:3c2c8d0fa3277147bff180e3590be67597e17d365ce94beb2efa3138a2131f71", size = 3486177, upload_time = "2025-04-05T18:27:25.078Z" }, + { url = "https://files.pythonhosted.org/packages/d5/8a/6a08254b0bab2da9573735725caab8302a2a1c9b3818533b41568ca489be/lxml-5.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:77809fcd97dfda3f399102db1794f7280737b69830cd5c961ac87b3c5c05662d", size = 3817134, upload_time = "2025-04-05T18:27:27.481Z" }, + { url = "https://files.pythonhosted.org/packages/19/fe/904fd1b0ba4f42ed5a144fcfff7b8913181892a6aa7aeb361ee783d441f8/lxml-5.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:77626571fb5270ceb36134765f25b665b896243529eefe840974269b083e090d", size = 8173598, upload_time = "2025-04-05T18:27:31.229Z" }, + { url = "https://files.pythonhosted.org/packages/97/e8/5e332877b3ce4e2840507b35d6dbe1cc33b17678ece945ba48d2962f8c06/lxml-5.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:78a533375dc7aa16d0da44af3cf6e96035e484c8c6b2b2445541a5d4d3d289ee", size = 4441586, upload_time = "2025-04-05T18:27:33.883Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/f4/8fe2e6d8721803182fbce2325712e98f22dbc478126070e62731ec6d54a0/lxml-5.3.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6f62b2404b3f3f0744bbcabb0381c5fe186fa2a9a67ecca3603480f4846c585", size = 5038447, upload_time = "2025-04-05T18:27:36.426Z" }, + { url = "https://files.pythonhosted.org/packages/a6/ac/fa63f86a1a4b1ba8b03599ad9e2f5212fa813223ac60bfe1155390d1cc0c/lxml-5.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ea918da00091194526d40c30c4996971f09dacab032607581f8d8872db34fbf", size = 4783583, upload_time = "2025-04-05T18:27:39.492Z" }, + { url = "https://files.pythonhosted.org/packages/1a/7a/08898541296a02c868d4acc11f31a5839d80f5b21d4a96f11d4c0fbed15e/lxml-5.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c35326f94702a7264aa0eea826a79547d3396a41ae87a70511b9f6e9667ad31c", size = 5305684, upload_time = "2025-04-05T18:27:42.16Z" }, + { url = "https://files.pythonhosted.org/packages/0b/be/9a6d80b467771b90be762b968985d3de09e0d5886092238da65dac9c1f75/lxml-5.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3bef90af21d31c4544bc917f51e04f94ae11b43156356aff243cdd84802cbf2", size = 4830797, upload_time = "2025-04-05T18:27:45.071Z" }, + { url = "https://files.pythonhosted.org/packages/8d/1c/493632959f83519802637f7db3be0113b6e8a4e501b31411fbf410735a75/lxml-5.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52fa7ba11a495b7cbce51573c73f638f1dcff7b3ee23697467dc063f75352a69", size = 4950302, upload_time = "2025-04-05T18:27:47.979Z" }, + { url = "https://files.pythonhosted.org/packages/c7/13/01aa3b92a6b93253b90c061c7527261b792f5ae7724b420cded733bfd5d6/lxml-5.3.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ad131e2c4d2c3803e736bb69063382334e03648de2a6b8f56a878d700d4b557d", size = 4775247, upload_time = "2025-04-05T18:27:51.174Z" }, + 
{ url = "https://files.pythonhosted.org/packages/60/4a/baeb09fbf5c84809e119c9cf8e2e94acec326a9b45563bf5ae45a234973b/lxml-5.3.2-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:00a4463ca409ceacd20490a893a7e08deec7870840eff33dc3093067b559ce3e", size = 5338824, upload_time = "2025-04-05T18:27:54.15Z" }, + { url = "https://files.pythonhosted.org/packages/69/c7/a05850f169ad783ed09740ac895e158b06d25fce4b13887a8ac92a84d61c/lxml-5.3.2-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:87e8d78205331cace2b73ac8249294c24ae3cba98220687b5b8ec5971a2267f1", size = 4899079, upload_time = "2025-04-05T18:27:57.03Z" }, + { url = "https://files.pythonhosted.org/packages/de/48/18ca583aba5235582db0e933ed1af6540226ee9ca16c2ee2d6f504fcc34a/lxml-5.3.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bf6389133bb255e530a4f2f553f41c4dd795b1fbb6f797aea1eff308f1e11606", size = 4978041, upload_time = "2025-04-05T18:27:59.918Z" }, + { url = "https://files.pythonhosted.org/packages/b6/55/6968ddc88554209d1dba0dca196360c629b3dfe083bc32a3370f9523a0c4/lxml-5.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b3709fc752b42fb6b6ffa2ba0a5b9871646d97d011d8f08f4d5b3ee61c7f3b2b", size = 4859761, upload_time = "2025-04-05T18:28:02.83Z" }, + { url = "https://files.pythonhosted.org/packages/2e/52/d2d3baa1e0b7d04a729613160f1562f466fb1a0e45085a33acb0d6981a2b/lxml-5.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:abc795703d0de5d83943a4badd770fbe3d1ca16ee4ff3783d7caffc252f309ae", size = 5418209, upload_time = "2025-04-05T18:28:05.851Z" }, + { url = "https://files.pythonhosted.org/packages/d3/50/6005b297ba5f858a113d6e81ccdb3a558b95a615772e7412d1f1cbdf22d7/lxml-5.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98050830bb6510159f65d9ad1b8aca27f07c01bb3884ba95f17319ccedc4bcf9", size = 5274231, upload_time = "2025-04-05T18:28:08.849Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/33/6f40c09a5f7d7e7fcb85ef75072e53eba3fbadbf23e4991ca069ab2b1abb/lxml-5.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6ba465a91acc419c5682f8b06bcc84a424a7aa5c91c220241c6fd31de2a72bc6", size = 5051899, upload_time = "2025-04-05T18:28:11.729Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3a/673bc5c0d5fb6596ee2963dd016fdaefaed2c57ede82c7634c08cbda86c1/lxml-5.3.2-cp313-cp313-win32.whl", hash = "sha256:56a1d56d60ea1ec940f949d7a309e0bff05243f9bd337f585721605670abb1c1", size = 3485315, upload_time = "2025-04-05T18:28:14.815Z" }, + { url = "https://files.pythonhosted.org/packages/8c/be/cab8dd33b0dbe3af5b5d4d24137218f79ea75d540f74eb7d8581195639e0/lxml-5.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:1a580dc232c33d2ad87d02c8a3069d47abbcdce974b9c9cc82a79ff603065dbe", size = 3814639, upload_time = "2025-04-05T18:28:17.268Z" }, ] [[package]] @@ -351,9 +452,47 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mdurl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload_time = "2023-06-03T06:41:14.443Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = 
"sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload_time = "2023-06-03T06:41:11.019Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload_time = "2024-10-18T15:21:54.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload_time = "2024-10-18T15:21:13.777Z" }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload_time = "2024-10-18T15:21:14.822Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload_time = "2024-10-18T15:21:15.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload_time = "2024-10-18T15:21:17.133Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload_time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload_time = "2024-10-18T15:21:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload_time = "2024-10-18T15:21:19.671Z" }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload_time = "2024-10-18T15:21:20.971Z" }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload_time = "2024-10-18T15:21:22.646Z" }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload_time = "2024-10-18T15:21:23.499Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload_time = "2024-10-18T15:21:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload_time = "2024-10-18T15:21:25.382Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload_time = "2024-10-18T15:21:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload_time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload_time = "2024-10-18T15:21:27.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload_time = "2024-10-18T15:21:28.744Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload_time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload_time = "2024-10-18T15:21:30.366Z" }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload_time = "2024-10-18T15:21:31.207Z" }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload_time = "2024-10-18T15:21:32.032Z" }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload_time = "2024-10-18T15:21:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload_time = "2024-10-18T15:21:34.611Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload_time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload_time = "2024-10-18T15:21:36.231Z" }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload_time = "2024-10-18T15:21:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload_time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload_time = "2024-10-18T15:21:39.799Z" }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload_time = "2024-10-18T15:21:40.813Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload_time = "2024-10-18T15:21:41.814Z" }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload_time = "2024-10-18T15:21:42.784Z" }, ] [[package]] @@ -370,9 +509,9 @@ dependencies = [ { name = "starlette" }, { name = "uvicorn" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/d2/f587cb965a56e992634bebc8611c5b579af912b74e04eb9164bd49527d21/mcp-1.6.0.tar.gz", hash = "sha256:d9324876de2c5637369f43161cd71eebfd803df5a95e46225cab8d280e366723", size = 200031 } +sdist = { url = "https://files.pythonhosted.org/packages/95/d2/f587cb965a56e992634bebc8611c5b579af912b74e04eb9164bd49527d21/mcp-1.6.0.tar.gz", hash = "sha256:d9324876de2c5637369f43161cd71eebfd803df5a95e46225cab8d280e366723", size = 200031, upload_time = "2025-03-27T16:46:32.336Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/10/30/20a7f33b0b884a9d14dd3aa94ff1ac9da1479fe2ad66dd9e2736075d2506/mcp-1.6.0-py3-none-any.whl", hash = "sha256:7bd24c6ea042dbec44c754f100984d186620d8b841ec30f1b19eda9b93a634d0", size = 76077 }, + { url = "https://files.pythonhosted.org/packages/10/30/20a7f33b0b884a9d14dd3aa94ff1ac9da1479fe2ad66dd9e2736075d2506/mcp-1.6.0-py3-none-any.whl", hash = "sha256:7bd24c6ea042dbec44c754f100984d186620d8b841ec30f1b19eda9b93a634d0", size = 76077, upload_time = "2025-03-27T16:46:29.919Z" }, ] [package.optional-dependencies] @@ -385,9 +524,9 @@ cli = [ name = "mdurl" version = "0.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload_time = "2022-08-14T12:40:10.846Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload_time = "2022-08-14T12:40:09.779Z" }, ] [[package]] @@ -398,39 +537,39 @@ dependencies = [ { name = "mypy-extensions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ce/43/d5e49a86afa64bd3839ea0d5b9c7103487007d728e1293f52525d6d5486a/mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", size = 3239717 } +sdist = { url = "https://files.pythonhosted.org/packages/ce/43/d5e49a86afa64bd3839ea0d5b9c7103487007d728e1293f52525d6d5486a/mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", size = 3239717, upload_time = "2025-02-05T03:50:34.655Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/3a/03c74331c5eb8bd025734e04c9840532226775c47a2c39b56a0c8d4f128d/mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", size = 10793981 }, - { url = 
"https://files.pythonhosted.org/packages/f0/1a/41759b18f2cfd568848a37c89030aeb03534411eef981df621d8fad08a1d/mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", size = 9749175 }, - { url = "https://files.pythonhosted.org/packages/12/7e/873481abf1ef112c582db832740f4c11b2bfa510e829d6da29b0ab8c3f9c/mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", size = 11455675 }, - { url = "https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", size = 12410020 }, - { url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", size = 12498582 }, - { url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", size = 9366614 }, - { url = "https://files.pythonhosted.org/packages/6a/9b/fd2e05d6ffff24d912f150b87db9e364fa8282045c875654ce7e32fffa66/mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", size = 10788592 }, - { url = "https://files.pythonhosted.org/packages/74/37/b246d711c28a03ead1fd906bbc7106659aed7c089d55fe40dd58db812628/mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", size = 9753611 }, - { url = 
"https://files.pythonhosted.org/packages/a6/ac/395808a92e10cfdac8003c3de9a2ab6dc7cde6c0d2a4df3df1b815ffd067/mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", size = 11438443 }, - { url = "https://files.pythonhosted.org/packages/d2/8b/801aa06445d2de3895f59e476f38f3f8d610ef5d6908245f07d002676cbf/mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", size = 12402541 }, - { url = "https://files.pythonhosted.org/packages/c7/67/5a4268782eb77344cc613a4cf23540928e41f018a9a1ec4c6882baf20ab8/mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", size = 12494348 }, - { url = "https://files.pythonhosted.org/packages/83/3e/57bb447f7bbbfaabf1712d96f9df142624a386d98fb026a761532526057e/mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", size = 9373648 }, - { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777 }, + { url = "https://files.pythonhosted.org/packages/98/3a/03c74331c5eb8bd025734e04c9840532226775c47a2c39b56a0c8d4f128d/mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", size = 10793981, upload_time = "2025-02-05T03:50:28.25Z" }, + { url = "https://files.pythonhosted.org/packages/f0/1a/41759b18f2cfd568848a37c89030aeb03534411eef981df621d8fad08a1d/mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", size = 9749175, upload_time = 
"2025-02-05T03:50:13.411Z" }, + { url = "https://files.pythonhosted.org/packages/12/7e/873481abf1ef112c582db832740f4c11b2bfa510e829d6da29b0ab8c3f9c/mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", size = 11455675, upload_time = "2025-02-05T03:50:31.421Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", size = 12410020, upload_time = "2025-02-05T03:48:48.705Z" }, + { url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", size = 12498582, upload_time = "2025-02-05T03:49:03.628Z" }, + { url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", size = 9366614, upload_time = "2025-02-05T03:50:00.313Z" }, + { url = "https://files.pythonhosted.org/packages/6a/9b/fd2e05d6ffff24d912f150b87db9e364fa8282045c875654ce7e32fffa66/mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", size = 10788592, upload_time = "2025-02-05T03:48:55.789Z" }, + { url = "https://files.pythonhosted.org/packages/74/37/b246d711c28a03ead1fd906bbc7106659aed7c089d55fe40dd58db812628/mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", size = 9753611, upload_time = "2025-02-05T03:48:44.581Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/ac/395808a92e10cfdac8003c3de9a2ab6dc7cde6c0d2a4df3df1b815ffd067/mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", size = 11438443, upload_time = "2025-02-05T03:49:25.514Z" }, + { url = "https://files.pythonhosted.org/packages/d2/8b/801aa06445d2de3895f59e476f38f3f8d610ef5d6908245f07d002676cbf/mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", size = 12402541, upload_time = "2025-02-05T03:49:57.623Z" }, + { url = "https://files.pythonhosted.org/packages/c7/67/5a4268782eb77344cc613a4cf23540928e41f018a9a1ec4c6882baf20ab8/mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", size = 12494348, upload_time = "2025-02-05T03:48:52.361Z" }, + { url = "https://files.pythonhosted.org/packages/83/3e/57bb447f7bbbfaabf1712d96f9df142624a386d98fb026a761532526057e/mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", size = 9373648, upload_time = "2025-02-05T03:49:11.395Z" }, + { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777, upload_time = "2025-02-05T03:50:08.348Z" }, ] [[package]] name = "mypy-extensions" version = "1.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } +sdist = { url = 
"https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433, upload_time = "2023-02-04T12:11:27.157Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695, upload_time = "2023-02-04T12:11:25.002Z" }, ] [[package]] name = "nodeenv" version = "1.9.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload_time = "2024-06-04T18:44:11.171Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload_time 
= "2024-06-04T18:44:08.352Z" }, ] [[package]] @@ -440,18 +579,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/98/df/77698abfac98571e65ffeb0c1fba8ffd692ab8458d617a0eed7d9a8d38f2/outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8", size = 21060 } +sdist = { url = "https://files.pythonhosted.org/packages/98/df/77698abfac98571e65ffeb0c1fba8ffd692ab8458d617a0eed7d9a8d38f2/outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8", size = 21060, upload_time = "2023-10-26T04:26:04.361Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/55/8b/5ab7257531a5d830fc8000c476e63c935488d74609b50f9384a643ec0a62/outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b", size = 10692 }, + { url = "https://files.pythonhosted.org/packages/55/8b/5ab7257531a5d830fc8000c476e63c935488d74609b50f9384a643ec0a62/outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b", size = 10692, upload_time = "2023-10-26T04:26:02.532Z" }, ] [[package]] name = "platformdirs" version = "4.3.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b6/2d/7d512a3913d60623e7eb945c6d1b4f0bddf1d0b7ada5225274c87e5b53d1/platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351", size = 21291 } +sdist = { url = "https://files.pythonhosted.org/packages/b6/2d/7d512a3913d60623e7eb945c6d1b4f0bddf1d0b7ada5225274c87e5b53d1/platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351", size = 21291, upload_time = "2025-03-19T20:36:10.989Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/6d/45/59578566b3275b8fd9157885918fcd0c4d74162928a5310926887b856a51/platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94", size = 18499 }, + { url = "https://files.pythonhosted.org/packages/6d/45/59578566b3275b8fd9157885918fcd0c4d74162928a5310926887b856a51/platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94", size = 18499, upload_time = "2025-03-19T20:36:09.038Z" }, ] [[package]] @@ -465,18 +604,18 @@ dependencies = [ { name = "pyyaml" }, { name = "virtualenv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/08/39/679ca9b26c7bb2999ff122d50faa301e49af82ca9c066ec061cfbc0c6784/pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146", size = 193424 } +sdist = { url = "https://files.pythonhosted.org/packages/08/39/679ca9b26c7bb2999ff122d50faa301e49af82ca9c066ec061cfbc0c6784/pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146", size = 193424, upload_time = "2025-03-18T21:35:20.987Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707 }, + { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload_time = "2025-03-18T21:35:19.343Z" }, ] [[package]] name = "pycparser" version = "2.22" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", 
hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload_time = "2024-03-30T13:22:22.564Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload_time = "2024-03-30T13:22:20.476Z" }, ] [[package]] @@ -489,9 +628,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/10/2e/ca897f093ee6c5f3b0bee123ee4465c50e75431c3d5b6a3b44a47134e891/pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3", size = 785513 } +sdist = { url = "https://files.pythonhosted.org/packages/10/2e/ca897f093ee6c5f3b0bee123ee4465c50e75431c3d5b6a3b44a47134e891/pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3", size = 785513, upload_time = "2025-04-08T13:27:06.399Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/1d/407b29780a289868ed696d1616f4aad49d6388e5a77f567dcd2629dcd7b8/pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f", size = 443591 }, + { url = 
"https://files.pythonhosted.org/packages/b0/1d/407b29780a289868ed696d1616f4aad49d6388e5a77f567dcd2629dcd7b8/pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f", size = 443591, upload_time = "2025-04-08T13:27:03.789Z" }, ] [[package]] @@ -501,39 +640,39 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/17/19/ed6a078a5287aea7922de6841ef4c06157931622c89c2a47940837b5eecd/pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df", size = 434395 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/ce/3cb22b07c29938f97ff5f5bb27521f95e2ebec399b882392deb68d6c440e/pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8", size = 2026640 }, - { url = "https://files.pythonhosted.org/packages/19/78/f381d643b12378fee782a72126ec5d793081ef03791c28a0fd542a5bee64/pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498", size = 1852649 }, - { url = "https://files.pythonhosted.org/packages/9d/2b/98a37b80b15aac9eb2c6cfc6dbd35e5058a352891c5cce3a8472d77665a6/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939", size = 1892472 }, - { url = "https://files.pythonhosted.org/packages/4e/d4/3c59514e0f55a161004792b9ff3039da52448f43f5834f905abef9db6e4a/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d", size = 1977509 }, - { url = 
"https://files.pythonhosted.org/packages/a9/b6/c2c7946ef70576f79a25db59a576bce088bdc5952d1b93c9789b091df716/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e", size = 2128702 }, - { url = "https://files.pythonhosted.org/packages/88/fe/65a880f81e3f2a974312b61f82a03d85528f89a010ce21ad92f109d94deb/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3", size = 2679428 }, - { url = "https://files.pythonhosted.org/packages/6f/ff/4459e4146afd0462fb483bb98aa2436d69c484737feaceba1341615fb0ac/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d", size = 2008753 }, - { url = "https://files.pythonhosted.org/packages/7c/76/1c42e384e8d78452ededac8b583fe2550c84abfef83a0552e0e7478ccbc3/pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b", size = 2114849 }, - { url = "https://files.pythonhosted.org/packages/00/72/7d0cf05095c15f7ffe0eb78914b166d591c0eed72f294da68378da205101/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39", size = 2069541 }, - { url = "https://files.pythonhosted.org/packages/b3/69/94a514066bb7d8be499aa764926937409d2389c09be0b5107a970286ef81/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a", size = 2239225 }, - { url = "https://files.pythonhosted.org/packages/84/b0/e390071eadb44b41f4f54c3cef64d8bf5f9612c92686c9299eaa09e267e2/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db", 
size = 2248373 }, - { url = "https://files.pythonhosted.org/packages/d6/b2/288b3579ffc07e92af66e2f1a11be3b056fe1214aab314748461f21a31c3/pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda", size = 1907034 }, - { url = "https://files.pythonhosted.org/packages/02/28/58442ad1c22b5b6742b992ba9518420235adced665513868f99a1c2638a5/pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4", size = 1956848 }, - { url = "https://files.pythonhosted.org/packages/a1/eb/f54809b51c7e2a1d9f439f158b8dd94359321abcc98767e16fc48ae5a77e/pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea", size = 1903986 }, - { url = "https://files.pythonhosted.org/packages/7a/24/eed3466a4308d79155f1cdd5c7432c80ddcc4530ba8623b79d5ced021641/pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a", size = 2033551 }, - { url = "https://files.pythonhosted.org/packages/ab/14/df54b1a0bc9b6ded9b758b73139d2c11b4e8eb43e8ab9c5847c0a2913ada/pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266", size = 1852785 }, - { url = "https://files.pythonhosted.org/packages/fa/96/e275f15ff3d34bb04b0125d9bc8848bf69f25d784d92a63676112451bfb9/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3", size = 1897758 }, - { url = "https://files.pythonhosted.org/packages/b7/d8/96bc536e975b69e3a924b507d2a19aedbf50b24e08c80fb00e35f9baaed8/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a", size = 1986109 }, - { url = 
"https://files.pythonhosted.org/packages/90/72/ab58e43ce7e900b88cb571ed057b2fcd0e95b708a2e0bed475b10130393e/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516", size = 2129159 }, - { url = "https://files.pythonhosted.org/packages/dc/3f/52d85781406886c6870ac995ec0ba7ccc028b530b0798c9080531b409fdb/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764", size = 2680222 }, - { url = "https://files.pythonhosted.org/packages/f4/56/6e2ef42f363a0eec0fd92f74a91e0ac48cd2e49b695aac1509ad81eee86a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d", size = 2006980 }, - { url = "https://files.pythonhosted.org/packages/4c/c0/604536c4379cc78359f9ee0aa319f4aedf6b652ec2854953f5a14fc38c5a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4", size = 2120840 }, - { url = "https://files.pythonhosted.org/packages/1f/46/9eb764814f508f0edfb291a0f75d10854d78113fa13900ce13729aaec3ae/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde", size = 2072518 }, - { url = "https://files.pythonhosted.org/packages/42/e3/fb6b2a732b82d1666fa6bf53e3627867ea3131c5f39f98ce92141e3e3dc1/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e", size = 2248025 }, - { url = "https://files.pythonhosted.org/packages/5c/9d/fbe8fe9d1aa4dac88723f10a921bc7418bd3378a567cb5e21193a3c48b43/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd", 
size = 2254991 }, - { url = "https://files.pythonhosted.org/packages/aa/99/07e2237b8a66438d9b26482332cda99a9acccb58d284af7bc7c946a42fd3/pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f", size = 1915262 }, - { url = "https://files.pythonhosted.org/packages/8a/f4/e457a7849beeed1e5defbcf5051c6f7b3c91a0624dd31543a64fc9adcf52/pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40", size = 1956626 }, - { url = "https://files.pythonhosted.org/packages/20/d0/e8d567a7cff7b04e017ae164d98011f1e1894269fe8e90ea187a3cbfb562/pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523", size = 1909590 }, - { url = "https://files.pythonhosted.org/packages/ef/fd/24ea4302d7a527d672c5be06e17df16aabfb4e9fdc6e0b345c21580f3d2a/pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d", size = 1812963 }, - { url = "https://files.pythonhosted.org/packages/5f/95/4fbc2ecdeb5c1c53f1175a32d870250194eb2fdf6291b795ab08c8646d5d/pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c", size = 1986896 }, - { url = "https://files.pythonhosted.org/packages/71/ae/fe31e7f4a62431222d8f65a3bd02e3fa7e6026d154a00818e6d30520ea77/pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18", size = 1931810 }, +sdist = { url = "https://files.pythonhosted.org/packages/17/19/ed6a078a5287aea7922de6841ef4c06157931622c89c2a47940837b5eecd/pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df", size = 434395, upload_time = "2025-04-02T09:49:41.8Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c8/ce/3cb22b07c29938f97ff5f5bb27521f95e2ebec399b882392deb68d6c440e/pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8", size = 2026640, upload_time = "2025-04-02T09:47:25.394Z" }, + { url = "https://files.pythonhosted.org/packages/19/78/f381d643b12378fee782a72126ec5d793081ef03791c28a0fd542a5bee64/pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498", size = 1852649, upload_time = "2025-04-02T09:47:27.417Z" }, + { url = "https://files.pythonhosted.org/packages/9d/2b/98a37b80b15aac9eb2c6cfc6dbd35e5058a352891c5cce3a8472d77665a6/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939", size = 1892472, upload_time = "2025-04-02T09:47:29.006Z" }, + { url = "https://files.pythonhosted.org/packages/4e/d4/3c59514e0f55a161004792b9ff3039da52448f43f5834f905abef9db6e4a/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d", size = 1977509, upload_time = "2025-04-02T09:47:33.464Z" }, + { url = "https://files.pythonhosted.org/packages/a9/b6/c2c7946ef70576f79a25db59a576bce088bdc5952d1b93c9789b091df716/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e", size = 2128702, upload_time = "2025-04-02T09:47:34.812Z" }, + { url = "https://files.pythonhosted.org/packages/88/fe/65a880f81e3f2a974312b61f82a03d85528f89a010ce21ad92f109d94deb/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3", size = 2679428, upload_time = "2025-04-02T09:47:37.315Z" }, + { 
url = "https://files.pythonhosted.org/packages/6f/ff/4459e4146afd0462fb483bb98aa2436d69c484737feaceba1341615fb0ac/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d", size = 2008753, upload_time = "2025-04-02T09:47:39.013Z" }, + { url = "https://files.pythonhosted.org/packages/7c/76/1c42e384e8d78452ededac8b583fe2550c84abfef83a0552e0e7478ccbc3/pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b", size = 2114849, upload_time = "2025-04-02T09:47:40.427Z" }, + { url = "https://files.pythonhosted.org/packages/00/72/7d0cf05095c15f7ffe0eb78914b166d591c0eed72f294da68378da205101/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39", size = 2069541, upload_time = "2025-04-02T09:47:42.01Z" }, + { url = "https://files.pythonhosted.org/packages/b3/69/94a514066bb7d8be499aa764926937409d2389c09be0b5107a970286ef81/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a", size = 2239225, upload_time = "2025-04-02T09:47:43.425Z" }, + { url = "https://files.pythonhosted.org/packages/84/b0/e390071eadb44b41f4f54c3cef64d8bf5f9612c92686c9299eaa09e267e2/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db", size = 2248373, upload_time = "2025-04-02T09:47:44.979Z" }, + { url = "https://files.pythonhosted.org/packages/d6/b2/288b3579ffc07e92af66e2f1a11be3b056fe1214aab314748461f21a31c3/pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda", size = 1907034, upload_time = "2025-04-02T09:47:46.843Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/28/58442ad1c22b5b6742b992ba9518420235adced665513868f99a1c2638a5/pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4", size = 1956848, upload_time = "2025-04-02T09:47:48.404Z" }, + { url = "https://files.pythonhosted.org/packages/a1/eb/f54809b51c7e2a1d9f439f158b8dd94359321abcc98767e16fc48ae5a77e/pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea", size = 1903986, upload_time = "2025-04-02T09:47:49.839Z" }, + { url = "https://files.pythonhosted.org/packages/7a/24/eed3466a4308d79155f1cdd5c7432c80ddcc4530ba8623b79d5ced021641/pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a", size = 2033551, upload_time = "2025-04-02T09:47:51.648Z" }, + { url = "https://files.pythonhosted.org/packages/ab/14/df54b1a0bc9b6ded9b758b73139d2c11b4e8eb43e8ab9c5847c0a2913ada/pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266", size = 1852785, upload_time = "2025-04-02T09:47:53.149Z" }, + { url = "https://files.pythonhosted.org/packages/fa/96/e275f15ff3d34bb04b0125d9bc8848bf69f25d784d92a63676112451bfb9/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3", size = 1897758, upload_time = "2025-04-02T09:47:55.006Z" }, + { url = "https://files.pythonhosted.org/packages/b7/d8/96bc536e975b69e3a924b507d2a19aedbf50b24e08c80fb00e35f9baaed8/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a", size = 1986109, upload_time = "2025-04-02T09:47:56.532Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/72/ab58e43ce7e900b88cb571ed057b2fcd0e95b708a2e0bed475b10130393e/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516", size = 2129159, upload_time = "2025-04-02T09:47:58.088Z" }, + { url = "https://files.pythonhosted.org/packages/dc/3f/52d85781406886c6870ac995ec0ba7ccc028b530b0798c9080531b409fdb/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764", size = 2680222, upload_time = "2025-04-02T09:47:59.591Z" }, + { url = "https://files.pythonhosted.org/packages/f4/56/6e2ef42f363a0eec0fd92f74a91e0ac48cd2e49b695aac1509ad81eee86a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d", size = 2006980, upload_time = "2025-04-02T09:48:01.397Z" }, + { url = "https://files.pythonhosted.org/packages/4c/c0/604536c4379cc78359f9ee0aa319f4aedf6b652ec2854953f5a14fc38c5a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4", size = 2120840, upload_time = "2025-04-02T09:48:03.056Z" }, + { url = "https://files.pythonhosted.org/packages/1f/46/9eb764814f508f0edfb291a0f75d10854d78113fa13900ce13729aaec3ae/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde", size = 2072518, upload_time = "2025-04-02T09:48:04.662Z" }, + { url = "https://files.pythonhosted.org/packages/42/e3/fb6b2a732b82d1666fa6bf53e3627867ea3131c5f39f98ce92141e3e3dc1/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e", size = 2248025, upload_time = "2025-04-02T09:48:06.226Z" }, + { url 
= "https://files.pythonhosted.org/packages/5c/9d/fbe8fe9d1aa4dac88723f10a921bc7418bd3378a567cb5e21193a3c48b43/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd", size = 2254991, upload_time = "2025-04-02T09:48:08.114Z" }, + { url = "https://files.pythonhosted.org/packages/aa/99/07e2237b8a66438d9b26482332cda99a9acccb58d284af7bc7c946a42fd3/pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f", size = 1915262, upload_time = "2025-04-02T09:48:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f4/e457a7849beeed1e5defbcf5051c6f7b3c91a0624dd31543a64fc9adcf52/pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40", size = 1956626, upload_time = "2025-04-02T09:48:11.288Z" }, + { url = "https://files.pythonhosted.org/packages/20/d0/e8d567a7cff7b04e017ae164d98011f1e1894269fe8e90ea187a3cbfb562/pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523", size = 1909590, upload_time = "2025-04-02T09:48:12.861Z" }, + { url = "https://files.pythonhosted.org/packages/ef/fd/24ea4302d7a527d672c5be06e17df16aabfb4e9fdc6e0b345c21580f3d2a/pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d", size = 1812963, upload_time = "2025-04-02T09:48:14.553Z" }, + { url = "https://files.pythonhosted.org/packages/5f/95/4fbc2ecdeb5c1c53f1175a32d870250194eb2fdf6291b795ab08c8646d5d/pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c", size = 1986896, upload_time = "2025-04-02T09:48:16.222Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/ae/fe31e7f4a62431222d8f65a3bd02e3fa7e6026d154a00818e6d30520ea77/pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18", size = 1931810, upload_time = "2025-04-02T09:48:17.97Z" }, ] [[package]] @@ -544,77 +683,86 @@ dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/88/82/c79424d7d8c29b994fb01d277da57b0a9b09cc03c3ff875f9bd8a86b2145/pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585", size = 83550 } +sdist = { url = "https://files.pythonhosted.org/packages/88/82/c79424d7d8c29b994fb01d277da57b0a9b09cc03c3ff875f9bd8a86b2145/pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585", size = 83550, upload_time = "2025-02-27T10:10:32.338Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0b/53/a64f03044927dc47aafe029c42a5b7aabc38dfb813475e0e1bf71c4a59d0/pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c", size = 30839 }, + { url = "https://files.pythonhosted.org/packages/0b/53/a64f03044927dc47aafe029c42a5b7aabc38dfb813475e0e1bf71c4a59d0/pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c", size = 30839, upload_time = "2025-02-27T10:10:30.711Z" }, ] [[package]] name = "pygments" version = "2.19.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 } +sdist = { url = 
"https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload_time = "2025-01-06T17:26:30.443Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 }, + { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload_time = "2025-01-06T17:26:25.553Z" }, ] [[package]] name = "pyperclip" version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/30/23/2f0a3efc4d6a32f3b63cdff36cd398d9701d26cda58e3ab97ac79fb5e60d/pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310", size = 20961 } +sdist = { url = "https://files.pythonhosted.org/packages/30/23/2f0a3efc4d6a32f3b63cdff36cd398d9701d26cda58e3ab97ac79fb5e60d/pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310", size = 20961, upload_time = "2024-06-18T20:38:48.401Z" } [[package]] name = "pysocks" version = "1.7.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/11/293dd436aea955d45fc4e8a35b6ae7270f5b8e00b53cf6c024c83b657a11/PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0", size = 284429 } +sdist = { url = "https://files.pythonhosted.org/packages/bd/11/293dd436aea955d45fc4e8a35b6ae7270f5b8e00b53cf6c024c83b657a11/PySocks-1.7.1.tar.gz", hash = 
"sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0", size = 284429, upload_time = "2019-09-20T02:07:35.714Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/59/b4572118e098ac8e46e399a1dd0f2d85403ce8bbaad9ec79373ed6badaf9/PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5", size = 16725 }, + { url = "https://files.pythonhosted.org/packages/8d/59/b4572118e098ac8e46e399a1dd0f2d85403ce8bbaad9ec79373ed6badaf9/PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5", size = 16725, upload_time = "2019-09-20T02:06:22.938Z" }, ] [[package]] name = "python-dotenv" version = "1.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920 } +sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920, upload_time = "2025-03-25T10:14:56.835Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256 }, + { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload_time = "2025-03-25T10:14:55.034Z" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload_time = "2024-12-16T19:45:46.972Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload_time = "2024-12-16T19:45:44.423Z" }, ] [[package]] name = "pyyaml" version = "6.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, - { url = 
"https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, - { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, - { url = 
"https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, - { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, - { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, - { url = 
"https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload_time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload_time = "2024-08-06T20:32:25.131Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload_time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload_time = "2024-08-06T20:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload_time = "2024-08-06T20:32:30.058Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload_time = "2024-08-06T20:32:31.881Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload_time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload_time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload_time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload_time = "2024-08-06T20:32:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload_time = "2024-08-06T20:32:43.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload_time = "2024-08-06T20:32:44.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload_time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload_time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload_time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload_time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload_time = "2024-08-06T20:32:56.985Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload_time = "2024-08-06T20:33:03.001Z" }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload_time = "2024-08-06T20:33:04.33Z" }, ] [[package]] name = "readchar" version = "4.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dd/f8/8657b8cbb4ebeabfbdf991ac40eca8a1d1bd012011bd44ad1ed10f5cb494/readchar-4.2.1.tar.gz", hash = "sha256:91ce3faf07688de14d800592951e5575e9c7a3213738ed01d394dcc949b79adb", size = 9685 } +sdist = { url = "https://files.pythonhosted.org/packages/dd/f8/8657b8cbb4ebeabfbdf991ac40eca8a1d1bd012011bd44ad1ed10f5cb494/readchar-4.2.1.tar.gz", hash = "sha256:91ce3faf07688de14d800592951e5575e9c7a3213738ed01d394dcc949b79adb", size = 9685, upload_time = "2024-11-04T18:28:07.757Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/10/e4b1e0e5b6b6745c8098c275b69bc9d73e9542d5c7da4f137542b499ed44/readchar-4.2.1-py3-none-any.whl", hash = "sha256:a769305cd3994bb5fa2764aa4073452dc105a4ec39068ffe6efd3c20c60acc77", size = 9350 }, + { url = "https://files.pythonhosted.org/packages/a9/10/e4b1e0e5b6b6745c8098c275b69bc9d73e9542d5c7da4f137542b499ed44/readchar-4.2.1-py3-none-any.whl", hash = "sha256:a769305cd3994bb5fa2764aa4073452dc105a4ec39068ffe6efd3c20c60acc77", size = 9350, upload_time = "2024-11-04T18:28:02.859Z" }, ] [[package]] @@ -627,9 +775,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload_time = "2024-05-29T15:37:49.536Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload_time = "2024-05-29T15:37:47.027Z" }, ] [[package]] @@ -640,9 +788,23 @@ dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078 } +sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078, upload_time = "2025-03-30T14:15:14.23Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload_time = 
"2025-03-30T14:15:12.283Z" }, +] + +[[package]] +name = "rich-toolkit" +version = "0.14.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e1/33/18332e1359803ae6407a1e605a6bdb253a426ffe931555f1299f9e39eece/rich_toolkit-0.14.4.tar.gz", hash = "sha256:db256cf45165cae381c9bbf3b48a0fd4d99a07c80155cc655c80212a62e28fe1", size = 104487, upload_time = "2025-04-29T19:43:36.904Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229 }, + { url = "https://files.pythonhosted.org/packages/44/48/c6d43d4c56c45c0171c771b2b73deeec493efb57795b651319201e7c4638/rich_toolkit-0.14.4-py3-none-any.whl", hash = "sha256:cc71ebee83eaa122d8e42882408bc5a4bf0240bbf1e368811ee56d249b3d742a", size = 24258, upload_time = "2025-04-29T19:43:35.502Z" }, ] [[package]] @@ -652,9 +814,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "xmod" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/26/6d/b9aace390f62db5d7d2c77eafce3d42774f27f1829d24fa9b6f598b3ef71/runs-1.2.2.tar.gz", hash = "sha256:9dc1815e2895cfb3a48317b173b9f1eac9ba5549b36a847b5cc60c3bf82ecef1", size = 5474 } +sdist = { url = "https://files.pythonhosted.org/packages/26/6d/b9aace390f62db5d7d2c77eafce3d42774f27f1829d24fa9b6f598b3ef71/runs-1.2.2.tar.gz", hash = "sha256:9dc1815e2895cfb3a48317b173b9f1eac9ba5549b36a847b5cc60c3bf82ecef1", size = 5474, upload_time = "2024-01-25T14:44:01.563Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/d6/17caf2e4af1dec288477a0cbbe4a96fbc9b8a28457dce3f1f452630ce216/runs-1.2.2-py3-none-any.whl", hash = "sha256:0980dcbc25aba1505f307ac4f0e9e92cbd0be2a15a1e983ee86c24c87b839dfd", size = 7033 }, + { url 
= "https://files.pythonhosted.org/packages/86/d6/17caf2e4af1dec288477a0cbbe4a96fbc9b8a28457dce3f1f452630ce216/runs-1.2.2-py3-none-any.whl", hash = "sha256:0980dcbc25aba1505f307ac4f0e9e92cbd0be2a15a1e983ee86c24c87b839dfd", size = 7033, upload_time = "2024-01-25T14:43:59.959Z" }, ] [[package]] @@ -669,45 +831,45 @@ dependencies = [ { name = "urllib3", extra = ["socks"] }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e0/bf/642cce8b5a9edad8e4880fdefbeb24f69bec2086b1121c63f883c412b797/selenium-4.31.0.tar.gz", hash = "sha256:441cffc436a2e6659fe3cfb012692435652efd38b0d368d16f661a5db47825f5", size = 855418 } +sdist = { url = "https://files.pythonhosted.org/packages/e0/bf/642cce8b5a9edad8e4880fdefbeb24f69bec2086b1121c63f883c412b797/selenium-4.31.0.tar.gz", hash = "sha256:441cffc436a2e6659fe3cfb012692435652efd38b0d368d16f661a5db47825f5", size = 855418, upload_time = "2025-04-05T00:43:06.447Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/53/212db779d2481b0a8428365960596f8d5a4d482ae12c441d0507fd54aaf2/selenium-4.31.0-py3-none-any.whl", hash = "sha256:7b8b8d5e424d7133cb7aa656263b19ac505ec26d65c0f921a696e7e2c5ccd95b", size = 9350584 }, + { url = "https://files.pythonhosted.org/packages/32/53/212db779d2481b0a8428365960596f8d5a4d482ae12c441d0507fd54aaf2/selenium-4.31.0-py3-none-any.whl", hash = "sha256:7b8b8d5e424d7133cb7aa656263b19ac505ec26d65c0f921a696e7e2c5ccd95b", size = 9350584, upload_time = "2025-04-05T00:43:04.04Z" }, ] [[package]] name = "shellingham" version = "1.5.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 } +sdist = { url = 
"https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload_time = "2023-10-24T04:13:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 }, + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload_time = "2023-10-24T04:13:38.866Z" }, ] [[package]] name = "six" version = "1.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload_time = "2024-12-04T17:35:28.174Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload_time = 
"2024-12-04T17:35:26.475Z" }, ] [[package]] name = "sniffio" version = "1.3.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload_time = "2024-02-25T23:20:04.057Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload_time = "2024-02-25T23:20:01.196Z" }, ] [[package]] name = "sortedcontainers" version = "2.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594 } +sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload_time = "2021-05-16T22:03:42.897Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575 }, + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload_time = "2021-05-16T22:03:41.177Z" }, ] [[package]] @@ -718,9 +880,9 @@ dependencies = [ { name = "anyio" }, { name = "starlette" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/71/a4/80d2a11af59fe75b48230846989e93979c892d3a20016b42bb44edb9e398/sse_starlette-2.2.1.tar.gz", hash = "sha256:54470d5f19274aeed6b2d473430b08b4b379ea851d953b11d7f1c4a2c118b419", size = 17376 } +sdist = { url = "https://files.pythonhosted.org/packages/71/a4/80d2a11af59fe75b48230846989e93979c892d3a20016b42bb44edb9e398/sse_starlette-2.2.1.tar.gz", hash = "sha256:54470d5f19274aeed6b2d473430b08b4b379ea851d953b11d7f1c4a2c118b419", size = 17376, upload_time = "2024-12-25T09:09:30.616Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/e0/5b8bd393f27f4a62461c5cf2479c75a2cc2ffa330976f9f00f5f6e4f50eb/sse_starlette-2.2.1-py3-none-any.whl", hash = "sha256:6410a3d3ba0c89e7675d4c273a301d64649c03a5ef1ca101f10b47f895fd0e99", size = 10120 }, + { url = "https://files.pythonhosted.org/packages/d9/e0/5b8bd393f27f4a62461c5cf2479c75a2cc2ffa330976f9f00f5f6e4f50eb/sse_starlette-2.2.1-py3-none-any.whl", hash = "sha256:6410a3d3ba0c89e7675d4c273a301d64649c03a5ef1ca101f10b47f895fd0e99", size = 10120, upload_time = "2024-12-25T09:09:26.761Z" }, ] [[package]] @@ -730,9 +892,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/04/1b/52b27f2e13ceedc79a908e29eac426a63465a1a01248e5f24aa36a62aeb3/starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230", size = 2580102 } +sdist = { url = "https://files.pythonhosted.org/packages/04/1b/52b27f2e13ceedc79a908e29eac426a63465a1a01248e5f24aa36a62aeb3/starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230", size = 2580102, upload_time = "2025-03-08T10:55:34.504Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/4b/528ccf7a982216885a1ff4908e886b8fb5f19862d1962f56a3fce2435a70/starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227", size = 71995 }, + { url = "https://files.pythonhosted.org/packages/a0/4b/528ccf7a982216885a1ff4908e886b8fb5f19862d1962f56a3fce2435a70/starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227", size = 71995, upload_time = "2025-03-08T10:55:32.662Z" }, ] [[package]] @@ -747,9 +909,9 @@ dependencies = [ { name = "sniffio" }, { name = "sortedcontainers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/47/f62e62a1a6f37909aed0bf8f5d5411e06fa03846cfcb64540cd1180ccc9f/trio-0.29.0.tar.gz", hash = "sha256:ea0d3967159fc130acb6939a0be0e558e364fee26b5deeecc893a6b08c361bdf", size = 588952 } +sdist = { url = "https://files.pythonhosted.org/packages/a1/47/f62e62a1a6f37909aed0bf8f5d5411e06fa03846cfcb64540cd1180ccc9f/trio-0.29.0.tar.gz", hash = "sha256:ea0d3967159fc130acb6939a0be0e558e364fee26b5deeecc893a6b08c361bdf", size = 588952, upload_time = "2025-02-14T07:13:50.724Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/55/c4d9bea8b3d7937901958f65124123512419ab0eb73695e5f382521abbfb/trio-0.29.0-py3-none-any.whl", hash = "sha256:d8c463f1a9cc776ff63e331aba44c125f423a5a13c684307e828d930e625ba66", size = 492920 }, + { url = 
"https://files.pythonhosted.org/packages/c9/55/c4d9bea8b3d7937901958f65124123512419ab0eb73695e5f382521abbfb/trio-0.29.0-py3-none-any.whl", hash = "sha256:d8c463f1a9cc776ff63e331aba44c125f423a5a13c684307e828d930e625ba66", size = 492920, upload_time = "2025-02-14T07:13:48.696Z" }, ] [[package]] @@ -761,9 +923,9 @@ dependencies = [ { name = "trio" }, { name = "wsproto" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d1/3c/8b4358e81f2f2cfe71b66a267f023a91db20a817b9425dd964873796980a/trio_websocket-0.12.2.tar.gz", hash = "sha256:22c72c436f3d1e264d0910a3951934798dcc5b00ae56fc4ee079d46c7cf20fae", size = 33549 } +sdist = { url = "https://files.pythonhosted.org/packages/d1/3c/8b4358e81f2f2cfe71b66a267f023a91db20a817b9425dd964873796980a/trio_websocket-0.12.2.tar.gz", hash = "sha256:22c72c436f3d1e264d0910a3951934798dcc5b00ae56fc4ee079d46c7cf20fae", size = 33549, upload_time = "2025-02-25T05:16:58.947Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/19/eb640a397bba49ba49ef9dbe2e7e5c04202ba045b6ce2ec36e9cadc51e04/trio_websocket-0.12.2-py3-none-any.whl", hash = "sha256:df605665f1db533f4a386c94525870851096a223adcb97f72a07e8b4beba45b6", size = 21221 }, + { url = "https://files.pythonhosted.org/packages/c7/19/eb640a397bba49ba49ef9dbe2e7e5c04202ba045b6ce2ec36e9cadc51e04/trio_websocket-0.12.2-py3-none-any.whl", hash = "sha256:df605665f1db533f4a386c94525870851096a223adcb97f72a07e8b4beba45b6", size = 21221, upload_time = "2025-02-25T05:16:57.545Z" }, ] [[package]] @@ -776,18 +938,18 @@ dependencies = [ { name = "shellingham" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/6f/3991f0f1c7fcb2df31aef28e0594d8d54b05393a0e4e34c65e475c2a5d41/typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5", size = 100711 } +sdist = { url = "https://files.pythonhosted.org/packages/8b/6f/3991f0f1c7fcb2df31aef28e0594d8d54b05393a0e4e34c65e475c2a5d41/typer-0.15.2.tar.gz", hash 
= "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5", size = 100711, upload_time = "2025-02-27T19:17:34.807Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/fc/5b29fea8cee020515ca82cc68e3b8e1e34bb19a3535ad854cac9257b414c/typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc", size = 45061 }, + { url = "https://files.pythonhosted.org/packages/7f/fc/5b29fea8cee020515ca82cc68e3b8e1e34bb19a3535ad854cac9257b414c/typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc", size = 45061, upload_time = "2025-02-27T19:17:32.111Z" }, ] [[package]] name = "typing-extensions" version = "4.13.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967 } +sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967, upload_time = "2025-04-10T14:19:05.416Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806 }, + { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806, upload_time = "2025-04-10T14:19:03.967Z" }, ] [[package]] @@ -797,18 +959,18 @@ source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222 } +sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222, upload_time = "2025-02-25T17:27:59.638Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125 }, + { url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125, upload_time = "2025-02-25T17:27:57.754Z" }, ] [[package]] name = "urllib3" version = "2.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672 } +sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672, upload_time = "2025-04-10T15:23:39.232Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", 
hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680 }, + { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload_time = "2025-04-10T15:23:37.377Z" }, ] [package.optional-dependencies] @@ -824,9 +986,40 @@ dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568 } +sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568, upload_time = "2024-12-15T13:33:30.42Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315 }, + { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315, upload_time = "2024-12-15T13:33:27.467Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, + { name = "watchfiles" }, + { name = "websockets" }, +] + +[[package]] +name = 
"uvloop" +version = "0.21.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741, upload_time = "2024-10-14T23:38:35.489Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284, upload_time = "2024-10-14T23:37:47.833Z" }, + { url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349, upload_time = "2024-10-14T23:37:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089, upload_time = "2024-10-14T23:37:51.703Z" }, + { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770, upload_time = "2024-10-14T23:37:54.122Z" }, + { url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321, upload_time = "2024-10-14T23:37:55.766Z" }, + { 
url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022, upload_time = "2024-10-14T23:37:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123, upload_time = "2024-10-14T23:38:00.688Z" }, + { url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325, upload_time = "2024-10-14T23:38:02.309Z" }, + { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806, upload_time = "2024-10-14T23:38:04.711Z" }, + { url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068, upload_time = "2024-10-14T23:38:06.385Z" }, + { url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428, upload_time = "2024-10-14T23:38:08.416Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018, upload_time = "2024-10-14T23:38:10.888Z" }, ] [[package]] @@ -838,27 +1031,94 @@ dependencies = [ { name = "filelock" }, { name = "platformdirs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/e0/633e369b91bbc664df47dcb5454b6c7cf441e8f5b9d0c250ce9f0546401e/virtualenv-20.30.0.tar.gz", hash = "sha256:800863162bcaa5450a6e4d721049730e7f2dae07720e0902b0e4040bd6f9ada8", size = 4346945 } +sdist = { url = "https://files.pythonhosted.org/packages/38/e0/633e369b91bbc664df47dcb5454b6c7cf441e8f5b9d0c250ce9f0546401e/virtualenv-20.30.0.tar.gz", hash = "sha256:800863162bcaa5450a6e4d721049730e7f2dae07720e0902b0e4040bd6f9ada8", size = 4346945, upload_time = "2025-03-31T16:33:29.185Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/ed/3cfeb48175f0671ec430ede81f628f9fb2b1084c9064ca67ebe8c0ed6a05/virtualenv-20.30.0-py3-none-any.whl", hash = "sha256:e34302959180fca3af42d1800df014b35019490b119eba981af27f2fa486e5d6", size = 4329461, upload_time = "2025-03-31T16:33:26.758Z" }, +] + +[[package]] +name = "watchfiles" +version = "1.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/03/e2/8ed598c42057de7aa5d97c472254af4906ff0a59a66699d426fc9ef795d7/watchfiles-1.0.5.tar.gz", hash = "sha256:b7529b5dcc114679d43827d8c35a07c493ad6f083633d573d81c660abc5979e9", size = 94537, upload_time = "2025-04-08T10:36:26.722Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4c/ed/3cfeb48175f0671ec430ede81f628f9fb2b1084c9064ca67ebe8c0ed6a05/virtualenv-20.30.0-py3-none-any.whl", hash = "sha256:e34302959180fca3af42d1800df014b35019490b119eba981af27f2fa486e5d6", size = 4329461 }, + { url = 
"https://files.pythonhosted.org/packages/2a/8c/4f0b9bdb75a1bfbd9c78fad7d8854369283f74fe7cf03eb16be77054536d/watchfiles-1.0.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5eb568c2aa6018e26da9e6c86f3ec3fd958cee7f0311b35c2630fa4217d17f2", size = 401511, upload_time = "2025-04-08T10:35:17.956Z" }, + { url = "https://files.pythonhosted.org/packages/dc/4e/7e15825def77f8bd359b6d3f379f0c9dac4eb09dd4ddd58fd7d14127179c/watchfiles-1.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0a04059f4923ce4e856b4b4e5e783a70f49d9663d22a4c3b3298165996d1377f", size = 392715, upload_time = "2025-04-08T10:35:19.202Z" }, + { url = "https://files.pythonhosted.org/packages/58/65/b72fb817518728e08de5840d5d38571466c1b4a3f724d190cec909ee6f3f/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e380c89983ce6e6fe2dd1e1921b9952fb4e6da882931abd1824c092ed495dec", size = 454138, upload_time = "2025-04-08T10:35:20.586Z" }, + { url = "https://files.pythonhosted.org/packages/3e/a4/86833fd2ea2e50ae28989f5950b5c3f91022d67092bfec08f8300d8b347b/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fe43139b2c0fdc4a14d4f8d5b5d967f7a2777fd3d38ecf5b1ec669b0d7e43c21", size = 458592, upload_time = "2025-04-08T10:35:21.87Z" }, + { url = "https://files.pythonhosted.org/packages/38/7e/42cb8df8be9a37e50dd3a818816501cf7a20d635d76d6bd65aae3dbbff68/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee0822ce1b8a14fe5a066f93edd20aada932acfe348bede8aa2149f1a4489512", size = 487532, upload_time = "2025-04-08T10:35:23.143Z" }, + { url = "https://files.pythonhosted.org/packages/fc/fd/13d26721c85d7f3df6169d8b495fcac8ab0dc8f0945ebea8845de4681dab/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0dbcb1c2d8f2ab6e0a81c6699b236932bd264d4cef1ac475858d16c403de74d", size = 522865, upload_time = "2025-04-08T10:35:24.702Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/0d/7f9ae243c04e96c5455d111e21b09087d0eeaf9a1369e13a01c7d3d82478/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2014a2b18ad3ca53b1f6c23f8cd94a18ce930c1837bd891262c182640eb40a6", size = 499887, upload_time = "2025-04-08T10:35:25.969Z" }, + { url = "https://files.pythonhosted.org/packages/8e/0f/a257766998e26aca4b3acf2ae97dff04b57071e991a510857d3799247c67/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f6ae86d5cb647bf58f9f655fcf577f713915a5d69057a0371bc257e2553234", size = 454498, upload_time = "2025-04-08T10:35:27.353Z" }, + { url = "https://files.pythonhosted.org/packages/81/79/8bf142575a03e0af9c3d5f8bcae911ee6683ae93a625d349d4ecf4c8f7df/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1a7bac2bde1d661fb31f4d4e8e539e178774b76db3c2c17c4bb3e960a5de07a2", size = 630663, upload_time = "2025-04-08T10:35:28.685Z" }, + { url = "https://files.pythonhosted.org/packages/f1/80/abe2e79f610e45c63a70d271caea90c49bbf93eb00fa947fa9b803a1d51f/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ab626da2fc1ac277bbf752446470b367f84b50295264d2d313e28dc4405d663", size = 625410, upload_time = "2025-04-08T10:35:30.42Z" }, + { url = "https://files.pythonhosted.org/packages/91/6f/bc7fbecb84a41a9069c2c6eb6319f7f7df113adf113e358c57fc1aff7ff5/watchfiles-1.0.5-cp312-cp312-win32.whl", hash = "sha256:9f4571a783914feda92018ef3901dab8caf5b029325b5fe4558c074582815249", size = 277965, upload_time = "2025-04-08T10:35:32.023Z" }, + { url = "https://files.pythonhosted.org/packages/99/a5/bf1c297ea6649ec59e935ab311f63d8af5faa8f0b86993e3282b984263e3/watchfiles-1.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:360a398c3a19672cf93527f7e8d8b60d8275119c5d900f2e184d32483117a705", size = 291693, upload_time = "2025-04-08T10:35:33.225Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/7b/fd01087cc21db5c47e5beae507b87965db341cce8a86f9eb12bf5219d4e0/watchfiles-1.0.5-cp312-cp312-win_arm64.whl", hash = "sha256:1a2902ede862969077b97523987c38db28abbe09fb19866e711485d9fbf0d417", size = 283287, upload_time = "2025-04-08T10:35:34.568Z" }, + { url = "https://files.pythonhosted.org/packages/c7/62/435766874b704f39b2fecd8395a29042db2b5ec4005bd34523415e9bd2e0/watchfiles-1.0.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0b289572c33a0deae62daa57e44a25b99b783e5f7aed81b314232b3d3c81a11d", size = 401531, upload_time = "2025-04-08T10:35:35.792Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a6/e52a02c05411b9cb02823e6797ef9bbba0bfaf1bb627da1634d44d8af833/watchfiles-1.0.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a056c2f692d65bf1e99c41045e3bdcaea3cb9e6b5a53dcaf60a5f3bd95fc9763", size = 392417, upload_time = "2025-04-08T10:35:37.048Z" }, + { url = "https://files.pythonhosted.org/packages/3f/53/c4af6819770455932144e0109d4854437769672d7ad897e76e8e1673435d/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9dca99744991fc9850d18015c4f0438865414e50069670f5f7eee08340d8b40", size = 453423, upload_time = "2025-04-08T10:35:38.357Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d1/8e88df58bbbf819b8bc5cfbacd3c79e01b40261cad0fc84d1e1ebd778a07/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:894342d61d355446d02cd3988a7326af344143eb33a2fd5d38482a92072d9563", size = 458185, upload_time = "2025-04-08T10:35:39.708Z" }, + { url = "https://files.pythonhosted.org/packages/ff/70/fffaa11962dd5429e47e478a18736d4e42bec42404f5ee3b92ef1b87ad60/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab44e1580924d1ffd7b3938e02716d5ad190441965138b4aa1d1f31ea0877f04", size = 486696, upload_time = "2025-04-08T10:35:41.469Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/db/723c0328e8b3692d53eb273797d9a08be6ffb1d16f1c0ba2bdbdc2a3852c/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6f9367b132078b2ceb8d066ff6c93a970a18c3029cea37bfd7b2d3dd2e5db8f", size = 522327, upload_time = "2025-04-08T10:35:43.289Z" }, + { url = "https://files.pythonhosted.org/packages/cd/05/9fccc43c50c39a76b68343484b9da7b12d42d0859c37c61aec018c967a32/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2e55a9b162e06e3f862fb61e399fe9f05d908d019d87bf5b496a04ef18a970a", size = 499741, upload_time = "2025-04-08T10:35:44.574Z" }, + { url = "https://files.pythonhosted.org/packages/23/14/499e90c37fa518976782b10a18b18db9f55ea73ca14641615056f8194bb3/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0125f91f70e0732a9f8ee01e49515c35d38ba48db507a50c5bdcad9503af5827", size = 453995, upload_time = "2025-04-08T10:35:46.336Z" }, + { url = "https://files.pythonhosted.org/packages/61/d9/f75d6840059320df5adecd2c687fbc18960a7f97b55c300d20f207d48aef/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:13bb21f8ba3248386337c9fa51c528868e6c34a707f729ab041c846d52a0c69a", size = 629693, upload_time = "2025-04-08T10:35:48.161Z" }, + { url = "https://files.pythonhosted.org/packages/fc/17/180ca383f5061b61406477218c55d66ec118e6c0c51f02d8142895fcf0a9/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:839ebd0df4a18c5b3c1b890145b5a3f5f64063c2a0d02b13c76d78fe5de34936", size = 624677, upload_time = "2025-04-08T10:35:49.65Z" }, + { url = "https://files.pythonhosted.org/packages/bf/15/714d6ef307f803f236d69ee9d421763707899d6298d9f3183e55e366d9af/watchfiles-1.0.5-cp313-cp313-win32.whl", hash = "sha256:4a8ec1e4e16e2d5bafc9ba82f7aaecfeec990ca7cd27e84fb6f191804ed2fcfc", size = 277804, upload_time = "2025-04-08T10:35:51.093Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/b4/c57b99518fadf431f3ef47a610839e46e5f8abf9814f969859d1c65c02c7/watchfiles-1.0.5-cp313-cp313-win_amd64.whl", hash = "sha256:f436601594f15bf406518af922a89dcaab416568edb6f65c4e5bbbad1ea45c11", size = 291087, upload_time = "2025-04-08T10:35:52.458Z" }, ] [[package]] name = "wcwidth" version = "0.2.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301 } +sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload_time = "2024-01-06T02:10:57.829Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 }, + { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload_time = "2024-01-06T02:10:55.763Z" }, ] [[package]] name = "websocket-client" version = "1.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648 } +sdist = { url = 
"https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648, upload_time = "2024-04-23T22:16:16.976Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload_time = "2024-04-23T22:16:14.422Z" }, +] + +[[package]] +name = "websockets" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload_time = "2025-03-05T20:03:41.606Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826 }, + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload_time = "2025-03-05T20:02:16.706Z" }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload_time = "2025-03-05T20:02:18.832Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload_time = "2025-03-05T20:02:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload_time = "2025-03-05T20:02:22.286Z" }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload_time = "2025-03-05T20:02:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload_time = "2025-03-05T20:02:25.669Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload_time = "2025-03-05T20:02:26.99Z" }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload_time = 
"2025-03-05T20:02:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload_time = "2025-03-05T20:02:31.634Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload_time = "2025-03-05T20:02:33.017Z" }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload_time = "2025-03-05T20:02:34.498Z" }, + { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload_time = "2025-03-05T20:02:36.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload_time = "2025-03-05T20:02:37.985Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload_time = "2025-03-05T20:02:39.298Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload_time = "2025-03-05T20:02:40.595Z" }, + { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload_time = "2025-03-05T20:02:41.926Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload_time = "2025-03-05T20:02:43.304Z" }, + { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload_time = "2025-03-05T20:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload_time = "2025-03-05T20:02:50.14Z" }, + { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload_time = 
"2025-03-05T20:02:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload_time = "2025-03-05T20:02:53.814Z" }, + { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload_time = "2025-03-05T20:02:55.237Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload_time = "2025-03-05T20:03:39.41Z" }, ] [[package]] @@ -868,16 +1128,16 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/4a/44d3c295350d776427904d73c189e10aeae66d7f555bb2feee16d1e4ba5a/wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065", size = 53425 } +sdist = { url = "https://files.pythonhosted.org/packages/c9/4a/44d3c295350d776427904d73c189e10aeae66d7f555bb2feee16d1e4ba5a/wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065", size = 53425, upload_time = "2022-08-23T19:58:21.447Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/58/e860788190eba3bcce367f74d29c4675466ce8dddfba85f7827588416f01/wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736", size = 24226 }, + { url = "https://files.pythonhosted.org/packages/78/58/e860788190eba3bcce367f74d29c4675466ce8dddfba85f7827588416f01/wsproto-1.2.0-py3-none-any.whl", 
hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736", size = 24226, upload_time = "2022-08-23T19:58:19.96Z" }, ] [[package]] name = "xmod" version = "1.8.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/b2/e3edc608823348e628a919e1d7129e641997afadd946febdd704aecc5881/xmod-1.8.1.tar.gz", hash = "sha256:38c76486b9d672c546d57d8035df0beb7f4a9b088bc3fb2de5431ae821444377", size = 3988 } +sdist = { url = "https://files.pythonhosted.org/packages/72/b2/e3edc608823348e628a919e1d7129e641997afadd946febdd704aecc5881/xmod-1.8.1.tar.gz", hash = "sha256:38c76486b9d672c546d57d8035df0beb7f4a9b088bc3fb2de5431ae821444377", size = 3988, upload_time = "2024-01-04T18:03:17.663Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/33/6b/0dc75b64a764ea1cb8e4c32d1fb273c147304d4e5483cd58be482dc62e45/xmod-1.8.1-py3-none-any.whl", hash = "sha256:a24e9458a4853489042522bdca9e50ee2eac5ab75c809a91150a8a7f40670d48", size = 4610 }, + { url = "https://files.pythonhosted.org/packages/33/6b/0dc75b64a764ea1cb8e4c32d1fb273c147304d4e5483cd58be482dc62e45/xmod-1.8.1-py3-none-any.whl", hash = "sha256:a24e9458a4853489042522bdca9e50ee2eac5ab75c809a91150a8a7f40670d48", size = 4610, upload_time = "2024-01-04T18:03:16.078Z" }, ] From ea3f2f94d535c4b47ae3c904aa0189b77d751bd6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 2 May 2025 14:46:14 -0400 Subject: [PATCH 014/565] formatting --- main.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/main.py b/main.py index 1688f2f9..4886789e 100644 --- a/main.py +++ b/main.py @@ -20,6 +20,7 @@ # Initialize FastAPI app app = FastAPI() + def choose_transport_interactive() -> str: """Prompt user for transport mode using inquirer.""" questions = [ @@ -28,14 +29,15 @@ def choose_transport_interactive() -> str: message="Choose transport mode", choices=[ ("stdio (Default CLI mode)", "stdio"), - ("sse (Server-Sent Events HTTP mode)", "sse") + 
("sse (Server-Sent Events HTTP mode)", "sse"), ], - default="stdio" + default="stdio", ) ] answers = inquirer.prompt(questions) return answers["transport"] + def main() -> None: """Initialize and run the LinkedIn MCP server.""" print("๐Ÿ”— LinkedIn MCP Server ๐Ÿ”—") From 3d17538aa85f5126b024bbc5797dd6aaeccc2938 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 2 May 2025 14:47:53 -0400 Subject: [PATCH 015/565] Add pre-commit installation step to README --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 58763360..9d3d6568 100644 --- a/README.md +++ b/README.md @@ -55,6 +55,7 @@ Using `uv`: uv add "mcp[cli]" selenium httpx inquirer pyperclip uv add "git+https://github.com/stickerdaniel/linkedin_scraper.git" uv pip install -e . +pre-commit install ``` ### Step 4: Install ChromeDriver From b0c36595b69335dba69eef08681098b453e32096 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 4 May 2025 13:24:06 -0400 Subject: [PATCH 016/565] fix(transport-mode): Refactor transport mode selection and remove FastAPI initialization. 
Pass trasport mode to mcp.run --- main.py | 31 ++++++++++--------------------- 1 file changed, 10 insertions(+), 21 deletions(-) diff --git a/main.py b/main.py index 4886789e..7959cc35 100644 --- a/main.py +++ b/main.py @@ -7,21 +7,15 @@ import sys import logging -import uvicorn import inquirer # type: ignore # third-party package without type stubs -from typing import NoReturn -from fastapi import FastAPI +from typing import Literal, NoReturn from linkedin_mcp_server.arguments import parse_arguments from linkedin_mcp_server.cli import print_claude_config from linkedin_mcp_server.drivers.chrome import initialize_driver from linkedin_mcp_server.server import create_mcp_server, shutdown_handler -# Initialize FastAPI app -app = FastAPI() - - -def choose_transport_interactive() -> str: +def choose_transport_interactive() -> Literal["stdio", "sse"]: """Prompt user for transport mode using inquirer.""" questions = [ inquirer.List( @@ -59,6 +53,12 @@ def main() -> None: # Initialize the driver - with lazy initialization if specified initialize_driver(headless=args.headless, lazy_init=args.lazy_init) + # Decide transport + if args.setup: + transport: Literal["stdio", "sse"] = choose_transport_interactive() + else: + transport = "stdio" # Default to stdio without prompt + # Print configuration for Claude if in setup mode if args.setup: print_claude_config() @@ -66,20 +66,9 @@ def main() -> None: # Create and run the MCP server mcp = create_mcp_server() - # Decide transport - if args.setup: - transport = choose_transport_interactive() - else: - transport = "stdio" # Default to stdio without prompt - # Start server - if transport == "sse": - print("\n๐Ÿš€ Running LinkedIn MCP server (SSE mode)...") - uvicorn.run(app, host="0.0.0.0", port=8000) - else: - # Run using stdio - print("\n๐Ÿš€ Running LinkedIn MCP server (STDIO mode)...") - mcp.run(transport="stdio") + print(f"\n๐Ÿš€ Running LinkedIn MCP server ({transport.upper()} mode)...") + mcp.run(transport=transport) def 
exit_gracefully(exit_code: int = 0) -> NoReturn: From 745d1a3e72bda9040fadf8164f1d38a0d30ce536 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 4 May 2025 14:35:59 -0400 Subject: [PATCH 017/565] chore(tasks): Add 'Follow Logs' task to monitor log files --- .vscode/tasks.json | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index adae2a55..ce734520 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -32,6 +32,19 @@ "focus": true }, "problemMatcher": [] + }, + { + "label": "Follow Logs", + "type": "shell", + "command": "tail", + "args": ["-n", "20", "-F", "~/Library/Logs/Claude/mcp*.log"], + "isBackground": true, + "presentation": { + "reveal": "always", + "panel": "new", + "focus": false + }, + "problemMatcher": [] } ] } From 17db71f2da8f8982b19e02707d49c3349351cf11 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 4 May 2025 14:41:19 -0400 Subject: [PATCH 018/565] fix(tasks): Update arguments for 'Run main.py' task to include full debug options --- .vscode/tasks.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index ce734520..d3fdce1f 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -21,7 +21,7 @@ "label": "Run main.py", "type": "shell", "command": "uv", - "args": ["run", "main.py"], + "args": ["run", "main.py", "--no-headless", "--no-lazy-init", "--debug"], "group": { "kind": "build", "isDefault": true From 8391bb6acda27c38c2387dcf7970fd690b8b5ddb Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 4 May 2025 14:42:01 -0400 Subject: [PATCH 019/565] feat(credentials): store linkedin secrets in keychain --- main.py | 2 +- pyproject.toml | 1 + src/linkedin_mcp_server/credentials.py | 86 --- src/linkedin_mcp_server/drivers/chrome.py | 35 +- src/linkedin_mcp_server/secrets.py | 165 ++++ uv.lock | 893 +++++++++++++--------- 6 files changed, 693 insertions(+), 489 deletions(-) delete mode 100644 
src/linkedin_mcp_server/credentials.py create mode 100644 src/linkedin_mcp_server/secrets.py diff --git a/main.py b/main.py index 7959cc35..890283b4 100644 --- a/main.py +++ b/main.py @@ -20,7 +20,7 @@ def choose_transport_interactive() -> Literal["stdio", "sse"]: questions = [ inquirer.List( "transport", - message="Choose transport mode", + message="Choose mcp transport mode", choices=[ ("stdio (Default CLI mode)", "stdio"), ("sse (Server-Sent Events HTTP mode)", "sse"), diff --git a/pyproject.toml b/pyproject.toml index 0e1d535f..a22fe9c9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,6 +8,7 @@ dependencies = [ "fastapi[standard]>=0.115.12", "httpx>=0.28.1", "inquirer>=3.4.0", + "keyring>=25.6.0", "linkedin-scraper", "mcp[cli]>=1.6.0", "mypy>=1.15.0", diff --git a/src/linkedin_mcp_server/credentials.py b/src/linkedin_mcp_server/credentials.py deleted file mode 100644 index 79d4ad97..00000000 --- a/src/linkedin_mcp_server/credentials.py +++ /dev/null @@ -1,86 +0,0 @@ -# src/linkedin_mcp_server/credentials.py -""" -Credential management for LinkedIn MCP server. - -This module handles the secure storage and retrieval of LinkedIn credentials. -""" - -from typing import Dict, Optional -import os -import json -from pathlib import Path -import logging -import inquirer - -logger = logging.getLogger(__name__) - - -def get_credentials(non_interactive: bool = False) -> Optional[Dict[str, str]]: - """ - Get LinkedIn credentials from environment variables, stored file, or prompt. - - Args: - non_interactive: If True, only get credentials from environment or stored file, - without prompting the user. - - Returns: - Optional[Dict[str, str]]: Dictionary containing email and password, or None if - not available in non-interactive mode. 
- """ - # First, try environment variables - email = os.environ.get("LINKEDIN_EMAIL") - password = os.environ.get("LINKEDIN_PASSWORD") - - if email and password: - logger.info("Using LinkedIn credentials from environment variables") - return {"email": email, "password": password} - - # Second, try stored credentials file - credentials_file = Path.home() / ".linkedin_mcp_credentials.json" - if credentials_file.exists(): - try: - with open(credentials_file, "r") as f: - credentials = json.load(f) - if "email" in credentials and "password" in credentials: - logger.info("Using LinkedIn credentials from stored file") - return credentials - except Exception as e: - logger.error(f"Error reading credentials file: {e}") - - # If in non-interactive mode and we haven't found credentials yet, return None - if non_interactive: - logger.warning("No credentials found in non-interactive mode") - return None - - # Otherwise, prompt for credentials - return prompt_for_credentials() - - -def prompt_for_credentials() -> Dict[str, str]: - """ - Prompt user for LinkedIn credentials and store them. 
- - Returns: - Dict[str, str]: Dictionary containing email and password - """ - print("๐Ÿ”‘ LinkedIn credentials required") - questions = [ - inquirer.Text("email", message="LinkedIn Email"), - inquirer.Password("password", message="LinkedIn Password"), - ] - credentials = inquirer.prompt(questions) - - # Store credentials securely - try: - credentials_file = Path.home() / ".linkedin_mcp_credentials.json" - with open(credentials_file, "w") as f: - json.dump(credentials, f) - - # Set permissions to user-only read/write - os.chmod(credentials_file, 0o600) - print(f"โœ… Credentials stored with user-only read/write at {credentials_file}") - except Exception as e: - logger.warning(f"Could not store credentials: {e}") - print(f"โš ๏ธ Warning: Could not store credentials: {e}") - - return credentials diff --git a/src/linkedin_mcp_server/drivers/chrome.py b/src/linkedin_mcp_server/drivers/chrome.py index 7bb28086..9cd7d324 100644 --- a/src/linkedin_mcp_server/drivers/chrome.py +++ b/src/linkedin_mcp_server/drivers/chrome.py @@ -9,14 +9,13 @@ import os import sys import logging -from pathlib import Path import inquirer # type: ignore from selenium import webdriver from selenium.webdriver.chrome.options import Options from selenium.webdriver.chrome.service import Service from selenium.common.exceptions import WebDriverException - -from linkedin_mcp_server.credentials import get_credentials +from linkedin_mcp_server.secrets import get_credentials +from linkedin_scraper import actions # Global driver storage to reuse sessions active_drivers: Dict[str, webdriver.Chrome] = {} @@ -152,6 +151,7 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: raise WebDriverException(error_msg) +# src/linkedin_mcp_server/drivers/chrome.py (update function) def login_to_linkedin(driver: webdriver.Chrome, non_interactive: bool = False) -> bool: """ Log in to LinkedIn using stored or provided credentials. 
@@ -167,33 +167,22 @@ def login_to_linkedin(driver: webdriver.Chrome, non_interactive: bool = False) - credentials = get_credentials(non_interactive=non_interactive) if not credentials: - if non_interactive: - logger.error("No credentials available in non-interactive mode") - return False - else: - logger.error("Failed to obtain LinkedIn credentials") - return False + print("โŒ No credentials available") + return False try: - from linkedin_scraper import actions - # Login to LinkedIn - logger.info("Logging in to LinkedIn...") - if not non_interactive: - print("๐Ÿ”‘ Logging in to LinkedIn...") + print("๐Ÿ”‘ Logging in to LinkedIn...") actions.login(driver, credentials["email"], credentials["password"]) - if not non_interactive: - print("โœ… Successfully logged in to LinkedIn") - logger.info("Successfully logged in to LinkedIn") + print("โœ… Successfully logged in to LinkedIn") return True except Exception as e: error_msg = f"Failed to login: {str(e)}" - logger.error(error_msg) + print(f"โŒ {error_msg}") if not non_interactive: - print(f"โŒ {error_msg}") print( "โš ๏ธ You might need to confirm the login in your LinkedIn mobile app. " "Please try again and confirm the login." 
@@ -216,10 +205,10 @@ def login_to_linkedin(driver: webdriver.Chrome, non_interactive: bool = False) - ) if retry and retry.get("retry", False): - # Remove old credentials and try again - credentials_file = Path.home() / ".linkedin_mcp_credentials.json" - if credentials_file.exists(): - os.remove(credentials_file) + # Clear credentials from keyring and try again + from linkedin_mcp_server.secrets import clear_credentials + + clear_credentials() # Try again with new credentials return login_to_linkedin(driver, non_interactive) diff --git a/src/linkedin_mcp_server/secrets.py b/src/linkedin_mcp_server/secrets.py new file mode 100644 index 00000000..5dc67bb3 --- /dev/null +++ b/src/linkedin_mcp_server/secrets.py @@ -0,0 +1,165 @@ +# src/linkedin_mcp_server/secrets.py +""" +Secure secrets management for LinkedIn MCP server. + +This module provides secure storage and retrieval of sensitive credentials +using the system's native keychain/credential manager. +""" + +from typing import Dict, Optional +import os +import platform +import logging +import keyring +from keyring.errors import KeyringError +import inquirer # type: ignore + +# Service name for the keyring +SERVICE_NAME = "linkedin_mcp_server" + +# Secret keys +EMAIL_KEY = "linkedin_email" +PASSWORD_KEY = "linkedin_password" + +logger = logging.getLogger(__name__) + + +def get_keyring_name() -> str: + """ + Get the name of the current keyring backend. + + Returns: + str: Human-readable name of the keyring backend based on platform + """ + system = platform.system() + if system == "Darwin": + return "macOS Keychain" + elif system == "Windows": + return "Windows Credential Locker" + else: + return keyring.get_keyring().__class__.__name__ + + +def get_secret(key: str) -> Optional[str]: + """ + Retrieve a secret from system keyring. 
+ + Args: + key: The key identifier for the secret + + Returns: + Optional[str]: The secret value if found, None otherwise + """ + try: + secret = keyring.get_password(SERVICE_NAME, key) + return secret + except KeyringError as e: + logger.error(f"Error accessing keyring for {key}: {e}") + return None + + +def set_secret(key: str, value: str) -> bool: + """ + Store a secret in system keyring. + + Args: + key: The key identifier for the secret + value: The secret value to store + + Returns: + bool: True if successful, False otherwise + """ + try: + keyring.set_password(SERVICE_NAME, key, value) + logger.debug(f"Secret '{key}' stored successfully in {get_keyring_name()}") + return True + except KeyringError as e: + logger.error(f"Error storing secret '{key}': {e}") + return False + + +def get_credentials(non_interactive: bool = False) -> Optional[Dict[str, str]]: + """ + Get LinkedIn credentials from environment variables, keyring, or prompt. + + Args: + non_interactive: If True, only get credentials from environment or keyring, + without prompting the user. + + Returns: + Optional[Dict[str, str]]: Dictionary containing email and password, or None if + not available in non-interactive mode. 
+ """ + # First, try environment variables + email = os.environ.get("LINKEDIN_EMAIL") + password = os.environ.get("LINKEDIN_PASSWORD") + + if email and password: + logger.info("Using LinkedIn credentials from environment variables") + return {"email": email, "password": password} + + # Second, try keyring + email = get_secret(EMAIL_KEY) + password = get_secret(PASSWORD_KEY) + + if email and password: + logger.info(f"Using LinkedIn credentials from {get_keyring_name()}") + return {"email": email, "password": password} + + # If in non-interactive mode and we haven't found credentials yet, return None + if non_interactive: + logger.error("No credentials found in non-interactive mode") + return None + + # Otherwise, prompt for credentials + return prompt_for_credentials() + + +def prompt_for_credentials() -> Dict[str, str]: + """ + Prompt user for LinkedIn credentials and store them securely. + + Returns: + Dict[str, str]: Dictionary containing email and password + """ + print(f"๐Ÿ”‘ LinkedIn credentials required (will be stored in {get_keyring_name()})") + questions = [ + inquirer.Text("email", message="LinkedIn Email"), + inquirer.Password("password", message="LinkedIn Password"), + ] + credentials = inquirer.prompt(questions) + + if not credentials: + raise KeyboardInterrupt("Credential input was cancelled") + + # Store credentials securely in keyring + if set_secret(EMAIL_KEY, credentials["email"]) and set_secret( + PASSWORD_KEY, credentials["password"] + ): + print(f"โœ… Credentials stored securely in {get_keyring_name()}") + else: + print("โš ๏ธ Warning: Could not store credentials in system keyring.") + print(" Your credentials will only be used for this session.") + + return credentials + + +def clear_credentials() -> bool: + """ + Clear stored credentials from the keyring. 
+ + Returns: + bool: True if successful, False otherwise + """ + success = True + try: + # Delete both keys + keyring.delete_password(SERVICE_NAME, EMAIL_KEY) + keyring.delete_password(SERVICE_NAME, PASSWORD_KEY) + print(f"โœ… Credentials removed from {get_keyring_name()}") + except KeyringError as e: + success = False + logger.error(f"Error clearing credentials: {e}") + print(f"โŒ Error clearing credentials: {e}") + + return success diff --git a/uv.lock b/uv.lock index d698ecb6..d943e27b 100644 --- a/uv.lock +++ b/uv.lock @@ -6,18 +6,18 @@ requires-python = ">=3.12" name = "annotated-types" version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload_time = "2024-05-20T21:33:25.928Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload_time = "2024-05-20T21:33:24.1Z" }, + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] [[package]] name = "ansicon" version = "1.89.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/b6/e2/1c866404ddbd280efedff4a9f15abfe943cb83cde6e895022370f3a61f85/ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1", size = 67312, upload_time = "2019-04-29T20:23:57.314Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/e2/1c866404ddbd280efedff4a9f15abfe943cb83cde6e895022370f3a61f85/ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1", size = 67312, upload-time = "2019-04-29T20:23:57.314Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/75/f9/f1c10e223c7b56a38109a3f2eb4e7fe9a757ea3ed3a166754fb30f65e466/ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec", size = 63675, upload_time = "2019-04-29T20:23:53.83Z" }, + { url = "https://files.pythonhosted.org/packages/75/f9/f1c10e223c7b56a38109a3f2eb4e7fe9a757ea3ed3a166754fb30f65e466/ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec", size = 63675, upload-time = "2019-04-29T20:23:53.83Z" }, ] [[package]] @@ -29,18 +29,18 @@ dependencies = [ { name = "sniffio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload_time = "2025-03-17T00:02:54.77Z" } +sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload_time = "2025-03-17T00:02:52.713Z" }, + { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, ] [[package]] name = "attrs" version = "25.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload_time = "2025-03-13T11:10:22.779Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload_time = "2025-03-13T11:10:21.14Z" }, + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, ] [[package]] @@ -52,18 +52,18 @@ dependencies = [ { name = "six" }, { name = "wcwidth" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/25/ae/92e9968ad23205389ec6bd82e2d4fca3817f1cdef34e10aa8d529ef8b1d7/blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680", size = 6655612, upload_time = "2023-02-04T02:25:45.886Z" } +sdist = { url = "https://files.pythonhosted.org/packages/25/ae/92e9968ad23205389ec6bd82e2d4fca3817f1cdef34e10aa8d529ef8b1d7/blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680", size = 6655612, upload-time = "2023-02-04T02:25:45.886Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/98/584f211c3a4bb38f2871fa937ee0cc83c130de50c955d6c7e2334dbf4acb/blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058", size = 58372, upload_time = "2023-02-04T02:25:43.093Z" }, + { url = "https://files.pythonhosted.org/packages/76/98/584f211c3a4bb38f2871fa937ee0cc83c130de50c955d6c7e2334dbf4acb/blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058", size = 58372, upload-time = "2023-02-04T02:25:43.093Z" }, ] [[package]] name = "certifi" version = "2025.1.31" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577, upload_time = "2025-01-31T02:16:47.166Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577, upload-time = "2025-01-31T02:16:47.166Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393, upload_time = "2025-01-31T02:16:45.015Z" }, + { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393, upload-time = "2025-01-31T02:16:45.015Z" }, ] [[package]] @@ -73,56 +73,70 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pycparser" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload_time = "2024-09-04T20:45:21.852Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload_time = "2024-09-04T20:44:26.208Z" }, - { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload_time = "2024-09-04T20:44:27.578Z" }, - { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload_time = "2024-09-04T20:44:43.733Z" }, - { url = 
"https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload_time = "2024-09-04T20:44:45.309Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" }, ] [[package]] name = "cfgv" version = "3.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload_time = "2023-08-12T20:38:17.776Z" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload_time = "2023-08-12T20:38:16.269Z" }, + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, ] [[package]] name = "charset-normalizer" version = "3.4.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188, upload_time = "2024-12-24T18:12:35.43Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105, upload_time = "2024-12-24T18:10:38.83Z" }, - { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404, upload_time = "2024-12-24T18:10:44.272Z" }, - { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423, upload_time = "2024-12-24T18:10:45.492Z" }, - { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184, upload_time = "2024-12-24T18:10:47.898Z" }, - { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268, upload_time = "2024-12-24T18:10:50.589Z" }, 
- { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601, upload_time = "2024-12-24T18:10:52.541Z" }, - { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098, upload_time = "2024-12-24T18:10:53.789Z" }, - { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520, upload_time = "2024-12-24T18:10:55.048Z" }, - { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852, upload_time = "2024-12-24T18:10:57.647Z" }, - { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488, upload_time = "2024-12-24T18:10:59.43Z" }, - { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192, upload_time = "2024-12-24T18:11:00.676Z" }, - { url = 
"https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550, upload_time = "2024-12-24T18:11:01.952Z" }, - { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785, upload_time = "2024-12-24T18:11:03.142Z" }, - { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698, upload_time = "2024-12-24T18:11:05.834Z" }, - { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162, upload_time = "2024-12-24T18:11:07.064Z" }, - { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263, upload_time = "2024-12-24T18:11:08.374Z" }, - { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966, upload_time = "2024-12-24T18:11:09.831Z" }, - { url = 
"https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992, upload_time = "2024-12-24T18:11:12.03Z" }, - { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162, upload_time = "2024-12-24T18:11:13.372Z" }, - { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972, upload_time = "2024-12-24T18:11:14.628Z" }, - { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095, upload_time = "2024-12-24T18:11:17.672Z" }, - { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668, upload_time = "2024-12-24T18:11:18.989Z" }, - { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073, upload_time = "2024-12-24T18:11:21.507Z" 
}, - { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732, upload_time = "2024-12-24T18:11:22.774Z" }, - { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391, upload_time = "2024-12-24T18:11:24.139Z" }, - { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702, upload_time = "2024-12-24T18:11:26.535Z" }, - { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767, upload_time = "2024-12-24T18:12:32.852Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188, upload-time = "2024-12-24T18:12:35.43Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105, upload-time = "2024-12-24T18:10:38.83Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404, upload-time = "2024-12-24T18:10:44.272Z" }, + { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423, upload-time = "2024-12-24T18:10:45.492Z" }, + { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184, upload-time = "2024-12-24T18:10:47.898Z" }, + { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268, upload-time = "2024-12-24T18:10:50.589Z" }, + { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601, upload-time = "2024-12-24T18:10:52.541Z" }, + { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098, upload-time = "2024-12-24T18:10:53.789Z" }, + { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520, upload-time = "2024-12-24T18:10:55.048Z" }, + { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852, upload-time = "2024-12-24T18:10:57.647Z" }, + { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488, upload-time = "2024-12-24T18:10:59.43Z" }, + { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192, upload-time = "2024-12-24T18:11:00.676Z" }, + { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550, upload-time = "2024-12-24T18:11:01.952Z" }, + { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785, 
upload-time = "2024-12-24T18:11:03.142Z" }, + { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698, upload-time = "2024-12-24T18:11:05.834Z" }, + { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162, upload-time = "2024-12-24T18:11:07.064Z" }, + { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263, upload-time = "2024-12-24T18:11:08.374Z" }, + { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966, upload-time = "2024-12-24T18:11:09.831Z" }, + { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992, upload-time = "2024-12-24T18:11:12.03Z" }, + { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162, upload-time = "2024-12-24T18:11:13.372Z" }, + { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972, upload-time = "2024-12-24T18:11:14.628Z" }, + { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095, upload-time = "2024-12-24T18:11:17.672Z" }, + { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668, upload-time = "2024-12-24T18:11:18.989Z" }, + { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073, upload-time = "2024-12-24T18:11:21.507Z" }, + { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732, upload-time = "2024-12-24T18:11:22.774Z" }, + { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", 
size = 95391, upload-time = "2024-12-24T18:11:24.139Z" }, + { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702, upload-time = "2024-12-24T18:11:26.535Z" }, + { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767, upload-time = "2024-12-24T18:12:32.852Z" }, ] [[package]] @@ -132,36 +146,65 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload_time = "2024-12-21T18:38:44.339Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload_time = "2024-12-21T18:38:41.666Z" }, + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, ] 
[[package]] name = "colorama" version = "0.4.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload_time = "2022-10-25T02:36:22.414Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload_time = "2022-10-25T02:36:20.889Z" }, + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "cryptography" +version = "44.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/53/d6/1411ab4d6108ab167d06254c5be517681f1e331f90edf1379895bcb87020/cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053", size = 711096, upload-time = "2025-05-02T19:36:04.667Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/06/af2cf8d56ef87c77319e9086601bef621bedf40f6f59069e1b6d1ec498c5/cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137", size = 3959305, upload-time = "2025-05-02T19:34:53.042Z" }, + { url = "https://files.pythonhosted.org/packages/ae/01/80de3bec64627207d030f47bf3536889efee8913cd363e78ca9a09b13c8e/cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c", size = 4171040, upload-time = "2025-05-02T19:34:54.675Z" }, + { url = "https://files.pythonhosted.org/packages/bd/48/bb16b7541d207a19d9ae8b541c70037a05e473ddc72ccb1386524d4f023c/cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76", size = 3963411, upload-time = "2025-05-02T19:34:56.61Z" }, + { url = "https://files.pythonhosted.org/packages/42/b2/7d31f2af5591d217d71d37d044ef5412945a8a8e98d5a2a8ae4fd9cd4489/cryptography-44.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af653022a0c25ef2e3ffb2c673a50e5a0d02fecc41608f4954176f1933b12359", size = 3689263, upload-time = "2025-05-02T19:34:58.591Z" }, + { url = "https://files.pythonhosted.org/packages/25/50/c0dfb9d87ae88ccc01aad8eb93e23cfbcea6a6a106a9b63a7b14c1f93c75/cryptography-44.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:157f1f3b8d941c2bd8f3ffee0af9b049c9665c39d3da9db2dc338feca5e98a43", size = 4196198, upload-time = "2025-05-02T19:35:00.988Z" }, + { url = "https://files.pythonhosted.org/packages/66/c9/55c6b8794a74da652690c898cb43906310a3e4e4f6ee0b5f8b3b3e70c441/cryptography-44.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:c6cd67722619e4d55fdb42ead64ed8843d64638e9c07f4011163e46bc512cf01", size = 3966502, upload-time = "2025-05-02T19:35:03.091Z" }, + { url = "https://files.pythonhosted.org/packages/b6/f7/7cb5488c682ca59a02a32ec5f975074084db4c983f849d47b7b67cc8697a/cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = 
"sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d", size = 4196173, upload-time = "2025-05-02T19:35:05.018Z" }, + { url = "https://files.pythonhosted.org/packages/d2/0b/2f789a8403ae089b0b121f8f54f4a3e5228df756e2146efdf4a09a3d5083/cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904", size = 4087713, upload-time = "2025-05-02T19:35:07.187Z" }, + { url = "https://files.pythonhosted.org/packages/1d/aa/330c13655f1af398fc154089295cf259252f0ba5df93b4bc9d9c7d7f843e/cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44", size = 4299064, upload-time = "2025-05-02T19:35:08.879Z" }, + { url = "https://files.pythonhosted.org/packages/b1/f0/7491d44bba8d28b464a5bc8cc709f25a51e3eac54c0a4444cf2473a57c37/cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759", size = 3960307, upload-time = "2025-05-02T19:35:15.917Z" }, + { url = "https://files.pythonhosted.org/packages/f7/c8/e5c5d0e1364d3346a5747cdcd7ecbb23ca87e6dea4f942a44e88be349f06/cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645", size = 4170876, upload-time = "2025-05-02T19:35:18.138Z" }, + { url = "https://files.pythonhosted.org/packages/73/96/025cb26fc351d8c7d3a1c44e20cf9a01e9f7cf740353c9c7a17072e4b264/cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2", size = 3964127, upload-time = "2025-05-02T19:35:19.864Z" }, + { url = "https://files.pythonhosted.org/packages/01/44/eb6522db7d9f84e8833ba3bf63313f8e257729cf3a8917379473fcfd6601/cryptography-44.0.3-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:3f07943aa4d7dad689e3bb1638ddc4944cc5e0921e3c227486daae0e31a05e54", size = 3689164, upload-time = "2025-05-02T19:35:21.449Z" }, + { url = "https://files.pythonhosted.org/packages/68/fb/d61a4defd0d6cee20b1b8a1ea8f5e25007e26aeb413ca53835f0cae2bcd1/cryptography-44.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb90f60e03d563ca2445099edf605c16ed1d5b15182d21831f58460c48bffb93", size = 4198081, upload-time = "2025-05-02T19:35:23.187Z" }, + { url = "https://files.pythonhosted.org/packages/1b/50/457f6911d36432a8811c3ab8bd5a6090e8d18ce655c22820994913dd06ea/cryptography-44.0.3-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ab0b005721cc0039e885ac3503825661bd9810b15d4f374e473f8c89b7d5460c", size = 3967716, upload-time = "2025-05-02T19:35:25.426Z" }, + { url = "https://files.pythonhosted.org/packages/35/6e/dca39d553075980ccb631955c47b93d87d27f3596da8d48b1ae81463d915/cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f", size = 4197398, upload-time = "2025-05-02T19:35:27.678Z" }, + { url = "https://files.pythonhosted.org/packages/9b/9d/d1f2fe681eabc682067c66a74addd46c887ebacf39038ba01f8860338d3d/cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5", size = 4087900, upload-time = "2025-05-02T19:35:29.312Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f5/3599e48c5464580b73b236aafb20973b953cd2e7b44c7c2533de1d888446/cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b", size = 4301067, upload-time = "2025-05-02T19:35:31.547Z" }, ] [[package]] name = "distlib" version = "0.3.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = 
"sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923, upload_time = "2024-10-09T18:35:47.551Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923, upload-time = "2024-10-09T18:35:47.551Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973, upload_time = "2024-10-09T18:35:44.272Z" }, + { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973, upload-time = "2024-10-09T18:35:44.272Z" }, ] [[package]] name = "dnspython" version = "2.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload_time = "2024-10-05T20:14:59.362Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload_time = 
"2024-10-05T20:14:57.687Z" }, + { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, ] [[package]] @@ -172,9 +215,9 @@ dependencies = [ { name = "runs" }, { name = "xmod" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2a/92/734a4ab345914259cb6146fd36512608ea42be16195375c379046f33283d/editor-1.6.6.tar.gz", hash = "sha256:bb6989e872638cd119db9a4fce284cd8e13c553886a1c044c6b8d8a160c871f8", size = 3197, upload_time = "2024-01-25T10:44:59.909Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/92/734a4ab345914259cb6146fd36512608ea42be16195375c379046f33283d/editor-1.6.6.tar.gz", hash = "sha256:bb6989e872638cd119db9a4fce284cd8e13c553886a1c044c6b8d8a160c871f8", size = 3197, upload-time = "2024-01-25T10:44:59.909Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/c2/4bc8cd09b14e28ce3f406a8b05761bed0d785d1ca8c2a5c6684d884c66a2/editor-1.6.6-py3-none-any.whl", hash = "sha256:e818e6913f26c2a81eadef503a2741d7cca7f235d20e217274a009ecd5a74abf", size = 4017, upload_time = "2024-01-25T10:44:58.66Z" }, + { url = "https://files.pythonhosted.org/packages/1b/c2/4bc8cd09b14e28ce3f406a8b05761bed0d785d1ca8c2a5c6684d884c66a2/editor-1.6.6-py3-none-any.whl", hash = "sha256:e818e6913f26c2a81eadef503a2741d7cca7f235d20e217274a009ecd5a74abf", size = 4017, upload-time = "2024-01-25T10:44:58.66Z" }, ] [[package]] @@ -185,9 +228,9 @@ dependencies = [ { name = "dnspython" }, { name = "idna" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967, upload_time = "2024-06-20T11:30:30.034Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967, upload-time = "2024-06-20T11:30:30.034Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521, upload_time = "2024-06-20T11:30:28.248Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521, upload-time = "2024-06-20T11:30:28.248Z" }, ] [[package]] @@ -199,9 +242,9 @@ dependencies = [ { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f4/55/ae499352d82338331ca1e28c7f4a63bfd09479b16395dce38cf50a39e2c2/fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681", size = 295236, upload_time = "2025-03-23T22:55:43.822Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/55/ae499352d82338331ca1e28c7f4a63bfd09479b16395dce38cf50a39e2c2/fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681", size = 295236, upload-time = "2025-03-23T22:55:43.822Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/50/b3/b51f09c2ba432a576fe63758bddc81f78f0c6309d9e5c10d194313bf021e/fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d", size = 95164, upload_time = "2025-03-23T22:55:42.101Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/b3/b51f09c2ba432a576fe63758bddc81f78f0c6309d9e5c10d194313bf021e/fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d", size = 95164, upload-time = "2025-03-23T22:55:42.101Z" }, ] [package.optional-dependencies] @@ -223,9 +266,9 @@ dependencies = [ { name = "typer" }, { name = "uvicorn", extra = ["standard"] }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fe/73/82a5831fbbf8ed75905bacf5b2d9d3dfd6f04d6968b29fe6f72a5ae9ceb1/fastapi_cli-0.0.7.tar.gz", hash = "sha256:02b3b65956f526412515907a0793c9094abd4bfb5457b389f645b0ea6ba3605e", size = 16753, upload_time = "2024-12-15T14:28:10.028Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/73/82a5831fbbf8ed75905bacf5b2d9d3dfd6f04d6968b29fe6f72a5ae9ceb1/fastapi_cli-0.0.7.tar.gz", hash = "sha256:02b3b65956f526412515907a0793c9094abd4bfb5457b389f645b0ea6ba3605e", size = 16753, upload-time = "2024-12-15T14:28:10.028Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/e6/5daefc851b514ce2287d8f5d358ae4341089185f78f3217a69d0ce3a390c/fastapi_cli-0.0.7-py3-none-any.whl", hash = "sha256:d549368ff584b2804336c61f192d86ddea080c11255f375959627911944804f4", size = 10705, upload_time = "2024-12-15T14:28:06.18Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e6/5daefc851b514ce2287d8f5d358ae4341089185f78f3217a69d0ce3a390c/fastapi_cli-0.0.7-py3-none-any.whl", hash = "sha256:d549368ff584b2804336c61f192d86ddea080c11255f375959627911944804f4", size = 10705, upload-time = "2024-12-15T14:28:06.18Z" }, ] [package.optional-dependencies] @@ -237,18 +280,18 @@ standard = [ name = "filelock" version = "3.18.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 
18075, upload_time = "2025-03-14T07:11:40.47Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload_time = "2025-03-14T07:11:39.145Z" }, + { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, ] [[package]] name = "h11" version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418, upload_time = "2022-09-25T15:40:01.519Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418, upload-time = "2022-09-25T15:40:01.519Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259, upload_time = "2022-09-25T15:39:59.68Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259, upload-time = "2022-09-25T15:39:59.68Z" }, ] [[package]] @@ -259,31 +302,31 @@ dependencies = [ { name = "certifi" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/45/ad3e1b4d448f22c0cff4f5692f5ed0666658578e358b8d58a19846048059/httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad", size = 85385, upload_time = "2025-04-11T14:42:46.661Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9f/45/ad3e1b4d448f22c0cff4f5692f5ed0666658578e358b8d58a19846048059/httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad", size = 85385, upload-time = "2025-04-11T14:42:46.661Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/8d/f052b1e336bb2c1fc7ed1aaed898aa570c0b61a09707b108979d9fc6e308/httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be", size = 78732, upload_time = "2025-04-11T14:42:44.896Z" }, + { url = "https://files.pythonhosted.org/packages/18/8d/f052b1e336bb2c1fc7ed1aaed898aa570c0b61a09707b108979d9fc6e308/httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be", size = 78732, upload-time = "2025-04-11T14:42:44.896Z" }, ] [[package]] name = "httptools" version = "0.6.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload_time = "2024-10-16T19:45:08.902Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload-time = "2024-10-16T19:45:08.902Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683, upload_time = "2024-10-16T19:44:30.175Z" }, - { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337, upload_time = "2024-10-16T19:44:31.786Z" }, - { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796, upload_time = "2024-10-16T19:44:32.825Z" }, - { url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837, upload_time = "2024-10-16T19:44:33.974Z" }, - { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289, upload_time = "2024-10-16T19:44:35.111Z" }, - { url = 
"https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779, upload_time = "2024-10-16T19:44:36.253Z" }, - { url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634, upload_time = "2024-10-16T19:44:37.357Z" }, - { url = "https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214, upload_time = "2024-10-16T19:44:38.738Z" }, - { url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431, upload_time = "2024-10-16T19:44:39.818Z" }, - { url = "https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121, upload_time = "2024-10-16T19:44:41.189Z" }, - { url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805, upload_time = "2024-10-16T19:44:42.384Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858, upload_time = "2024-10-16T19:44:43.959Z" }, - { url = "https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042, upload_time = "2024-10-16T19:44:45.071Z" }, - { url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682, upload_time = "2024-10-16T19:44:46.46Z" }, + { url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683, upload-time = "2024-10-16T19:44:30.175Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337, upload-time = "2024-10-16T19:44:31.786Z" }, + { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796, upload-time = "2024-10-16T19:44:32.825Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837, upload-time = "2024-10-16T19:44:33.974Z" }, + { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289, upload-time = "2024-10-16T19:44:35.111Z" }, + { url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779, upload-time = "2024-10-16T19:44:36.253Z" }, + { url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634, upload-time = "2024-10-16T19:44:37.357Z" }, + { url = "https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214, upload-time = "2024-10-16T19:44:38.738Z" }, + { url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431, upload-time = "2024-10-16T19:44:39.818Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121, upload-time = "2024-10-16T19:44:41.189Z" }, + { url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805, upload-time = "2024-10-16T19:44:42.384Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858, upload-time = "2024-10-16T19:44:43.959Z" }, + { url = "https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042, upload-time = "2024-10-16T19:44:45.071Z" }, + { url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682, upload-time = "2024-10-16T19:44:46.46Z" }, ] [[package]] @@ -296,36 +339,36 @@ dependencies = [ { name = "httpcore" }, { name = "idna" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload_time = 
"2024-12-06T15:37:23.222Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload_time = "2024-12-06T15:37:21.509Z" }, + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, ] [[package]] name = "httpx-sse" version = "0.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624, upload_time = "2023-12-22T08:01:21.083Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624, upload-time = "2023-12-22T08:01:21.083Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819, upload_time = "2023-12-22T08:01:19.89Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819, upload-time = "2023-12-22T08:01:19.89Z" }, ] [[package]] name = "identify" version = "2.6.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9b/98/a71ab060daec766acc30fb47dfca219d03de34a70d616a79a38c6066c5bf/identify-2.6.9.tar.gz", hash = "sha256:d40dfe3142a1421d8518e3d3985ef5ac42890683e32306ad614a29490abeb6bf", size = 99249, upload_time = "2025-03-08T15:54:13.632Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9b/98/a71ab060daec766acc30fb47dfca219d03de34a70d616a79a38c6066c5bf/identify-2.6.9.tar.gz", hash = "sha256:d40dfe3142a1421d8518e3d3985ef5ac42890683e32306ad614a29490abeb6bf", size = 99249, upload-time = "2025-03-08T15:54:13.632Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/ce/0845144ed1f0e25db5e7a79c2354c1da4b5ce392b8966449d5db8dca18f1/identify-2.6.9-py2.py3-none-any.whl", hash = "sha256:c98b4322da415a8e5a70ff6e51fbc2d2932c015532d77e9f8537b4ba7813b150", size = 99101, upload_time = "2025-03-08T15:54:12.026Z" }, + { url = "https://files.pythonhosted.org/packages/07/ce/0845144ed1f0e25db5e7a79c2354c1da4b5ce392b8966449d5db8dca18f1/identify-2.6.9-py2.py3-none-any.whl", hash = "sha256:c98b4322da415a8e5a70ff6e51fbc2d2932c015532d77e9f8537b4ba7813b150", size = 99101, upload-time = "2025-03-08T15:54:12.026Z" }, ] [[package]] name = "idna" version = "3.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload_time = "2024-09-15T18:07:39.745Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload_time = "2024-09-15T18:07:37.964Z" }, + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] [[package]] @@ -337,9 +380,51 @@ dependencies = [ { name = "editor" }, { name = "readchar" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f3/06/ef91eb8f3feafb736aa33dcb278fc9555d17861aa571b684715d095db24d/inquirer-3.4.0.tar.gz", hash = "sha256:8edc99c076386ee2d2204e5e3653c2488244e82cb197b2d498b3c1b5ffb25d0b", size = 14472, upload_time = "2024-08-12T12:03:43.83Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/06/ef91eb8f3feafb736aa33dcb278fc9555d17861aa571b684715d095db24d/inquirer-3.4.0.tar.gz", hash = "sha256:8edc99c076386ee2d2204e5e3653c2488244e82cb197b2d498b3c1b5ffb25d0b", size = 14472, upload-time = "2024-08-12T12:03:43.83Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/b2/be907c8c0f8303bc4b10089f5470014c3bf3521e9b8d3decf3037fd94725/inquirer-3.4.0-py3-none-any.whl", hash = "sha256:bb0ec93c833e4ce7b51b98b1644b0a4d2bb39755c39787f6a504e4fee7a11b60", size = 18077, upload-time = "2024-08-12T12:03:41.589Z" }, +] + +[[package]] +name = "jaraco-classes" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, +] +sdist 
= { url = "https://files.pythonhosted.org/packages/06/c0/ed4a27bc5571b99e3cff68f8a9fa5b56ff7df1c2251cc715a652ddd26402/jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd", size = 11780, upload-time = "2024-03-31T07:27:36.643Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/b2/be907c8c0f8303bc4b10089f5470014c3bf3521e9b8d3decf3037fd94725/inquirer-3.4.0-py3-none-any.whl", hash = "sha256:bb0ec93c833e4ce7b51b98b1644b0a4d2bb39755c39787f6a504e4fee7a11b60", size = 18077, upload_time = "2024-08-12T12:03:41.589Z" }, + { url = "https://files.pythonhosted.org/packages/7f/66/b15ce62552d84bbfcec9a4873ab79d993a1dd4edb922cbfccae192bd5b5f/jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790", size = 6777, upload-time = "2024-03-31T07:27:34.792Z" }, +] + +[[package]] +name = "jaraco-context" +version = "6.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/ad/f3777b81bf0b6e7bc7514a1656d3e637b2e8e15fab2ce3235730b3e7a4e6/jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3", size = 13912, upload-time = "2024-08-20T03:39:27.358Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/db/0c52c4cf5e4bd9f5d7135ec7669a3a767af21b3a308e1ed3674881e52b62/jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4", size = 6825, upload-time = "2024-08-20T03:39:25.966Z" }, +] + +[[package]] +name = "jaraco-functools" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/23/9894b3df5d0a6eb44611c36aec777823fc2e07740dabbd0b810e19594013/jaraco_functools-4.1.0.tar.gz", hash = 
"sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d", size = 19159, upload-time = "2024-09-27T19:47:09.122Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/4f/24b319316142c44283d7540e76c7b5a6dbd5db623abd86bb7b3491c21018/jaraco.functools-4.1.0-py3-none-any.whl", hash = "sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649", size = 10187, upload-time = "2024-09-27T19:47:07.14Z" }, +] + +[[package]] +name = "jeepney" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/6f/357efd7602486741aa73ffc0617fb310a29b588ed0fd69c2399acbb85b0c/jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732", size = 106758, upload-time = "2025-02-27T18:51:01.684Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/a3/e137168c9c44d18eff0376253da9f1e9234d0239e0ee230d2fee6cea8e55/jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683", size = 49010, upload-time = "2025-02-27T18:51:00.104Z" }, ] [[package]] @@ -349,9 +434,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload_time = "2025-03-05T20:05:02.478Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload_time = "2025-03-05T20:05:00.369Z" }, + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] [[package]] @@ -361,9 +446,26 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ansicon", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/20/d0/59b2b80e7a52d255f9e0ad040d2e826342d05580c4b1d7d7747cfb8db731/jinxed-1.3.0.tar.gz", hash = "sha256:1593124b18a41b7a3da3b078471442e51dbad3d77b4d4f2b0c26ab6f7d660dbf", size = 80981, upload_time = "2024-07-31T22:39:18.854Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/d0/59b2b80e7a52d255f9e0ad040d2e826342d05580c4b1d7d7747cfb8db731/jinxed-1.3.0.tar.gz", hash = "sha256:1593124b18a41b7a3da3b078471442e51dbad3d77b4d4f2b0c26ab6f7d660dbf", size = 80981, upload-time = "2024-07-31T22:39:18.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/e3/0e0014d6ab159d48189e92044ace13b1e1fe9aa3024ba9f4e8cf172aa7c2/jinxed-1.3.0-py2.py3-none-any.whl", hash = "sha256:b993189f39dc2d7504d802152671535b06d380b26d78070559551cbf92df4fc5", size = 33085, upload-time = "2024-07-31T22:39:17.426Z" }, +] + +[[package]] +name = "keyring" +version = "25.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jaraco-classes" }, + { name = "jaraco-context" }, + { name = "jaraco-functools" }, + { name = "jeepney", marker = "sys_platform == 'linux'" }, + { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, + { name = "secretstorage", marker = 
"sys_platform == 'linux'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/70/09/d904a6e96f76ff214be59e7aa6ef7190008f52a0ab6689760a98de0bf37d/keyring-25.6.0.tar.gz", hash = "sha256:0b39998aa941431eb3d9b0d4b2460bc773b9df6fed7621c2dfb291a7e0187a66", size = 62750, upload-time = "2024-12-25T15:26:45.782Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/27/e3/0e0014d6ab159d48189e92044ace13b1e1fe9aa3024ba9f4e8cf172aa7c2/jinxed-1.3.0-py2.py3-none-any.whl", hash = "sha256:b993189f39dc2d7504d802152671535b06d380b26d78070559551cbf92df4fc5", size = 33085, upload_time = "2024-07-31T22:39:17.426Z" }, + { url = "https://files.pythonhosted.org/packages/d3/32/da7f44bcb1105d3e88a0b74ebdca50c59121d2ddf71c9e34ba47df7f3a56/keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd", size = 39085, upload-time = "2024-12-25T15:26:44.377Z" }, ] [[package]] @@ -374,6 +476,7 @@ dependencies = [ { name = "fastapi", extra = ["standard"] }, { name = "httpx" }, { name = "inquirer" }, + { name = "keyring" }, { name = "linkedin-scraper" }, { name = "mcp", extra = ["cli"] }, { name = "mypy" }, @@ -386,6 +489,7 @@ requires-dist = [ { name = "fastapi", extras = ["standard"], specifier = ">=0.115.12" }, { name = "httpx", specifier = ">=0.28.1" }, { name = "inquirer", specifier = ">=3.4.0" }, + { name = "keyring", specifier = ">=25.6.0" }, { name = "linkedin-scraper", git = "https://github.com/stickerdaniel/linkedin_scraper.git" }, { name = "mcp", extras = ["cli"], specifier = ">=1.6.0" }, { name = "mypy", specifier = ">=1.15.0" }, @@ -407,42 +511,42 @@ dependencies = [ name = "lxml" version = "5.3.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/80/61/d3dc048cd6c7be6fe45b80cedcbdd4326ba4d550375f266d9f4246d0f4bc/lxml-5.3.2.tar.gz", hash = "sha256:773947d0ed809ddad824b7b14467e1a481b8976e87278ac4a730c2f7c7fcddc1", size = 3679948, upload_time = 
"2025-04-05T18:31:58.757Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/7e/c749257a7fabc712c4df57927b0f703507f316e9f2c7e3219f8f76d36145/lxml-5.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:16b3897691ec0316a1aa3c6585f61c8b7978475587c5b16fc1d2c28d283dc1b0", size = 8193212, upload_time = "2025-04-05T18:26:42.692Z" }, - { url = "https://files.pythonhosted.org/packages/a8/50/17e985ba162c9f1ca119f4445004b58f9e5ef559ded599b16755e9bfa260/lxml-5.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a8d4b34a0eeaf6e73169dcfd653c8d47f25f09d806c010daf074fba2db5e2d3f", size = 4451439, upload_time = "2025-04-05T18:26:46.468Z" }, - { url = "https://files.pythonhosted.org/packages/c2/b5/4960ba0fcca6ce394ed4a2f89ee13083e7fcbe9641a91166e8e9792fedb1/lxml-5.3.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cd7a959396da425022e1e4214895b5cfe7de7035a043bcc2d11303792b67554", size = 5052146, upload_time = "2025-04-05T18:26:49.737Z" }, - { url = "https://files.pythonhosted.org/packages/5f/d1/184b04481a5d1f5758916de087430752a7b229bddbd6c1d23405078c72bd/lxml-5.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cac5eaeec3549c5df7f8f97a5a6db6963b91639389cdd735d5a806370847732b", size = 4789082, upload_time = "2025-04-05T18:26:52.295Z" }, - { url = "https://files.pythonhosted.org/packages/7d/75/1a19749d373e9a3d08861addccdf50c92b628c67074b22b8f3c61997cf5a/lxml-5.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29b5f7d77334877c2146e7bb8b94e4df980325fab0a8af4d524e5d43cd6f789d", size = 5312300, upload_time = "2025-04-05T18:26:54.923Z" }, - { url = "https://files.pythonhosted.org/packages/fb/00/9d165d4060d3f347e63b219fcea5c6a3f9193e9e2868c6801e18e5379725/lxml-5.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13f3495cfec24e3d63fffd342cc8141355d1d26ee766ad388775f5c8c5ec3932", size = 4836655, upload_time = 
"2025-04-05T18:26:57.488Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/06720a33cc155966448a19677f079100517b6629a872382d22ebd25e48aa/lxml-5.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e70ad4c9658beeff99856926fd3ee5fde8b519b92c693f856007177c36eb2e30", size = 4961795, upload_time = "2025-04-05T18:27:00.126Z" }, - { url = "https://files.pythonhosted.org/packages/2d/57/4540efab2673de2904746b37ef7f74385329afd4643ed92abcc9ec6e00ca/lxml-5.3.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:507085365783abd7879fa0a6fa55eddf4bdd06591b17a2418403bb3aff8a267d", size = 4779791, upload_time = "2025-04-05T18:27:03.061Z" }, - { url = "https://files.pythonhosted.org/packages/99/ad/6056edf6c9f4fa1d41e6fbdae52c733a4a257fd0d7feccfa26ae051bb46f/lxml-5.3.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:5bb304f67cbf5dfa07edad904732782cbf693286b9cd85af27059c5779131050", size = 5346807, upload_time = "2025-04-05T18:27:05.877Z" }, - { url = "https://files.pythonhosted.org/packages/a1/fa/5be91fc91a18f3f705ea5533bc2210b25d738c6b615bf1c91e71a9b2f26b/lxml-5.3.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:3d84f5c093645c21c29a4e972b84cb7cf682f707f8706484a5a0c7ff13d7a988", size = 4909213, upload_time = "2025-04-05T18:27:08.588Z" }, - { url = "https://files.pythonhosted.org/packages/f3/74/71bb96a3b5ae36b74e0402f4fa319df5559a8538577f8c57c50f1b57dc15/lxml-5.3.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:bdc13911db524bd63f37b0103af014b7161427ada41f1b0b3c9b5b5a9c1ca927", size = 4987694, upload_time = "2025-04-05T18:27:11.66Z" }, - { url = "https://files.pythonhosted.org/packages/08/c2/3953a68b0861b2f97234b1838769269478ccf872d8ea7a26e911238220ad/lxml-5.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ec944539543f66ebc060ae180d47e86aca0188bda9cbfadff47d86b0dc057dc", size = 4862865, upload_time = "2025-04-05T18:27:14.194Z" }, - { url = 
"https://files.pythonhosted.org/packages/e0/9a/52e48f7cfd5a5e61f44a77e679880580dfb4f077af52d6ed5dd97e3356fe/lxml-5.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:59d437cc8a7f838282df5a199cf26f97ef08f1c0fbec6e84bd6f5cc2b7913f6e", size = 5423383, upload_time = "2025-04-05T18:27:16.988Z" }, - { url = "https://files.pythonhosted.org/packages/17/67/42fe1d489e4dcc0b264bef361aef0b929fbb2b5378702471a3043bc6982c/lxml-5.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e275961adbd32e15672e14e0cc976a982075208224ce06d149c92cb43db5b93", size = 5286864, upload_time = "2025-04-05T18:27:19.703Z" }, - { url = "https://files.pythonhosted.org/packages/29/e4/03b1d040ee3aaf2bd4e1c2061de2eae1178fe9a460d3efc1ea7ef66f6011/lxml-5.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:038aeb6937aa404480c2966b7f26f1440a14005cb0702078c173c028eca72c31", size = 5056819, upload_time = "2025-04-05T18:27:22.814Z" }, - { url = "https://files.pythonhosted.org/packages/83/b3/e2ec8a6378e4d87da3af9de7c862bcea7ca624fc1a74b794180c82e30123/lxml-5.3.2-cp312-cp312-win32.whl", hash = "sha256:3c2c8d0fa3277147bff180e3590be67597e17d365ce94beb2efa3138a2131f71", size = 3486177, upload_time = "2025-04-05T18:27:25.078Z" }, - { url = "https://files.pythonhosted.org/packages/d5/8a/6a08254b0bab2da9573735725caab8302a2a1c9b3818533b41568ca489be/lxml-5.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:77809fcd97dfda3f399102db1794f7280737b69830cd5c961ac87b3c5c05662d", size = 3817134, upload_time = "2025-04-05T18:27:27.481Z" }, - { url = "https://files.pythonhosted.org/packages/19/fe/904fd1b0ba4f42ed5a144fcfff7b8913181892a6aa7aeb361ee783d441f8/lxml-5.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:77626571fb5270ceb36134765f25b665b896243529eefe840974269b083e090d", size = 8173598, upload_time = "2025-04-05T18:27:31.229Z" }, - { url = "https://files.pythonhosted.org/packages/97/e8/5e332877b3ce4e2840507b35d6dbe1cc33b17678ece945ba48d2962f8c06/lxml-5.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash 
= "sha256:78a533375dc7aa16d0da44af3cf6e96035e484c8c6b2b2445541a5d4d3d289ee", size = 4441586, upload_time = "2025-04-05T18:27:33.883Z" }, - { url = "https://files.pythonhosted.org/packages/de/f4/8fe2e6d8721803182fbce2325712e98f22dbc478126070e62731ec6d54a0/lxml-5.3.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6f62b2404b3f3f0744bbcabb0381c5fe186fa2a9a67ecca3603480f4846c585", size = 5038447, upload_time = "2025-04-05T18:27:36.426Z" }, - { url = "https://files.pythonhosted.org/packages/a6/ac/fa63f86a1a4b1ba8b03599ad9e2f5212fa813223ac60bfe1155390d1cc0c/lxml-5.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ea918da00091194526d40c30c4996971f09dacab032607581f8d8872db34fbf", size = 4783583, upload_time = "2025-04-05T18:27:39.492Z" }, - { url = "https://files.pythonhosted.org/packages/1a/7a/08898541296a02c868d4acc11f31a5839d80f5b21d4a96f11d4c0fbed15e/lxml-5.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c35326f94702a7264aa0eea826a79547d3396a41ae87a70511b9f6e9667ad31c", size = 5305684, upload_time = "2025-04-05T18:27:42.16Z" }, - { url = "https://files.pythonhosted.org/packages/0b/be/9a6d80b467771b90be762b968985d3de09e0d5886092238da65dac9c1f75/lxml-5.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3bef90af21d31c4544bc917f51e04f94ae11b43156356aff243cdd84802cbf2", size = 4830797, upload_time = "2025-04-05T18:27:45.071Z" }, - { url = "https://files.pythonhosted.org/packages/8d/1c/493632959f83519802637f7db3be0113b6e8a4e501b31411fbf410735a75/lxml-5.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52fa7ba11a495b7cbce51573c73f638f1dcff7b3ee23697467dc063f75352a69", size = 4950302, upload_time = "2025-04-05T18:27:47.979Z" }, - { url = 
"https://files.pythonhosted.org/packages/c7/13/01aa3b92a6b93253b90c061c7527261b792f5ae7724b420cded733bfd5d6/lxml-5.3.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ad131e2c4d2c3803e736bb69063382334e03648de2a6b8f56a878d700d4b557d", size = 4775247, upload_time = "2025-04-05T18:27:51.174Z" }, - { url = "https://files.pythonhosted.org/packages/60/4a/baeb09fbf5c84809e119c9cf8e2e94acec326a9b45563bf5ae45a234973b/lxml-5.3.2-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:00a4463ca409ceacd20490a893a7e08deec7870840eff33dc3093067b559ce3e", size = 5338824, upload_time = "2025-04-05T18:27:54.15Z" }, - { url = "https://files.pythonhosted.org/packages/69/c7/a05850f169ad783ed09740ac895e158b06d25fce4b13887a8ac92a84d61c/lxml-5.3.2-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:87e8d78205331cace2b73ac8249294c24ae3cba98220687b5b8ec5971a2267f1", size = 4899079, upload_time = "2025-04-05T18:27:57.03Z" }, - { url = "https://files.pythonhosted.org/packages/de/48/18ca583aba5235582db0e933ed1af6540226ee9ca16c2ee2d6f504fcc34a/lxml-5.3.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bf6389133bb255e530a4f2f553f41c4dd795b1fbb6f797aea1eff308f1e11606", size = 4978041, upload_time = "2025-04-05T18:27:59.918Z" }, - { url = "https://files.pythonhosted.org/packages/b6/55/6968ddc88554209d1dba0dca196360c629b3dfe083bc32a3370f9523a0c4/lxml-5.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b3709fc752b42fb6b6ffa2ba0a5b9871646d97d011d8f08f4d5b3ee61c7f3b2b", size = 4859761, upload_time = "2025-04-05T18:28:02.83Z" }, - { url = "https://files.pythonhosted.org/packages/2e/52/d2d3baa1e0b7d04a729613160f1562f466fb1a0e45085a33acb0d6981a2b/lxml-5.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:abc795703d0de5d83943a4badd770fbe3d1ca16ee4ff3783d7caffc252f309ae", size = 5418209, upload_time = "2025-04-05T18:28:05.851Z" }, - { url = 
"https://files.pythonhosted.org/packages/d3/50/6005b297ba5f858a113d6e81ccdb3a558b95a615772e7412d1f1cbdf22d7/lxml-5.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98050830bb6510159f65d9ad1b8aca27f07c01bb3884ba95f17319ccedc4bcf9", size = 5274231, upload_time = "2025-04-05T18:28:08.849Z" }, - { url = "https://files.pythonhosted.org/packages/fb/33/6f40c09a5f7d7e7fcb85ef75072e53eba3fbadbf23e4991ca069ab2b1abb/lxml-5.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6ba465a91acc419c5682f8b06bcc84a424a7aa5c91c220241c6fd31de2a72bc6", size = 5051899, upload_time = "2025-04-05T18:28:11.729Z" }, - { url = "https://files.pythonhosted.org/packages/8b/3a/673bc5c0d5fb6596ee2963dd016fdaefaed2c57ede82c7634c08cbda86c1/lxml-5.3.2-cp313-cp313-win32.whl", hash = "sha256:56a1d56d60ea1ec940f949d7a309e0bff05243f9bd337f585721605670abb1c1", size = 3485315, upload_time = "2025-04-05T18:28:14.815Z" }, - { url = "https://files.pythonhosted.org/packages/8c/be/cab8dd33b0dbe3af5b5d4d24137218f79ea75d540f74eb7d8581195639e0/lxml-5.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:1a580dc232c33d2ad87d02c8a3069d47abbcdce974b9c9cc82a79ff603065dbe", size = 3814639, upload_time = "2025-04-05T18:28:17.268Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/80/61/d3dc048cd6c7be6fe45b80cedcbdd4326ba4d550375f266d9f4246d0f4bc/lxml-5.3.2.tar.gz", hash = "sha256:773947d0ed809ddad824b7b14467e1a481b8976e87278ac4a730c2f7c7fcddc1", size = 3679948, upload-time = "2025-04-05T18:31:58.757Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/7e/c749257a7fabc712c4df57927b0f703507f316e9f2c7e3219f8f76d36145/lxml-5.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:16b3897691ec0316a1aa3c6585f61c8b7978475587c5b16fc1d2c28d283dc1b0", size = 8193212, upload-time = "2025-04-05T18:26:42.692Z" }, + { url = "https://files.pythonhosted.org/packages/a8/50/17e985ba162c9f1ca119f4445004b58f9e5ef559ded599b16755e9bfa260/lxml-5.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:a8d4b34a0eeaf6e73169dcfd653c8d47f25f09d806c010daf074fba2db5e2d3f", size = 4451439, upload-time = "2025-04-05T18:26:46.468Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b5/4960ba0fcca6ce394ed4a2f89ee13083e7fcbe9641a91166e8e9792fedb1/lxml-5.3.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cd7a959396da425022e1e4214895b5cfe7de7035a043bcc2d11303792b67554", size = 5052146, upload-time = "2025-04-05T18:26:49.737Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d1/184b04481a5d1f5758916de087430752a7b229bddbd6c1d23405078c72bd/lxml-5.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cac5eaeec3549c5df7f8f97a5a6db6963b91639389cdd735d5a806370847732b", size = 4789082, upload-time = "2025-04-05T18:26:52.295Z" }, + { url = "https://files.pythonhosted.org/packages/7d/75/1a19749d373e9a3d08861addccdf50c92b628c67074b22b8f3c61997cf5a/lxml-5.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29b5f7d77334877c2146e7bb8b94e4df980325fab0a8af4d524e5d43cd6f789d", size = 5312300, upload-time = "2025-04-05T18:26:54.923Z" }, + { url = "https://files.pythonhosted.org/packages/fb/00/9d165d4060d3f347e63b219fcea5c6a3f9193e9e2868c6801e18e5379725/lxml-5.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13f3495cfec24e3d63fffd342cc8141355d1d26ee766ad388775f5c8c5ec3932", size = 4836655, upload-time = "2025-04-05T18:26:57.488Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e9/06720a33cc155966448a19677f079100517b6629a872382d22ebd25e48aa/lxml-5.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e70ad4c9658beeff99856926fd3ee5fde8b519b92c693f856007177c36eb2e30", size = 4961795, upload-time = "2025-04-05T18:27:00.126Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/57/4540efab2673de2904746b37ef7f74385329afd4643ed92abcc9ec6e00ca/lxml-5.3.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:507085365783abd7879fa0a6fa55eddf4bdd06591b17a2418403bb3aff8a267d", size = 4779791, upload-time = "2025-04-05T18:27:03.061Z" }, + { url = "https://files.pythonhosted.org/packages/99/ad/6056edf6c9f4fa1d41e6fbdae52c733a4a257fd0d7feccfa26ae051bb46f/lxml-5.3.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:5bb304f67cbf5dfa07edad904732782cbf693286b9cd85af27059c5779131050", size = 5346807, upload-time = "2025-04-05T18:27:05.877Z" }, + { url = "https://files.pythonhosted.org/packages/a1/fa/5be91fc91a18f3f705ea5533bc2210b25d738c6b615bf1c91e71a9b2f26b/lxml-5.3.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:3d84f5c093645c21c29a4e972b84cb7cf682f707f8706484a5a0c7ff13d7a988", size = 4909213, upload-time = "2025-04-05T18:27:08.588Z" }, + { url = "https://files.pythonhosted.org/packages/f3/74/71bb96a3b5ae36b74e0402f4fa319df5559a8538577f8c57c50f1b57dc15/lxml-5.3.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:bdc13911db524bd63f37b0103af014b7161427ada41f1b0b3c9b5b5a9c1ca927", size = 4987694, upload-time = "2025-04-05T18:27:11.66Z" }, + { url = "https://files.pythonhosted.org/packages/08/c2/3953a68b0861b2f97234b1838769269478ccf872d8ea7a26e911238220ad/lxml-5.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ec944539543f66ebc060ae180d47e86aca0188bda9cbfadff47d86b0dc057dc", size = 4862865, upload-time = "2025-04-05T18:27:14.194Z" }, + { url = "https://files.pythonhosted.org/packages/e0/9a/52e48f7cfd5a5e61f44a77e679880580dfb4f077af52d6ed5dd97e3356fe/lxml-5.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:59d437cc8a7f838282df5a199cf26f97ef08f1c0fbec6e84bd6f5cc2b7913f6e", size = 5423383, upload-time = "2025-04-05T18:27:16.988Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/67/42fe1d489e4dcc0b264bef361aef0b929fbb2b5378702471a3043bc6982c/lxml-5.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e275961adbd32e15672e14e0cc976a982075208224ce06d149c92cb43db5b93", size = 5286864, upload-time = "2025-04-05T18:27:19.703Z" }, + { url = "https://files.pythonhosted.org/packages/29/e4/03b1d040ee3aaf2bd4e1c2061de2eae1178fe9a460d3efc1ea7ef66f6011/lxml-5.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:038aeb6937aa404480c2966b7f26f1440a14005cb0702078c173c028eca72c31", size = 5056819, upload-time = "2025-04-05T18:27:22.814Z" }, + { url = "https://files.pythonhosted.org/packages/83/b3/e2ec8a6378e4d87da3af9de7c862bcea7ca624fc1a74b794180c82e30123/lxml-5.3.2-cp312-cp312-win32.whl", hash = "sha256:3c2c8d0fa3277147bff180e3590be67597e17d365ce94beb2efa3138a2131f71", size = 3486177, upload-time = "2025-04-05T18:27:25.078Z" }, + { url = "https://files.pythonhosted.org/packages/d5/8a/6a08254b0bab2da9573735725caab8302a2a1c9b3818533b41568ca489be/lxml-5.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:77809fcd97dfda3f399102db1794f7280737b69830cd5c961ac87b3c5c05662d", size = 3817134, upload-time = "2025-04-05T18:27:27.481Z" }, + { url = "https://files.pythonhosted.org/packages/19/fe/904fd1b0ba4f42ed5a144fcfff7b8913181892a6aa7aeb361ee783d441f8/lxml-5.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:77626571fb5270ceb36134765f25b665b896243529eefe840974269b083e090d", size = 8173598, upload-time = "2025-04-05T18:27:31.229Z" }, + { url = "https://files.pythonhosted.org/packages/97/e8/5e332877b3ce4e2840507b35d6dbe1cc33b17678ece945ba48d2962f8c06/lxml-5.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:78a533375dc7aa16d0da44af3cf6e96035e484c8c6b2b2445541a5d4d3d289ee", size = 4441586, upload-time = "2025-04-05T18:27:33.883Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/f4/8fe2e6d8721803182fbce2325712e98f22dbc478126070e62731ec6d54a0/lxml-5.3.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6f62b2404b3f3f0744bbcabb0381c5fe186fa2a9a67ecca3603480f4846c585", size = 5038447, upload-time = "2025-04-05T18:27:36.426Z" }, + { url = "https://files.pythonhosted.org/packages/a6/ac/fa63f86a1a4b1ba8b03599ad9e2f5212fa813223ac60bfe1155390d1cc0c/lxml-5.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ea918da00091194526d40c30c4996971f09dacab032607581f8d8872db34fbf", size = 4783583, upload-time = "2025-04-05T18:27:39.492Z" }, + { url = "https://files.pythonhosted.org/packages/1a/7a/08898541296a02c868d4acc11f31a5839d80f5b21d4a96f11d4c0fbed15e/lxml-5.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c35326f94702a7264aa0eea826a79547d3396a41ae87a70511b9f6e9667ad31c", size = 5305684, upload-time = "2025-04-05T18:27:42.16Z" }, + { url = "https://files.pythonhosted.org/packages/0b/be/9a6d80b467771b90be762b968985d3de09e0d5886092238da65dac9c1f75/lxml-5.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3bef90af21d31c4544bc917f51e04f94ae11b43156356aff243cdd84802cbf2", size = 4830797, upload-time = "2025-04-05T18:27:45.071Z" }, + { url = "https://files.pythonhosted.org/packages/8d/1c/493632959f83519802637f7db3be0113b6e8a4e501b31411fbf410735a75/lxml-5.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52fa7ba11a495b7cbce51573c73f638f1dcff7b3ee23697467dc063f75352a69", size = 4950302, upload-time = "2025-04-05T18:27:47.979Z" }, + { url = "https://files.pythonhosted.org/packages/c7/13/01aa3b92a6b93253b90c061c7527261b792f5ae7724b420cded733bfd5d6/lxml-5.3.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ad131e2c4d2c3803e736bb69063382334e03648de2a6b8f56a878d700d4b557d", size = 4775247, upload-time = "2025-04-05T18:27:51.174Z" }, + 
{ url = "https://files.pythonhosted.org/packages/60/4a/baeb09fbf5c84809e119c9cf8e2e94acec326a9b45563bf5ae45a234973b/lxml-5.3.2-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:00a4463ca409ceacd20490a893a7e08deec7870840eff33dc3093067b559ce3e", size = 5338824, upload-time = "2025-04-05T18:27:54.15Z" }, + { url = "https://files.pythonhosted.org/packages/69/c7/a05850f169ad783ed09740ac895e158b06d25fce4b13887a8ac92a84d61c/lxml-5.3.2-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:87e8d78205331cace2b73ac8249294c24ae3cba98220687b5b8ec5971a2267f1", size = 4899079, upload-time = "2025-04-05T18:27:57.03Z" }, + { url = "https://files.pythonhosted.org/packages/de/48/18ca583aba5235582db0e933ed1af6540226ee9ca16c2ee2d6f504fcc34a/lxml-5.3.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bf6389133bb255e530a4f2f553f41c4dd795b1fbb6f797aea1eff308f1e11606", size = 4978041, upload-time = "2025-04-05T18:27:59.918Z" }, + { url = "https://files.pythonhosted.org/packages/b6/55/6968ddc88554209d1dba0dca196360c629b3dfe083bc32a3370f9523a0c4/lxml-5.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b3709fc752b42fb6b6ffa2ba0a5b9871646d97d011d8f08f4d5b3ee61c7f3b2b", size = 4859761, upload-time = "2025-04-05T18:28:02.83Z" }, + { url = "https://files.pythonhosted.org/packages/2e/52/d2d3baa1e0b7d04a729613160f1562f466fb1a0e45085a33acb0d6981a2b/lxml-5.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:abc795703d0de5d83943a4badd770fbe3d1ca16ee4ff3783d7caffc252f309ae", size = 5418209, upload-time = "2025-04-05T18:28:05.851Z" }, + { url = "https://files.pythonhosted.org/packages/d3/50/6005b297ba5f858a113d6e81ccdb3a558b95a615772e7412d1f1cbdf22d7/lxml-5.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98050830bb6510159f65d9ad1b8aca27f07c01bb3884ba95f17319ccedc4bcf9", size = 5274231, upload-time = "2025-04-05T18:28:08.849Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/33/6f40c09a5f7d7e7fcb85ef75072e53eba3fbadbf23e4991ca069ab2b1abb/lxml-5.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6ba465a91acc419c5682f8b06bcc84a424a7aa5c91c220241c6fd31de2a72bc6", size = 5051899, upload-time = "2025-04-05T18:28:11.729Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3a/673bc5c0d5fb6596ee2963dd016fdaefaed2c57ede82c7634c08cbda86c1/lxml-5.3.2-cp313-cp313-win32.whl", hash = "sha256:56a1d56d60ea1ec940f949d7a309e0bff05243f9bd337f585721605670abb1c1", size = 3485315, upload-time = "2025-04-05T18:28:14.815Z" }, + { url = "https://files.pythonhosted.org/packages/8c/be/cab8dd33b0dbe3af5b5d4d24137218f79ea75d540f74eb7d8581195639e0/lxml-5.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:1a580dc232c33d2ad87d02c8a3069d47abbcdce974b9c9cc82a79ff603065dbe", size = 3814639, upload-time = "2025-04-05T18:28:17.268Z" }, ] [[package]] @@ -452,47 +556,47 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mdurl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload_time = "2023-06-03T06:41:14.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload_time = "2023-06-03T06:41:11.019Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, ] [[package]] name = "markupsafe" version = "3.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload_time = "2024-10-18T15:21:54.129Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload_time = "2024-10-18T15:21:13.777Z" }, - { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload_time = "2024-10-18T15:21:14.822Z" }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload_time = "2024-10-18T15:21:15.642Z" }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload_time = "2024-10-18T15:21:17.133Z" }, - { 
url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload_time = "2024-10-18T15:21:18.064Z" }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload_time = "2024-10-18T15:21:18.859Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload_time = "2024-10-18T15:21:19.671Z" }, - { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload_time = "2024-10-18T15:21:20.971Z" }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload_time = "2024-10-18T15:21:22.646Z" }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload_time = "2024-10-18T15:21:23.499Z" }, - { url = 
"https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload_time = "2024-10-18T15:21:24.577Z" }, - { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload_time = "2024-10-18T15:21:25.382Z" }, - { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload_time = "2024-10-18T15:21:26.199Z" }, - { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload_time = "2024-10-18T15:21:27.029Z" }, - { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload_time = "2024-10-18T15:21:27.846Z" }, - { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload_time = "2024-10-18T15:21:28.744Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload_time = "2024-10-18T15:21:29.545Z" }, - { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload_time = "2024-10-18T15:21:30.366Z" }, - { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload_time = "2024-10-18T15:21:31.207Z" }, - { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload_time = "2024-10-18T15:21:32.032Z" }, - { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload_time = "2024-10-18T15:21:33.625Z" }, - { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload_time = "2024-10-18T15:21:34.611Z" }, - { url = 
"https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload_time = "2024-10-18T15:21:35.398Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload_time = "2024-10-18T15:21:36.231Z" }, - { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload_time = "2024-10-18T15:21:37.073Z" }, - { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload_time = "2024-10-18T15:21:37.932Z" }, - { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload_time = "2024-10-18T15:21:39.799Z" }, - { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload_time = "2024-10-18T15:21:40.813Z" }, - { url = 
"https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload_time = "2024-10-18T15:21:41.814Z" }, - { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload_time = "2024-10-18T15:21:42.784Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, ] [[package]] @@ -509,9 +613,9 @@ dependencies = [ { name = "starlette" }, { name = "uvicorn" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/d2/f587cb965a56e992634bebc8611c5b579af912b74e04eb9164bd49527d21/mcp-1.6.0.tar.gz", hash = "sha256:d9324876de2c5637369f43161cd71eebfd803df5a95e46225cab8d280e366723", size = 200031, upload_time = "2025-03-27T16:46:32.336Z" } +sdist = { url = "https://files.pythonhosted.org/packages/95/d2/f587cb965a56e992634bebc8611c5b579af912b74e04eb9164bd49527d21/mcp-1.6.0.tar.gz", hash = "sha256:d9324876de2c5637369f43161cd71eebfd803df5a95e46225cab8d280e366723", size = 200031, upload-time = "2025-03-27T16:46:32.336Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/10/30/20a7f33b0b884a9d14dd3aa94ff1ac9da1479fe2ad66dd9e2736075d2506/mcp-1.6.0-py3-none-any.whl", hash = "sha256:7bd24c6ea042dbec44c754f100984d186620d8b841ec30f1b19eda9b93a634d0", size = 76077, upload_time = "2025-03-27T16:46:29.919Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/30/20a7f33b0b884a9d14dd3aa94ff1ac9da1479fe2ad66dd9e2736075d2506/mcp-1.6.0-py3-none-any.whl", hash = "sha256:7bd24c6ea042dbec44c754f100984d186620d8b841ec30f1b19eda9b93a634d0", size = 76077, upload-time = "2025-03-27T16:46:29.919Z" }, ] [package.optional-dependencies] @@ -524,9 +628,18 @@ cli = [ name = "mdurl" version = "0.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload_time = "2022-08-14T12:40:10.846Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload_time = "2022-08-14T12:40:09.779Z" }, + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "more-itertools" +version = "10.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/a0/834b0cebabbfc7e311f30b46c8188790a37f89fc8d756660346fe5abfd09/more_itertools-10.7.0.tar.gz", hash = "sha256:9fddd5403be01a94b204faadcff459ec3568cf110265d3c54323e1e866ad29d3", size = 127671, upload-time = "2025-04-22T14:17:41.838Z" } +wheels = [ 
+ { url = "https://files.pythonhosted.org/packages/2b/9f/7ba6f94fc1e9ac3d2b853fdff3035fb2fa5afbed898c4a72b8a020610594/more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e", size = 65278, upload-time = "2025-04-22T14:17:40.49Z" }, ] [[package]] @@ -537,39 +650,39 @@ dependencies = [ { name = "mypy-extensions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ce/43/d5e49a86afa64bd3839ea0d5b9c7103487007d728e1293f52525d6d5486a/mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", size = 3239717, upload_time = "2025-02-05T03:50:34.655Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/43/d5e49a86afa64bd3839ea0d5b9c7103487007d728e1293f52525d6d5486a/mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", size = 3239717, upload-time = "2025-02-05T03:50:34.655Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/3a/03c74331c5eb8bd025734e04c9840532226775c47a2c39b56a0c8d4f128d/mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", size = 10793981, upload_time = "2025-02-05T03:50:28.25Z" }, - { url = "https://files.pythonhosted.org/packages/f0/1a/41759b18f2cfd568848a37c89030aeb03534411eef981df621d8fad08a1d/mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", size = 9749175, upload_time = "2025-02-05T03:50:13.411Z" }, - { url = "https://files.pythonhosted.org/packages/12/7e/873481abf1ef112c582db832740f4c11b2bfa510e829d6da29b0ab8c3f9c/mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", size = 11455675, upload_time = "2025-02-05T03:50:31.421Z" }, - { url = 
"https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", size = 12410020, upload_time = "2025-02-05T03:48:48.705Z" }, - { url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", size = 12498582, upload_time = "2025-02-05T03:49:03.628Z" }, - { url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", size = 9366614, upload_time = "2025-02-05T03:50:00.313Z" }, - { url = "https://files.pythonhosted.org/packages/6a/9b/fd2e05d6ffff24d912f150b87db9e364fa8282045c875654ce7e32fffa66/mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", size = 10788592, upload_time = "2025-02-05T03:48:55.789Z" }, - { url = "https://files.pythonhosted.org/packages/74/37/b246d711c28a03ead1fd906bbc7106659aed7c089d55fe40dd58db812628/mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", size = 9753611, upload_time = "2025-02-05T03:48:44.581Z" }, - { url = "https://files.pythonhosted.org/packages/a6/ac/395808a92e10cfdac8003c3de9a2ab6dc7cde6c0d2a4df3df1b815ffd067/mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", size = 11438443, upload_time = "2025-02-05T03:49:25.514Z" }, - { url = 
"https://files.pythonhosted.org/packages/d2/8b/801aa06445d2de3895f59e476f38f3f8d610ef5d6908245f07d002676cbf/mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", size = 12402541, upload_time = "2025-02-05T03:49:57.623Z" }, - { url = "https://files.pythonhosted.org/packages/c7/67/5a4268782eb77344cc613a4cf23540928e41f018a9a1ec4c6882baf20ab8/mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", size = 12494348, upload_time = "2025-02-05T03:48:52.361Z" }, - { url = "https://files.pythonhosted.org/packages/83/3e/57bb447f7bbbfaabf1712d96f9df142624a386d98fb026a761532526057e/mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", size = 9373648, upload_time = "2025-02-05T03:49:11.395Z" }, - { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777, upload_time = "2025-02-05T03:50:08.348Z" }, + { url = "https://files.pythonhosted.org/packages/98/3a/03c74331c5eb8bd025734e04c9840532226775c47a2c39b56a0c8d4f128d/mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", size = 10793981, upload-time = "2025-02-05T03:50:28.25Z" }, + { url = "https://files.pythonhosted.org/packages/f0/1a/41759b18f2cfd568848a37c89030aeb03534411eef981df621d8fad08a1d/mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", size = 9749175, upload-time = "2025-02-05T03:50:13.411Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/7e/873481abf1ef112c582db832740f4c11b2bfa510e829d6da29b0ab8c3f9c/mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", size = 11455675, upload-time = "2025-02-05T03:50:31.421Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", size = 12410020, upload-time = "2025-02-05T03:48:48.705Z" }, + { url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", size = 12498582, upload-time = "2025-02-05T03:49:03.628Z" }, + { url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", size = 9366614, upload-time = "2025-02-05T03:50:00.313Z" }, + { url = "https://files.pythonhosted.org/packages/6a/9b/fd2e05d6ffff24d912f150b87db9e364fa8282045c875654ce7e32fffa66/mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", size = 10788592, upload-time = "2025-02-05T03:48:55.789Z" }, + { url = "https://files.pythonhosted.org/packages/74/37/b246d711c28a03ead1fd906bbc7106659aed7c089d55fe40dd58db812628/mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", size = 9753611, upload-time = "2025-02-05T03:48:44.581Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/ac/395808a92e10cfdac8003c3de9a2ab6dc7cde6c0d2a4df3df1b815ffd067/mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", size = 11438443, upload-time = "2025-02-05T03:49:25.514Z" }, + { url = "https://files.pythonhosted.org/packages/d2/8b/801aa06445d2de3895f59e476f38f3f8d610ef5d6908245f07d002676cbf/mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", size = 12402541, upload-time = "2025-02-05T03:49:57.623Z" }, + { url = "https://files.pythonhosted.org/packages/c7/67/5a4268782eb77344cc613a4cf23540928e41f018a9a1ec4c6882baf20ab8/mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", size = 12494348, upload-time = "2025-02-05T03:48:52.361Z" }, + { url = "https://files.pythonhosted.org/packages/83/3e/57bb447f7bbbfaabf1712d96f9df142624a386d98fb026a761532526057e/mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", size = 9373648, upload-time = "2025-02-05T03:49:11.395Z" }, + { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777, upload-time = "2025-02-05T03:50:08.348Z" }, ] [[package]] name = "mypy-extensions" version = "1.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433, upload_time = 
"2023-02-04T12:11:27.157Z" } +sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433, upload-time = "2023-02-04T12:11:27.157Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695, upload_time = "2023-02-04T12:11:25.002Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695, upload-time = "2023-02-04T12:11:25.002Z" }, ] [[package]] name = "nodeenv" version = "1.9.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload_time = "2024-06-04T18:44:11.171Z" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload_time = "2024-06-04T18:44:08.352Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, ] [[package]] @@ -579,18 +692,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/98/df/77698abfac98571e65ffeb0c1fba8ffd692ab8458d617a0eed7d9a8d38f2/outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8", size = 21060, upload_time = "2023-10-26T04:26:04.361Z" } +sdist = { url = "https://files.pythonhosted.org/packages/98/df/77698abfac98571e65ffeb0c1fba8ffd692ab8458d617a0eed7d9a8d38f2/outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8", size = 21060, upload-time = "2023-10-26T04:26:04.361Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/55/8b/5ab7257531a5d830fc8000c476e63c935488d74609b50f9384a643ec0a62/outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b", size = 10692, upload_time = "2023-10-26T04:26:02.532Z" }, + { url = "https://files.pythonhosted.org/packages/55/8b/5ab7257531a5d830fc8000c476e63c935488d74609b50f9384a643ec0a62/outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b", size = 10692, upload-time = "2023-10-26T04:26:02.532Z" }, ] [[package]] name = "platformdirs" version = "4.3.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b6/2d/7d512a3913d60623e7eb945c6d1b4f0bddf1d0b7ada5225274c87e5b53d1/platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351", size = 21291, upload_time = 
"2025-03-19T20:36:10.989Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/2d/7d512a3913d60623e7eb945c6d1b4f0bddf1d0b7ada5225274c87e5b53d1/platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351", size = 21291, upload-time = "2025-03-19T20:36:10.989Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6d/45/59578566b3275b8fd9157885918fcd0c4d74162928a5310926887b856a51/platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94", size = 18499, upload_time = "2025-03-19T20:36:09.038Z" }, + { url = "https://files.pythonhosted.org/packages/6d/45/59578566b3275b8fd9157885918fcd0c4d74162928a5310926887b856a51/platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94", size = 18499, upload-time = "2025-03-19T20:36:09.038Z" }, ] [[package]] @@ -604,18 +717,18 @@ dependencies = [ { name = "pyyaml" }, { name = "virtualenv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/08/39/679ca9b26c7bb2999ff122d50faa301e49af82ca9c066ec061cfbc0c6784/pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146", size = 193424, upload_time = "2025-03-18T21:35:20.987Z" } +sdist = { url = "https://files.pythonhosted.org/packages/08/39/679ca9b26c7bb2999ff122d50faa301e49af82ca9c066ec061cfbc0c6784/pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146", size = 193424, upload-time = "2025-03-18T21:35:20.987Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload_time = "2025-03-18T21:35:19.343Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload-time = "2025-03-18T21:35:19.343Z" }, ] [[package]] name = "pycparser" version = "2.22" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload_time = "2024-03-30T13:22:22.564Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload_time = "2024-03-30T13:22:20.476Z" }, + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, ] [[package]] @@ -628,9 +741,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/10/2e/ca897f093ee6c5f3b0bee123ee4465c50e75431c3d5b6a3b44a47134e891/pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3", size = 785513, upload_time = "2025-04-08T13:27:06.399Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/10/2e/ca897f093ee6c5f3b0bee123ee4465c50e75431c3d5b6a3b44a47134e891/pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3", size = 785513, upload-time = "2025-04-08T13:27:06.399Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/1d/407b29780a289868ed696d1616f4aad49d6388e5a77f567dcd2629dcd7b8/pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f", size = 443591, upload_time = "2025-04-08T13:27:03.789Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1d/407b29780a289868ed696d1616f4aad49d6388e5a77f567dcd2629dcd7b8/pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f", size = 443591, upload-time = "2025-04-08T13:27:03.789Z" }, ] [[package]] @@ -640,39 +753,39 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/17/19/ed6a078a5287aea7922de6841ef4c06157931622c89c2a47940837b5eecd/pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df", size = 434395, upload_time = "2025-04-02T09:49:41.8Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/ce/3cb22b07c29938f97ff5f5bb27521f95e2ebec399b882392deb68d6c440e/pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8", size = 2026640, upload_time = "2025-04-02T09:47:25.394Z" }, - { url = "https://files.pythonhosted.org/packages/19/78/f381d643b12378fee782a72126ec5d793081ef03791c28a0fd542a5bee64/pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498", size = 1852649, upload_time = "2025-04-02T09:47:27.417Z" }, - { url = 
"https://files.pythonhosted.org/packages/9d/2b/98a37b80b15aac9eb2c6cfc6dbd35e5058a352891c5cce3a8472d77665a6/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939", size = 1892472, upload_time = "2025-04-02T09:47:29.006Z" }, - { url = "https://files.pythonhosted.org/packages/4e/d4/3c59514e0f55a161004792b9ff3039da52448f43f5834f905abef9db6e4a/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d", size = 1977509, upload_time = "2025-04-02T09:47:33.464Z" }, - { url = "https://files.pythonhosted.org/packages/a9/b6/c2c7946ef70576f79a25db59a576bce088bdc5952d1b93c9789b091df716/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e", size = 2128702, upload_time = "2025-04-02T09:47:34.812Z" }, - { url = "https://files.pythonhosted.org/packages/88/fe/65a880f81e3f2a974312b61f82a03d85528f89a010ce21ad92f109d94deb/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3", size = 2679428, upload_time = "2025-04-02T09:47:37.315Z" }, - { url = "https://files.pythonhosted.org/packages/6f/ff/4459e4146afd0462fb483bb98aa2436d69c484737feaceba1341615fb0ac/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d", size = 2008753, upload_time = "2025-04-02T09:47:39.013Z" }, - { url = "https://files.pythonhosted.org/packages/7c/76/1c42e384e8d78452ededac8b583fe2550c84abfef83a0552e0e7478ccbc3/pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b", size = 2114849, 
upload_time = "2025-04-02T09:47:40.427Z" }, - { url = "https://files.pythonhosted.org/packages/00/72/7d0cf05095c15f7ffe0eb78914b166d591c0eed72f294da68378da205101/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39", size = 2069541, upload_time = "2025-04-02T09:47:42.01Z" }, - { url = "https://files.pythonhosted.org/packages/b3/69/94a514066bb7d8be499aa764926937409d2389c09be0b5107a970286ef81/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a", size = 2239225, upload_time = "2025-04-02T09:47:43.425Z" }, - { url = "https://files.pythonhosted.org/packages/84/b0/e390071eadb44b41f4f54c3cef64d8bf5f9612c92686c9299eaa09e267e2/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db", size = 2248373, upload_time = "2025-04-02T09:47:44.979Z" }, - { url = "https://files.pythonhosted.org/packages/d6/b2/288b3579ffc07e92af66e2f1a11be3b056fe1214aab314748461f21a31c3/pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda", size = 1907034, upload_time = "2025-04-02T09:47:46.843Z" }, - { url = "https://files.pythonhosted.org/packages/02/28/58442ad1c22b5b6742b992ba9518420235adced665513868f99a1c2638a5/pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4", size = 1956848, upload_time = "2025-04-02T09:47:48.404Z" }, - { url = "https://files.pythonhosted.org/packages/a1/eb/f54809b51c7e2a1d9f439f158b8dd94359321abcc98767e16fc48ae5a77e/pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea", size = 1903986, upload_time = "2025-04-02T09:47:49.839Z" }, - { url = 
"https://files.pythonhosted.org/packages/7a/24/eed3466a4308d79155f1cdd5c7432c80ddcc4530ba8623b79d5ced021641/pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a", size = 2033551, upload_time = "2025-04-02T09:47:51.648Z" }, - { url = "https://files.pythonhosted.org/packages/ab/14/df54b1a0bc9b6ded9b758b73139d2c11b4e8eb43e8ab9c5847c0a2913ada/pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266", size = 1852785, upload_time = "2025-04-02T09:47:53.149Z" }, - { url = "https://files.pythonhosted.org/packages/fa/96/e275f15ff3d34bb04b0125d9bc8848bf69f25d784d92a63676112451bfb9/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3", size = 1897758, upload_time = "2025-04-02T09:47:55.006Z" }, - { url = "https://files.pythonhosted.org/packages/b7/d8/96bc536e975b69e3a924b507d2a19aedbf50b24e08c80fb00e35f9baaed8/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a", size = 1986109, upload_time = "2025-04-02T09:47:56.532Z" }, - { url = "https://files.pythonhosted.org/packages/90/72/ab58e43ce7e900b88cb571ed057b2fcd0e95b708a2e0bed475b10130393e/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516", size = 2129159, upload_time = "2025-04-02T09:47:58.088Z" }, - { url = "https://files.pythonhosted.org/packages/dc/3f/52d85781406886c6870ac995ec0ba7ccc028b530b0798c9080531b409fdb/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764", size = 2680222, upload_time = "2025-04-02T09:47:59.591Z" }, - { 
url = "https://files.pythonhosted.org/packages/f4/56/6e2ef42f363a0eec0fd92f74a91e0ac48cd2e49b695aac1509ad81eee86a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d", size = 2006980, upload_time = "2025-04-02T09:48:01.397Z" }, - { url = "https://files.pythonhosted.org/packages/4c/c0/604536c4379cc78359f9ee0aa319f4aedf6b652ec2854953f5a14fc38c5a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4", size = 2120840, upload_time = "2025-04-02T09:48:03.056Z" }, - { url = "https://files.pythonhosted.org/packages/1f/46/9eb764814f508f0edfb291a0f75d10854d78113fa13900ce13729aaec3ae/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde", size = 2072518, upload_time = "2025-04-02T09:48:04.662Z" }, - { url = "https://files.pythonhosted.org/packages/42/e3/fb6b2a732b82d1666fa6bf53e3627867ea3131c5f39f98ce92141e3e3dc1/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e", size = 2248025, upload_time = "2025-04-02T09:48:06.226Z" }, - { url = "https://files.pythonhosted.org/packages/5c/9d/fbe8fe9d1aa4dac88723f10a921bc7418bd3378a567cb5e21193a3c48b43/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd", size = 2254991, upload_time = "2025-04-02T09:48:08.114Z" }, - { url = "https://files.pythonhosted.org/packages/aa/99/07e2237b8a66438d9b26482332cda99a9acccb58d284af7bc7c946a42fd3/pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f", size = 1915262, upload_time = "2025-04-02T09:48:09.708Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/f4/e457a7849beeed1e5defbcf5051c6f7b3c91a0624dd31543a64fc9adcf52/pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40", size = 1956626, upload_time = "2025-04-02T09:48:11.288Z" }, - { url = "https://files.pythonhosted.org/packages/20/d0/e8d567a7cff7b04e017ae164d98011f1e1894269fe8e90ea187a3cbfb562/pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523", size = 1909590, upload_time = "2025-04-02T09:48:12.861Z" }, - { url = "https://files.pythonhosted.org/packages/ef/fd/24ea4302d7a527d672c5be06e17df16aabfb4e9fdc6e0b345c21580f3d2a/pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d", size = 1812963, upload_time = "2025-04-02T09:48:14.553Z" }, - { url = "https://files.pythonhosted.org/packages/5f/95/4fbc2ecdeb5c1c53f1175a32d870250194eb2fdf6291b795ab08c8646d5d/pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c", size = 1986896, upload_time = "2025-04-02T09:48:16.222Z" }, - { url = "https://files.pythonhosted.org/packages/71/ae/fe31e7f4a62431222d8f65a3bd02e3fa7e6026d154a00818e6d30520ea77/pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18", size = 1931810, upload_time = "2025-04-02T09:48:17.97Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/17/19/ed6a078a5287aea7922de6841ef4c06157931622c89c2a47940837b5eecd/pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df", size = 434395, upload-time = "2025-04-02T09:49:41.8Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c8/ce/3cb22b07c29938f97ff5f5bb27521f95e2ebec399b882392deb68d6c440e/pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8", size = 2026640, upload-time = "2025-04-02T09:47:25.394Z" }, + { url = "https://files.pythonhosted.org/packages/19/78/f381d643b12378fee782a72126ec5d793081ef03791c28a0fd542a5bee64/pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498", size = 1852649, upload-time = "2025-04-02T09:47:27.417Z" }, + { url = "https://files.pythonhosted.org/packages/9d/2b/98a37b80b15aac9eb2c6cfc6dbd35e5058a352891c5cce3a8472d77665a6/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939", size = 1892472, upload-time = "2025-04-02T09:47:29.006Z" }, + { url = "https://files.pythonhosted.org/packages/4e/d4/3c59514e0f55a161004792b9ff3039da52448f43f5834f905abef9db6e4a/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d", size = 1977509, upload-time = "2025-04-02T09:47:33.464Z" }, + { url = "https://files.pythonhosted.org/packages/a9/b6/c2c7946ef70576f79a25db59a576bce088bdc5952d1b93c9789b091df716/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e", size = 2128702, upload-time = "2025-04-02T09:47:34.812Z" }, + { url = "https://files.pythonhosted.org/packages/88/fe/65a880f81e3f2a974312b61f82a03d85528f89a010ce21ad92f109d94deb/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3", size = 2679428, upload-time = "2025-04-02T09:47:37.315Z" }, + { 
url = "https://files.pythonhosted.org/packages/6f/ff/4459e4146afd0462fb483bb98aa2436d69c484737feaceba1341615fb0ac/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d", size = 2008753, upload-time = "2025-04-02T09:47:39.013Z" }, + { url = "https://files.pythonhosted.org/packages/7c/76/1c42e384e8d78452ededac8b583fe2550c84abfef83a0552e0e7478ccbc3/pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b", size = 2114849, upload-time = "2025-04-02T09:47:40.427Z" }, + { url = "https://files.pythonhosted.org/packages/00/72/7d0cf05095c15f7ffe0eb78914b166d591c0eed72f294da68378da205101/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39", size = 2069541, upload-time = "2025-04-02T09:47:42.01Z" }, + { url = "https://files.pythonhosted.org/packages/b3/69/94a514066bb7d8be499aa764926937409d2389c09be0b5107a970286ef81/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a", size = 2239225, upload-time = "2025-04-02T09:47:43.425Z" }, + { url = "https://files.pythonhosted.org/packages/84/b0/e390071eadb44b41f4f54c3cef64d8bf5f9612c92686c9299eaa09e267e2/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db", size = 2248373, upload-time = "2025-04-02T09:47:44.979Z" }, + { url = "https://files.pythonhosted.org/packages/d6/b2/288b3579ffc07e92af66e2f1a11be3b056fe1214aab314748461f21a31c3/pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda", size = 1907034, upload-time = "2025-04-02T09:47:46.843Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/28/58442ad1c22b5b6742b992ba9518420235adced665513868f99a1c2638a5/pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4", size = 1956848, upload-time = "2025-04-02T09:47:48.404Z" }, + { url = "https://files.pythonhosted.org/packages/a1/eb/f54809b51c7e2a1d9f439f158b8dd94359321abcc98767e16fc48ae5a77e/pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea", size = 1903986, upload-time = "2025-04-02T09:47:49.839Z" }, + { url = "https://files.pythonhosted.org/packages/7a/24/eed3466a4308d79155f1cdd5c7432c80ddcc4530ba8623b79d5ced021641/pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a", size = 2033551, upload-time = "2025-04-02T09:47:51.648Z" }, + { url = "https://files.pythonhosted.org/packages/ab/14/df54b1a0bc9b6ded9b758b73139d2c11b4e8eb43e8ab9c5847c0a2913ada/pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266", size = 1852785, upload-time = "2025-04-02T09:47:53.149Z" }, + { url = "https://files.pythonhosted.org/packages/fa/96/e275f15ff3d34bb04b0125d9bc8848bf69f25d784d92a63676112451bfb9/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3", size = 1897758, upload-time = "2025-04-02T09:47:55.006Z" }, + { url = "https://files.pythonhosted.org/packages/b7/d8/96bc536e975b69e3a924b507d2a19aedbf50b24e08c80fb00e35f9baaed8/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a", size = 1986109, upload-time = "2025-04-02T09:47:56.532Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/72/ab58e43ce7e900b88cb571ed057b2fcd0e95b708a2e0bed475b10130393e/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516", size = 2129159, upload-time = "2025-04-02T09:47:58.088Z" }, + { url = "https://files.pythonhosted.org/packages/dc/3f/52d85781406886c6870ac995ec0ba7ccc028b530b0798c9080531b409fdb/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764", size = 2680222, upload-time = "2025-04-02T09:47:59.591Z" }, + { url = "https://files.pythonhosted.org/packages/f4/56/6e2ef42f363a0eec0fd92f74a91e0ac48cd2e49b695aac1509ad81eee86a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d", size = 2006980, upload-time = "2025-04-02T09:48:01.397Z" }, + { url = "https://files.pythonhosted.org/packages/4c/c0/604536c4379cc78359f9ee0aa319f4aedf6b652ec2854953f5a14fc38c5a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4", size = 2120840, upload-time = "2025-04-02T09:48:03.056Z" }, + { url = "https://files.pythonhosted.org/packages/1f/46/9eb764814f508f0edfb291a0f75d10854d78113fa13900ce13729aaec3ae/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde", size = 2072518, upload-time = "2025-04-02T09:48:04.662Z" }, + { url = "https://files.pythonhosted.org/packages/42/e3/fb6b2a732b82d1666fa6bf53e3627867ea3131c5f39f98ce92141e3e3dc1/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e", size = 2248025, upload-time = "2025-04-02T09:48:06.226Z" }, + { url 
= "https://files.pythonhosted.org/packages/5c/9d/fbe8fe9d1aa4dac88723f10a921bc7418bd3378a567cb5e21193a3c48b43/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd", size = 2254991, upload-time = "2025-04-02T09:48:08.114Z" }, + { url = "https://files.pythonhosted.org/packages/aa/99/07e2237b8a66438d9b26482332cda99a9acccb58d284af7bc7c946a42fd3/pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f", size = 1915262, upload-time = "2025-04-02T09:48:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f4/e457a7849beeed1e5defbcf5051c6f7b3c91a0624dd31543a64fc9adcf52/pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40", size = 1956626, upload-time = "2025-04-02T09:48:11.288Z" }, + { url = "https://files.pythonhosted.org/packages/20/d0/e8d567a7cff7b04e017ae164d98011f1e1894269fe8e90ea187a3cbfb562/pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523", size = 1909590, upload-time = "2025-04-02T09:48:12.861Z" }, + { url = "https://files.pythonhosted.org/packages/ef/fd/24ea4302d7a527d672c5be06e17df16aabfb4e9fdc6e0b345c21580f3d2a/pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d", size = 1812963, upload-time = "2025-04-02T09:48:14.553Z" }, + { url = "https://files.pythonhosted.org/packages/5f/95/4fbc2ecdeb5c1c53f1175a32d870250194eb2fdf6291b795ab08c8646d5d/pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c", size = 1986896, upload-time = "2025-04-02T09:48:16.222Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/ae/fe31e7f4a62431222d8f65a3bd02e3fa7e6026d154a00818e6d30520ea77/pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18", size = 1931810, upload-time = "2025-04-02T09:48:17.97Z" }, ] [[package]] @@ -683,86 +796,95 @@ dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/88/82/c79424d7d8c29b994fb01d277da57b0a9b09cc03c3ff875f9bd8a86b2145/pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585", size = 83550, upload_time = "2025-02-27T10:10:32.338Z" } +sdist = { url = "https://files.pythonhosted.org/packages/88/82/c79424d7d8c29b994fb01d277da57b0a9b09cc03c3ff875f9bd8a86b2145/pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585", size = 83550, upload-time = "2025-02-27T10:10:32.338Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0b/53/a64f03044927dc47aafe029c42a5b7aabc38dfb813475e0e1bf71c4a59d0/pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c", size = 30839, upload_time = "2025-02-27T10:10:30.711Z" }, + { url = "https://files.pythonhosted.org/packages/0b/53/a64f03044927dc47aafe029c42a5b7aabc38dfb813475e0e1bf71c4a59d0/pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c", size = 30839, upload-time = "2025-02-27T10:10:30.711Z" }, ] [[package]] name = "pygments" version = "2.19.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload_time = 
"2025-01-06T17:26:30.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload_time = "2025-01-06T17:26:25.553Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, ] [[package]] name = "pyperclip" version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/30/23/2f0a3efc4d6a32f3b63cdff36cd398d9701d26cda58e3ab97ac79fb5e60d/pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310", size = 20961, upload_time = "2024-06-18T20:38:48.401Z" } +sdist = { url = "https://files.pythonhosted.org/packages/30/23/2f0a3efc4d6a32f3b63cdff36cd398d9701d26cda58e3ab97ac79fb5e60d/pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310", size = 20961, upload-time = "2024-06-18T20:38:48.401Z" } [[package]] name = "pysocks" version = "1.7.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/11/293dd436aea955d45fc4e8a35b6ae7270f5b8e00b53cf6c024c83b657a11/PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0", size = 284429, upload_time = "2019-09-20T02:07:35.714Z" } +sdist = { url 
= "https://files.pythonhosted.org/packages/bd/11/293dd436aea955d45fc4e8a35b6ae7270f5b8e00b53cf6c024c83b657a11/PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0", size = 284429, upload-time = "2019-09-20T02:07:35.714Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/59/b4572118e098ac8e46e399a1dd0f2d85403ce8bbaad9ec79373ed6badaf9/PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5", size = 16725, upload_time = "2019-09-20T02:06:22.938Z" }, + { url = "https://files.pythonhosted.org/packages/8d/59/b4572118e098ac8e46e399a1dd0f2d85403ce8bbaad9ec79373ed6badaf9/PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5", size = 16725, upload-time = "2019-09-20T02:06:22.938Z" }, ] [[package]] name = "python-dotenv" version = "1.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920, upload_time = "2025-03-25T10:14:56.835Z" } +sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920, upload-time = "2025-03-25T10:14:56.835Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload_time = "2025-03-25T10:14:55.034Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload-time = "2025-03-25T10:14:55.034Z" }, ] [[package]] name = "python-multipart" version = "0.0.20" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload_time = "2024-12-16T19:45:46.972Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload_time = "2024-12-16T19:45:44.423Z" }, + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, +] + +[[package]] +name = "pywin32-ctypes" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/85/9f/01a1a99704853cb63f253eea009390c88e7131c67e66a0a02099a8c917cb/pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755", size = 29471, upload-time = 
"2024-08-14T10:15:34.626Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/3d/8161f7711c017e01ac9f008dfddd9410dff3674334c233bde66e7ba65bbf/pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8", size = 30756, upload-time = "2024-08-14T10:15:33.187Z" }, ] [[package]] name = "pyyaml" version = "6.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload_time = "2024-08-06T20:33:50.674Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload_time = "2024-08-06T20:32:25.131Z" }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload_time = "2024-08-06T20:32:26.511Z" }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload_time = "2024-08-06T20:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload_time = 
"2024-08-06T20:32:30.058Z" }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload_time = "2024-08-06T20:32:31.881Z" }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload_time = "2024-08-06T20:32:37.083Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload_time = "2024-08-06T20:32:38.898Z" }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload_time = "2024-08-06T20:32:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload_time = "2024-08-06T20:32:41.93Z" }, - { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload_time = "2024-08-06T20:32:43.4Z" }, - { url = 
"https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload_time = "2024-08-06T20:32:44.801Z" }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload_time = "2024-08-06T20:32:46.432Z" }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload_time = "2024-08-06T20:32:51.188Z" }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload_time = "2024-08-06T20:32:53.019Z" }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload_time = "2024-08-06T20:32:54.708Z" }, - { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload_time = "2024-08-06T20:32:56.985Z" }, - { url = 
"https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload_time = "2024-08-06T20:33:03.001Z" }, - { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload_time = "2024-08-06T20:33:04.33Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, ] [[package]] name = "readchar" version = "4.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dd/f8/8657b8cbb4ebeabfbdf991ac40eca8a1d1bd012011bd44ad1ed10f5cb494/readchar-4.2.1.tar.gz", hash = "sha256:91ce3faf07688de14d800592951e5575e9c7a3213738ed01d394dcc949b79adb", size = 9685, upload_time = "2024-11-04T18:28:07.757Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/f8/8657b8cbb4ebeabfbdf991ac40eca8a1d1bd012011bd44ad1ed10f5cb494/readchar-4.2.1.tar.gz", hash = "sha256:91ce3faf07688de14d800592951e5575e9c7a3213738ed01d394dcc949b79adb", size = 9685, upload-time = "2024-11-04T18:28:07.757Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/10/e4b1e0e5b6b6745c8098c275b69bc9d73e9542d5c7da4f137542b499ed44/readchar-4.2.1-py3-none-any.whl", hash = "sha256:a769305cd3994bb5fa2764aa4073452dc105a4ec39068ffe6efd3c20c60acc77", size = 9350, upload_time = "2024-11-04T18:28:02.859Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/10/e4b1e0e5b6b6745c8098c275b69bc9d73e9542d5c7da4f137542b499ed44/readchar-4.2.1-py3-none-any.whl", hash = "sha256:a769305cd3994bb5fa2764aa4073452dc105a4ec39068ffe6efd3c20c60acc77", size = 9350, upload-time = "2024-11-04T18:28:02.859Z" }, ] [[package]] @@ -775,9 +897,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload_time = "2024-05-29T15:37:49.536Z" } +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload-time = "2024-05-29T15:37:49.536Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload_time = "2024-05-29T15:37:47.027Z" }, + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" }, ] [[package]] @@ -788,9 +910,9 @@ dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078, upload_time = "2025-03-30T14:15:14.23Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078, upload-time = "2025-03-30T14:15:14.23Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload_time = "2025-03-30T14:15:12.283Z" }, + { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, ] [[package]] @@ -802,9 +924,9 @@ dependencies = [ { name = "rich" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e1/33/18332e1359803ae6407a1e605a6bdb253a426ffe931555f1299f9e39eece/rich_toolkit-0.14.4.tar.gz", hash = "sha256:db256cf45165cae381c9bbf3b48a0fd4d99a07c80155cc655c80212a62e28fe1", size = 104487, upload_time = "2025-04-29T19:43:36.904Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/33/18332e1359803ae6407a1e605a6bdb253a426ffe931555f1299f9e39eece/rich_toolkit-0.14.4.tar.gz", hash = "sha256:db256cf45165cae381c9bbf3b48a0fd4d99a07c80155cc655c80212a62e28fe1", size = 104487, upload-time = "2025-04-29T19:43:36.904Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/44/48/c6d43d4c56c45c0171c771b2b73deeec493efb57795b651319201e7c4638/rich_toolkit-0.14.4-py3-none-any.whl", hash = "sha256:cc71ebee83eaa122d8e42882408bc5a4bf0240bbf1e368811ee56d249b3d742a", size = 24258, upload_time = "2025-04-29T19:43:35.502Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/48/c6d43d4c56c45c0171c771b2b73deeec493efb57795b651319201e7c4638/rich_toolkit-0.14.4-py3-none-any.whl", hash = "sha256:cc71ebee83eaa122d8e42882408bc5a4bf0240bbf1e368811ee56d249b3d742a", size = 24258, upload-time = "2025-04-29T19:43:35.502Z" }, ] [[package]] @@ -814,9 +936,22 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "xmod" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/26/6d/b9aace390f62db5d7d2c77eafce3d42774f27f1829d24fa9b6f598b3ef71/runs-1.2.2.tar.gz", hash = "sha256:9dc1815e2895cfb3a48317b173b9f1eac9ba5549b36a847b5cc60c3bf82ecef1", size = 5474, upload_time = "2024-01-25T14:44:01.563Z" } +sdist = { url = "https://files.pythonhosted.org/packages/26/6d/b9aace390f62db5d7d2c77eafce3d42774f27f1829d24fa9b6f598b3ef71/runs-1.2.2.tar.gz", hash = "sha256:9dc1815e2895cfb3a48317b173b9f1eac9ba5549b36a847b5cc60c3bf82ecef1", size = 5474, upload-time = "2024-01-25T14:44:01.563Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/d6/17caf2e4af1dec288477a0cbbe4a96fbc9b8a28457dce3f1f452630ce216/runs-1.2.2-py3-none-any.whl", hash = "sha256:0980dcbc25aba1505f307ac4f0e9e92cbd0be2a15a1e983ee86c24c87b839dfd", size = 7033, upload-time = "2024-01-25T14:43:59.959Z" }, +] + +[[package]] +name = "secretstorage" +version = "3.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "jeepney" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/53/a4/f48c9d79cb507ed1373477dbceaba7401fd8a23af63b837fa61f1dcd3691/SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77", size = 19739, upload-time = "2022-08-13T16:22:46.976Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/d6/17caf2e4af1dec288477a0cbbe4a96fbc9b8a28457dce3f1f452630ce216/runs-1.2.2-py3-none-any.whl", hash = "sha256:0980dcbc25aba1505f307ac4f0e9e92cbd0be2a15a1e983ee86c24c87b839dfd", size 
= 7033, upload_time = "2024-01-25T14:43:59.959Z" }, + { url = "https://files.pythonhosted.org/packages/54/24/b4293291fa1dd830f353d2cb163295742fa87f179fcc8a20a306a81978b7/SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99", size = 15221, upload-time = "2022-08-13T16:22:44.457Z" }, ] [[package]] @@ -831,45 +966,45 @@ dependencies = [ { name = "urllib3", extra = ["socks"] }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e0/bf/642cce8b5a9edad8e4880fdefbeb24f69bec2086b1121c63f883c412b797/selenium-4.31.0.tar.gz", hash = "sha256:441cffc436a2e6659fe3cfb012692435652efd38b0d368d16f661a5db47825f5", size = 855418, upload_time = "2025-04-05T00:43:06.447Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/bf/642cce8b5a9edad8e4880fdefbeb24f69bec2086b1121c63f883c412b797/selenium-4.31.0.tar.gz", hash = "sha256:441cffc436a2e6659fe3cfb012692435652efd38b0d368d16f661a5db47825f5", size = 855418, upload-time = "2025-04-05T00:43:06.447Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/53/212db779d2481b0a8428365960596f8d5a4d482ae12c441d0507fd54aaf2/selenium-4.31.0-py3-none-any.whl", hash = "sha256:7b8b8d5e424d7133cb7aa656263b19ac505ec26d65c0f921a696e7e2c5ccd95b", size = 9350584, upload_time = "2025-04-05T00:43:04.04Z" }, + { url = "https://files.pythonhosted.org/packages/32/53/212db779d2481b0a8428365960596f8d5a4d482ae12c441d0507fd54aaf2/selenium-4.31.0-py3-none-any.whl", hash = "sha256:7b8b8d5e424d7133cb7aa656263b19ac505ec26d65c0f921a696e7e2c5ccd95b", size = 9350584, upload-time = "2025-04-05T00:43:04.04Z" }, ] [[package]] name = "shellingham" version = "1.5.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 
10310, upload_time = "2023-10-24T04:13:40.426Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload_time = "2023-10-24T04:13:38.866Z" }, + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, ] [[package]] name = "six" version = "1.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload_time = "2024-12-04T17:35:28.174Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload_time = "2024-12-04T17:35:26.475Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, ] [[package]] name = "sniffio" version = "1.3.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload_time = "2024-02-25T23:20:04.057Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload_time = "2024-02-25T23:20:01.196Z" }, + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, ] [[package]] name = "sortedcontainers" version = "2.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload_time = "2021-05-16T22:03:42.897Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload_time = "2021-05-16T22:03:41.177Z" }, + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, ] [[package]] @@ -880,9 +1015,9 @@ dependencies = [ { name = "anyio" }, { name = "starlette" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/71/a4/80d2a11af59fe75b48230846989e93979c892d3a20016b42bb44edb9e398/sse_starlette-2.2.1.tar.gz", hash = "sha256:54470d5f19274aeed6b2d473430b08b4b379ea851d953b11d7f1c4a2c118b419", size = 17376, upload_time = "2024-12-25T09:09:30.616Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/a4/80d2a11af59fe75b48230846989e93979c892d3a20016b42bb44edb9e398/sse_starlette-2.2.1.tar.gz", hash = "sha256:54470d5f19274aeed6b2d473430b08b4b379ea851d953b11d7f1c4a2c118b419", size = 17376, upload-time = "2024-12-25T09:09:30.616Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/e0/5b8bd393f27f4a62461c5cf2479c75a2cc2ffa330976f9f00f5f6e4f50eb/sse_starlette-2.2.1-py3-none-any.whl", hash = "sha256:6410a3d3ba0c89e7675d4c273a301d64649c03a5ef1ca101f10b47f895fd0e99", size = 10120, upload_time = "2024-12-25T09:09:26.761Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/e0/5b8bd393f27f4a62461c5cf2479c75a2cc2ffa330976f9f00f5f6e4f50eb/sse_starlette-2.2.1-py3-none-any.whl", hash = "sha256:6410a3d3ba0c89e7675d4c273a301d64649c03a5ef1ca101f10b47f895fd0e99", size = 10120, upload-time = "2024-12-25T09:09:26.761Z" }, ] [[package]] @@ -892,9 +1027,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/1b/52b27f2e13ceedc79a908e29eac426a63465a1a01248e5f24aa36a62aeb3/starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230", size = 2580102, upload_time = "2025-03-08T10:55:34.504Z" } +sdist = { url = "https://files.pythonhosted.org/packages/04/1b/52b27f2e13ceedc79a908e29eac426a63465a1a01248e5f24aa36a62aeb3/starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230", size = 2580102, upload-time = "2025-03-08T10:55:34.504Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/4b/528ccf7a982216885a1ff4908e886b8fb5f19862d1962f56a3fce2435a70/starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227", size = 71995, upload_time = "2025-03-08T10:55:32.662Z" }, + { url = "https://files.pythonhosted.org/packages/a0/4b/528ccf7a982216885a1ff4908e886b8fb5f19862d1962f56a3fce2435a70/starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227", size = 71995, upload-time = "2025-03-08T10:55:32.662Z" }, ] [[package]] @@ -909,9 +1044,9 @@ dependencies = [ { name = "sniffio" }, { name = "sortedcontainers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/47/f62e62a1a6f37909aed0bf8f5d5411e06fa03846cfcb64540cd1180ccc9f/trio-0.29.0.tar.gz", hash = "sha256:ea0d3967159fc130acb6939a0be0e558e364fee26b5deeecc893a6b08c361bdf", size = 588952, upload_time = "2025-02-14T07:13:50.724Z" } +sdist = { 
url = "https://files.pythonhosted.org/packages/a1/47/f62e62a1a6f37909aed0bf8f5d5411e06fa03846cfcb64540cd1180ccc9f/trio-0.29.0.tar.gz", hash = "sha256:ea0d3967159fc130acb6939a0be0e558e364fee26b5deeecc893a6b08c361bdf", size = 588952, upload-time = "2025-02-14T07:13:50.724Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/55/c4d9bea8b3d7937901958f65124123512419ab0eb73695e5f382521abbfb/trio-0.29.0-py3-none-any.whl", hash = "sha256:d8c463f1a9cc776ff63e331aba44c125f423a5a13c684307e828d930e625ba66", size = 492920, upload_time = "2025-02-14T07:13:48.696Z" }, + { url = "https://files.pythonhosted.org/packages/c9/55/c4d9bea8b3d7937901958f65124123512419ab0eb73695e5f382521abbfb/trio-0.29.0-py3-none-any.whl", hash = "sha256:d8c463f1a9cc776ff63e331aba44c125f423a5a13c684307e828d930e625ba66", size = 492920, upload-time = "2025-02-14T07:13:48.696Z" }, ] [[package]] @@ -923,9 +1058,9 @@ dependencies = [ { name = "trio" }, { name = "wsproto" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d1/3c/8b4358e81f2f2cfe71b66a267f023a91db20a817b9425dd964873796980a/trio_websocket-0.12.2.tar.gz", hash = "sha256:22c72c436f3d1e264d0910a3951934798dcc5b00ae56fc4ee079d46c7cf20fae", size = 33549, upload_time = "2025-02-25T05:16:58.947Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/3c/8b4358e81f2f2cfe71b66a267f023a91db20a817b9425dd964873796980a/trio_websocket-0.12.2.tar.gz", hash = "sha256:22c72c436f3d1e264d0910a3951934798dcc5b00ae56fc4ee079d46c7cf20fae", size = 33549, upload-time = "2025-02-25T05:16:58.947Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/19/eb640a397bba49ba49ef9dbe2e7e5c04202ba045b6ce2ec36e9cadc51e04/trio_websocket-0.12.2-py3-none-any.whl", hash = "sha256:df605665f1db533f4a386c94525870851096a223adcb97f72a07e8b4beba45b6", size = 21221, upload_time = "2025-02-25T05:16:57.545Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/19/eb640a397bba49ba49ef9dbe2e7e5c04202ba045b6ce2ec36e9cadc51e04/trio_websocket-0.12.2-py3-none-any.whl", hash = "sha256:df605665f1db533f4a386c94525870851096a223adcb97f72a07e8b4beba45b6", size = 21221, upload-time = "2025-02-25T05:16:57.545Z" }, ] [[package]] @@ -938,18 +1073,18 @@ dependencies = [ { name = "shellingham" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/6f/3991f0f1c7fcb2df31aef28e0594d8d54b05393a0e4e34c65e475c2a5d41/typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5", size = 100711, upload_time = "2025-02-27T19:17:34.807Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/6f/3991f0f1c7fcb2df31aef28e0594d8d54b05393a0e4e34c65e475c2a5d41/typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5", size = 100711, upload-time = "2025-02-27T19:17:34.807Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/fc/5b29fea8cee020515ca82cc68e3b8e1e34bb19a3535ad854cac9257b414c/typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc", size = 45061, upload_time = "2025-02-27T19:17:32.111Z" }, + { url = "https://files.pythonhosted.org/packages/7f/fc/5b29fea8cee020515ca82cc68e3b8e1e34bb19a3535ad854cac9257b414c/typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc", size = 45061, upload-time = "2025-02-27T19:17:32.111Z" }, ] [[package]] name = "typing-extensions" version = "4.13.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967, upload_time = "2025-04-10T14:19:05.416Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967, upload-time = "2025-04-10T14:19:05.416Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806, upload_time = "2025-04-10T14:19:03.967Z" }, + { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806, upload-time = "2025-04-10T14:19:03.967Z" }, ] [[package]] @@ -959,18 +1094,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222, upload_time = "2025-02-25T17:27:59.638Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222, upload-time = "2025-02-25T17:27:59.638Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125, upload_time = "2025-02-25T17:27:57.754Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125, upload-time = "2025-02-25T17:27:57.754Z" }, ] [[package]] name = "urllib3" version = "2.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672, upload_time = "2025-04-10T15:23:39.232Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672, upload-time = "2025-04-10T15:23:39.232Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload_time = "2025-04-10T15:23:37.377Z" }, + { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" }, ] [package.optional-dependencies] @@ -986,9 +1121,9 @@ dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568, upload_time = "2024-12-15T13:33:30.42Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568, upload-time = "2024-12-15T13:33:30.42Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315, upload_time = "2024-12-15T13:33:27.467Z" }, + { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315, upload-time = "2024-12-15T13:33:27.467Z" }, ] [package.optional-dependencies] @@ -1006,20 +1141,20 @@ standard = [ name = "uvloop" version = "0.21.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741, upload_time = "2024-10-14T23:38:35.489Z" } +sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741, upload-time = "2024-10-14T23:38:35.489Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284, upload_time = "2024-10-14T23:37:47.833Z" }, - { url = 
"https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349, upload_time = "2024-10-14T23:37:50.149Z" }, - { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089, upload_time = "2024-10-14T23:37:51.703Z" }, - { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770, upload_time = "2024-10-14T23:37:54.122Z" }, - { url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321, upload_time = "2024-10-14T23:37:55.766Z" }, - { url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022, upload_time = "2024-10-14T23:37:58.195Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123, upload_time = "2024-10-14T23:38:00.688Z" }, - { url = 
"https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325, upload_time = "2024-10-14T23:38:02.309Z" }, - { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806, upload_time = "2024-10-14T23:38:04.711Z" }, - { url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068, upload_time = "2024-10-14T23:38:06.385Z" }, - { url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428, upload_time = "2024-10-14T23:38:08.416Z" }, - { url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018, upload_time = "2024-10-14T23:38:10.888Z" }, + { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284, upload-time = "2024-10-14T23:37:47.833Z" }, + { url = 
"https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349, upload-time = "2024-10-14T23:37:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089, upload-time = "2024-10-14T23:37:51.703Z" }, + { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770, upload-time = "2024-10-14T23:37:54.122Z" }, + { url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321, upload-time = "2024-10-14T23:37:55.766Z" }, + { url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022, upload-time = "2024-10-14T23:37:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123, upload-time = "2024-10-14T23:38:00.688Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325, upload-time = "2024-10-14T23:38:02.309Z" }, + { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806, upload-time = "2024-10-14T23:38:04.711Z" }, + { url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068, upload-time = "2024-10-14T23:38:06.385Z" }, + { url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428, upload-time = "2024-10-14T23:38:08.416Z" }, + { url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018, upload-time = "2024-10-14T23:38:10.888Z" }, ] [[package]] @@ -1031,9 +1166,9 @@ dependencies = [ { name = "filelock" }, { name = "platformdirs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/e0/633e369b91bbc664df47dcb5454b6c7cf441e8f5b9d0c250ce9f0546401e/virtualenv-20.30.0.tar.gz", hash = "sha256:800863162bcaa5450a6e4d721049730e7f2dae07720e0902b0e4040bd6f9ada8", size = 4346945, upload_time = "2025-03-31T16:33:29.185Z" } 
+sdist = { url = "https://files.pythonhosted.org/packages/38/e0/633e369b91bbc664df47dcb5454b6c7cf441e8f5b9d0c250ce9f0546401e/virtualenv-20.30.0.tar.gz", hash = "sha256:800863162bcaa5450a6e4d721049730e7f2dae07720e0902b0e4040bd6f9ada8", size = 4346945, upload-time = "2025-03-31T16:33:29.185Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4c/ed/3cfeb48175f0671ec430ede81f628f9fb2b1084c9064ca67ebe8c0ed6a05/virtualenv-20.30.0-py3-none-any.whl", hash = "sha256:e34302959180fca3af42d1800df014b35019490b119eba981af27f2fa486e5d6", size = 4329461, upload_time = "2025-03-31T16:33:26.758Z" }, + { url = "https://files.pythonhosted.org/packages/4c/ed/3cfeb48175f0671ec430ede81f628f9fb2b1084c9064ca67ebe8c0ed6a05/virtualenv-20.30.0-py3-none-any.whl", hash = "sha256:e34302959180fca3af42d1800df014b35019490b119eba981af27f2fa486e5d6", size = 4329461, upload-time = "2025-03-31T16:33:26.758Z" }, ] [[package]] @@ -1043,82 +1178,82 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/03/e2/8ed598c42057de7aa5d97c472254af4906ff0a59a66699d426fc9ef795d7/watchfiles-1.0.5.tar.gz", hash = "sha256:b7529b5dcc114679d43827d8c35a07c493ad6f083633d573d81c660abc5979e9", size = 94537, upload_time = "2025-04-08T10:36:26.722Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/8c/4f0b9bdb75a1bfbd9c78fad7d8854369283f74fe7cf03eb16be77054536d/watchfiles-1.0.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5eb568c2aa6018e26da9e6c86f3ec3fd958cee7f0311b35c2630fa4217d17f2", size = 401511, upload_time = "2025-04-08T10:35:17.956Z" }, - { url = "https://files.pythonhosted.org/packages/dc/4e/7e15825def77f8bd359b6d3f379f0c9dac4eb09dd4ddd58fd7d14127179c/watchfiles-1.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0a04059f4923ce4e856b4b4e5e783a70f49d9663d22a4c3b3298165996d1377f", size = 392715, upload_time = "2025-04-08T10:35:19.202Z" }, - { url = 
"https://files.pythonhosted.org/packages/58/65/b72fb817518728e08de5840d5d38571466c1b4a3f724d190cec909ee6f3f/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e380c89983ce6e6fe2dd1e1921b9952fb4e6da882931abd1824c092ed495dec", size = 454138, upload_time = "2025-04-08T10:35:20.586Z" }, - { url = "https://files.pythonhosted.org/packages/3e/a4/86833fd2ea2e50ae28989f5950b5c3f91022d67092bfec08f8300d8b347b/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fe43139b2c0fdc4a14d4f8d5b5d967f7a2777fd3d38ecf5b1ec669b0d7e43c21", size = 458592, upload_time = "2025-04-08T10:35:21.87Z" }, - { url = "https://files.pythonhosted.org/packages/38/7e/42cb8df8be9a37e50dd3a818816501cf7a20d635d76d6bd65aae3dbbff68/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee0822ce1b8a14fe5a066f93edd20aada932acfe348bede8aa2149f1a4489512", size = 487532, upload_time = "2025-04-08T10:35:23.143Z" }, - { url = "https://files.pythonhosted.org/packages/fc/fd/13d26721c85d7f3df6169d8b495fcac8ab0dc8f0945ebea8845de4681dab/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0dbcb1c2d8f2ab6e0a81c6699b236932bd264d4cef1ac475858d16c403de74d", size = 522865, upload_time = "2025-04-08T10:35:24.702Z" }, - { url = "https://files.pythonhosted.org/packages/a1/0d/7f9ae243c04e96c5455d111e21b09087d0eeaf9a1369e13a01c7d3d82478/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2014a2b18ad3ca53b1f6c23f8cd94a18ce930c1837bd891262c182640eb40a6", size = 499887, upload_time = "2025-04-08T10:35:25.969Z" }, - { url = "https://files.pythonhosted.org/packages/8e/0f/a257766998e26aca4b3acf2ae97dff04b57071e991a510857d3799247c67/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f6ae86d5cb647bf58f9f655fcf577f713915a5d69057a0371bc257e2553234", size = 454498, upload_time = 
"2025-04-08T10:35:27.353Z" }, - { url = "https://files.pythonhosted.org/packages/81/79/8bf142575a03e0af9c3d5f8bcae911ee6683ae93a625d349d4ecf4c8f7df/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1a7bac2bde1d661fb31f4d4e8e539e178774b76db3c2c17c4bb3e960a5de07a2", size = 630663, upload_time = "2025-04-08T10:35:28.685Z" }, - { url = "https://files.pythonhosted.org/packages/f1/80/abe2e79f610e45c63a70d271caea90c49bbf93eb00fa947fa9b803a1d51f/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ab626da2fc1ac277bbf752446470b367f84b50295264d2d313e28dc4405d663", size = 625410, upload_time = "2025-04-08T10:35:30.42Z" }, - { url = "https://files.pythonhosted.org/packages/91/6f/bc7fbecb84a41a9069c2c6eb6319f7f7df113adf113e358c57fc1aff7ff5/watchfiles-1.0.5-cp312-cp312-win32.whl", hash = "sha256:9f4571a783914feda92018ef3901dab8caf5b029325b5fe4558c074582815249", size = 277965, upload_time = "2025-04-08T10:35:32.023Z" }, - { url = "https://files.pythonhosted.org/packages/99/a5/bf1c297ea6649ec59e935ab311f63d8af5faa8f0b86993e3282b984263e3/watchfiles-1.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:360a398c3a19672cf93527f7e8d8b60d8275119c5d900f2e184d32483117a705", size = 291693, upload_time = "2025-04-08T10:35:33.225Z" }, - { url = "https://files.pythonhosted.org/packages/7f/7b/fd01087cc21db5c47e5beae507b87965db341cce8a86f9eb12bf5219d4e0/watchfiles-1.0.5-cp312-cp312-win_arm64.whl", hash = "sha256:1a2902ede862969077b97523987c38db28abbe09fb19866e711485d9fbf0d417", size = 283287, upload_time = "2025-04-08T10:35:34.568Z" }, - { url = "https://files.pythonhosted.org/packages/c7/62/435766874b704f39b2fecd8395a29042db2b5ec4005bd34523415e9bd2e0/watchfiles-1.0.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0b289572c33a0deae62daa57e44a25b99b783e5f7aed81b314232b3d3c81a11d", size = 401531, upload_time = "2025-04-08T10:35:35.792Z" }, - { url = 
"https://files.pythonhosted.org/packages/6e/a6/e52a02c05411b9cb02823e6797ef9bbba0bfaf1bb627da1634d44d8af833/watchfiles-1.0.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a056c2f692d65bf1e99c41045e3bdcaea3cb9e6b5a53dcaf60a5f3bd95fc9763", size = 392417, upload_time = "2025-04-08T10:35:37.048Z" }, - { url = "https://files.pythonhosted.org/packages/3f/53/c4af6819770455932144e0109d4854437769672d7ad897e76e8e1673435d/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9dca99744991fc9850d18015c4f0438865414e50069670f5f7eee08340d8b40", size = 453423, upload_time = "2025-04-08T10:35:38.357Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d1/8e88df58bbbf819b8bc5cfbacd3c79e01b40261cad0fc84d1e1ebd778a07/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:894342d61d355446d02cd3988a7326af344143eb33a2fd5d38482a92072d9563", size = 458185, upload_time = "2025-04-08T10:35:39.708Z" }, - { url = "https://files.pythonhosted.org/packages/ff/70/fffaa11962dd5429e47e478a18736d4e42bec42404f5ee3b92ef1b87ad60/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab44e1580924d1ffd7b3938e02716d5ad190441965138b4aa1d1f31ea0877f04", size = 486696, upload_time = "2025-04-08T10:35:41.469Z" }, - { url = "https://files.pythonhosted.org/packages/39/db/723c0328e8b3692d53eb273797d9a08be6ffb1d16f1c0ba2bdbdc2a3852c/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6f9367b132078b2ceb8d066ff6c93a970a18c3029cea37bfd7b2d3dd2e5db8f", size = 522327, upload_time = "2025-04-08T10:35:43.289Z" }, - { url = "https://files.pythonhosted.org/packages/cd/05/9fccc43c50c39a76b68343484b9da7b12d42d0859c37c61aec018c967a32/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2e55a9b162e06e3f862fb61e399fe9f05d908d019d87bf5b496a04ef18a970a", size = 499741, upload_time = "2025-04-08T10:35:44.574Z" }, - { url = 
"https://files.pythonhosted.org/packages/23/14/499e90c37fa518976782b10a18b18db9f55ea73ca14641615056f8194bb3/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0125f91f70e0732a9f8ee01e49515c35d38ba48db507a50c5bdcad9503af5827", size = 453995, upload_time = "2025-04-08T10:35:46.336Z" }, - { url = "https://files.pythonhosted.org/packages/61/d9/f75d6840059320df5adecd2c687fbc18960a7f97b55c300d20f207d48aef/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:13bb21f8ba3248386337c9fa51c528868e6c34a707f729ab041c846d52a0c69a", size = 629693, upload_time = "2025-04-08T10:35:48.161Z" }, - { url = "https://files.pythonhosted.org/packages/fc/17/180ca383f5061b61406477218c55d66ec118e6c0c51f02d8142895fcf0a9/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:839ebd0df4a18c5b3c1b890145b5a3f5f64063c2a0d02b13c76d78fe5de34936", size = 624677, upload_time = "2025-04-08T10:35:49.65Z" }, - { url = "https://files.pythonhosted.org/packages/bf/15/714d6ef307f803f236d69ee9d421763707899d6298d9f3183e55e366d9af/watchfiles-1.0.5-cp313-cp313-win32.whl", hash = "sha256:4a8ec1e4e16e2d5bafc9ba82f7aaecfeec990ca7cd27e84fb6f191804ed2fcfc", size = 277804, upload_time = "2025-04-08T10:35:51.093Z" }, - { url = "https://files.pythonhosted.org/packages/a8/b4/c57b99518fadf431f3ef47a610839e46e5f8abf9814f969859d1c65c02c7/watchfiles-1.0.5-cp313-cp313-win_amd64.whl", hash = "sha256:f436601594f15bf406518af922a89dcaab416568edb6f65c4e5bbbad1ea45c11", size = 291087, upload_time = "2025-04-08T10:35:52.458Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/03/e2/8ed598c42057de7aa5d97c472254af4906ff0a59a66699d426fc9ef795d7/watchfiles-1.0.5.tar.gz", hash = "sha256:b7529b5dcc114679d43827d8c35a07c493ad6f083633d573d81c660abc5979e9", size = 94537, upload-time = "2025-04-08T10:36:26.722Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/2a/8c/4f0b9bdb75a1bfbd9c78fad7d8854369283f74fe7cf03eb16be77054536d/watchfiles-1.0.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5eb568c2aa6018e26da9e6c86f3ec3fd958cee7f0311b35c2630fa4217d17f2", size = 401511, upload-time = "2025-04-08T10:35:17.956Z" }, + { url = "https://files.pythonhosted.org/packages/dc/4e/7e15825def77f8bd359b6d3f379f0c9dac4eb09dd4ddd58fd7d14127179c/watchfiles-1.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0a04059f4923ce4e856b4b4e5e783a70f49d9663d22a4c3b3298165996d1377f", size = 392715, upload-time = "2025-04-08T10:35:19.202Z" }, + { url = "https://files.pythonhosted.org/packages/58/65/b72fb817518728e08de5840d5d38571466c1b4a3f724d190cec909ee6f3f/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e380c89983ce6e6fe2dd1e1921b9952fb4e6da882931abd1824c092ed495dec", size = 454138, upload-time = "2025-04-08T10:35:20.586Z" }, + { url = "https://files.pythonhosted.org/packages/3e/a4/86833fd2ea2e50ae28989f5950b5c3f91022d67092bfec08f8300d8b347b/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fe43139b2c0fdc4a14d4f8d5b5d967f7a2777fd3d38ecf5b1ec669b0d7e43c21", size = 458592, upload-time = "2025-04-08T10:35:21.87Z" }, + { url = "https://files.pythonhosted.org/packages/38/7e/42cb8df8be9a37e50dd3a818816501cf7a20d635d76d6bd65aae3dbbff68/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee0822ce1b8a14fe5a066f93edd20aada932acfe348bede8aa2149f1a4489512", size = 487532, upload-time = "2025-04-08T10:35:23.143Z" }, + { url = "https://files.pythonhosted.org/packages/fc/fd/13d26721c85d7f3df6169d8b495fcac8ab0dc8f0945ebea8845de4681dab/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0dbcb1c2d8f2ab6e0a81c6699b236932bd264d4cef1ac475858d16c403de74d", size = 522865, upload-time = "2025-04-08T10:35:24.702Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/0d/7f9ae243c04e96c5455d111e21b09087d0eeaf9a1369e13a01c7d3d82478/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2014a2b18ad3ca53b1f6c23f8cd94a18ce930c1837bd891262c182640eb40a6", size = 499887, upload-time = "2025-04-08T10:35:25.969Z" }, + { url = "https://files.pythonhosted.org/packages/8e/0f/a257766998e26aca4b3acf2ae97dff04b57071e991a510857d3799247c67/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f6ae86d5cb647bf58f9f655fcf577f713915a5d69057a0371bc257e2553234", size = 454498, upload-time = "2025-04-08T10:35:27.353Z" }, + { url = "https://files.pythonhosted.org/packages/81/79/8bf142575a03e0af9c3d5f8bcae911ee6683ae93a625d349d4ecf4c8f7df/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1a7bac2bde1d661fb31f4d4e8e539e178774b76db3c2c17c4bb3e960a5de07a2", size = 630663, upload-time = "2025-04-08T10:35:28.685Z" }, + { url = "https://files.pythonhosted.org/packages/f1/80/abe2e79f610e45c63a70d271caea90c49bbf93eb00fa947fa9b803a1d51f/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ab626da2fc1ac277bbf752446470b367f84b50295264d2d313e28dc4405d663", size = 625410, upload-time = "2025-04-08T10:35:30.42Z" }, + { url = "https://files.pythonhosted.org/packages/91/6f/bc7fbecb84a41a9069c2c6eb6319f7f7df113adf113e358c57fc1aff7ff5/watchfiles-1.0.5-cp312-cp312-win32.whl", hash = "sha256:9f4571a783914feda92018ef3901dab8caf5b029325b5fe4558c074582815249", size = 277965, upload-time = "2025-04-08T10:35:32.023Z" }, + { url = "https://files.pythonhosted.org/packages/99/a5/bf1c297ea6649ec59e935ab311f63d8af5faa8f0b86993e3282b984263e3/watchfiles-1.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:360a398c3a19672cf93527f7e8d8b60d8275119c5d900f2e184d32483117a705", size = 291693, upload-time = "2025-04-08T10:35:33.225Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/7b/fd01087cc21db5c47e5beae507b87965db341cce8a86f9eb12bf5219d4e0/watchfiles-1.0.5-cp312-cp312-win_arm64.whl", hash = "sha256:1a2902ede862969077b97523987c38db28abbe09fb19866e711485d9fbf0d417", size = 283287, upload-time = "2025-04-08T10:35:34.568Z" }, + { url = "https://files.pythonhosted.org/packages/c7/62/435766874b704f39b2fecd8395a29042db2b5ec4005bd34523415e9bd2e0/watchfiles-1.0.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0b289572c33a0deae62daa57e44a25b99b783e5f7aed81b314232b3d3c81a11d", size = 401531, upload-time = "2025-04-08T10:35:35.792Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a6/e52a02c05411b9cb02823e6797ef9bbba0bfaf1bb627da1634d44d8af833/watchfiles-1.0.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a056c2f692d65bf1e99c41045e3bdcaea3cb9e6b5a53dcaf60a5f3bd95fc9763", size = 392417, upload-time = "2025-04-08T10:35:37.048Z" }, + { url = "https://files.pythonhosted.org/packages/3f/53/c4af6819770455932144e0109d4854437769672d7ad897e76e8e1673435d/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9dca99744991fc9850d18015c4f0438865414e50069670f5f7eee08340d8b40", size = 453423, upload-time = "2025-04-08T10:35:38.357Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d1/8e88df58bbbf819b8bc5cfbacd3c79e01b40261cad0fc84d1e1ebd778a07/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:894342d61d355446d02cd3988a7326af344143eb33a2fd5d38482a92072d9563", size = 458185, upload-time = "2025-04-08T10:35:39.708Z" }, + { url = "https://files.pythonhosted.org/packages/ff/70/fffaa11962dd5429e47e478a18736d4e42bec42404f5ee3b92ef1b87ad60/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab44e1580924d1ffd7b3938e02716d5ad190441965138b4aa1d1f31ea0877f04", size = 486696, upload-time = "2025-04-08T10:35:41.469Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/db/723c0328e8b3692d53eb273797d9a08be6ffb1d16f1c0ba2bdbdc2a3852c/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6f9367b132078b2ceb8d066ff6c93a970a18c3029cea37bfd7b2d3dd2e5db8f", size = 522327, upload-time = "2025-04-08T10:35:43.289Z" }, + { url = "https://files.pythonhosted.org/packages/cd/05/9fccc43c50c39a76b68343484b9da7b12d42d0859c37c61aec018c967a32/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2e55a9b162e06e3f862fb61e399fe9f05d908d019d87bf5b496a04ef18a970a", size = 499741, upload-time = "2025-04-08T10:35:44.574Z" }, + { url = "https://files.pythonhosted.org/packages/23/14/499e90c37fa518976782b10a18b18db9f55ea73ca14641615056f8194bb3/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0125f91f70e0732a9f8ee01e49515c35d38ba48db507a50c5bdcad9503af5827", size = 453995, upload-time = "2025-04-08T10:35:46.336Z" }, + { url = "https://files.pythonhosted.org/packages/61/d9/f75d6840059320df5adecd2c687fbc18960a7f97b55c300d20f207d48aef/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:13bb21f8ba3248386337c9fa51c528868e6c34a707f729ab041c846d52a0c69a", size = 629693, upload-time = "2025-04-08T10:35:48.161Z" }, + { url = "https://files.pythonhosted.org/packages/fc/17/180ca383f5061b61406477218c55d66ec118e6c0c51f02d8142895fcf0a9/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:839ebd0df4a18c5b3c1b890145b5a3f5f64063c2a0d02b13c76d78fe5de34936", size = 624677, upload-time = "2025-04-08T10:35:49.65Z" }, + { url = "https://files.pythonhosted.org/packages/bf/15/714d6ef307f803f236d69ee9d421763707899d6298d9f3183e55e366d9af/watchfiles-1.0.5-cp313-cp313-win32.whl", hash = "sha256:4a8ec1e4e16e2d5bafc9ba82f7aaecfeec990ca7cd27e84fb6f191804ed2fcfc", size = 277804, upload-time = "2025-04-08T10:35:51.093Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/b4/c57b99518fadf431f3ef47a610839e46e5f8abf9814f969859d1c65c02c7/watchfiles-1.0.5-cp313-cp313-win_amd64.whl", hash = "sha256:f436601594f15bf406518af922a89dcaab416568edb6f65c4e5bbbad1ea45c11", size = 291087, upload-time = "2025-04-08T10:35:52.458Z" }, ] [[package]] name = "wcwidth" version = "0.2.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload_time = "2024-01-06T02:10:57.829Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload_time = "2024-01-06T02:10:55.763Z" }, + { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, ] [[package]] name = "websocket-client" version = "1.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648, upload_time = "2024-04-23T22:16:16.976Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648, upload-time = "2024-04-23T22:16:16.976Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload_time = "2024-04-23T22:16:14.422Z" }, + { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload-time = "2024-04-23T22:16:14.422Z" }, ] [[package]] name = "websockets" version = "15.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload_time = "2025-03-05T20:03:41.606Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload_time = "2025-03-05T20:02:16.706Z" }, - { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload_time = "2025-03-05T20:02:18.832Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload_time = "2025-03-05T20:02:20.187Z" }, - { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload_time = "2025-03-05T20:02:22.286Z" }, - { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload_time = "2025-03-05T20:02:24.368Z" }, - { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload_time = "2025-03-05T20:02:25.669Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload_time = "2025-03-05T20:02:26.99Z" }, - { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload_time = 
"2025-03-05T20:02:30.291Z" }, - { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload_time = "2025-03-05T20:02:31.634Z" }, - { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload_time = "2025-03-05T20:02:33.017Z" }, - { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload_time = "2025-03-05T20:02:34.498Z" }, - { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload_time = "2025-03-05T20:02:36.695Z" }, - { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload_time = "2025-03-05T20:02:37.985Z" }, - { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload_time = "2025-03-05T20:02:39.298Z" }, - { url = 
"https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload_time = "2025-03-05T20:02:40.595Z" }, - { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload_time = "2025-03-05T20:02:41.926Z" }, - { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload_time = "2025-03-05T20:02:43.304Z" }, - { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload_time = "2025-03-05T20:02:48.812Z" }, - { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload_time = "2025-03-05T20:02:50.14Z" }, - { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload_time = 
"2025-03-05T20:02:51.561Z" }, - { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload_time = "2025-03-05T20:02:53.814Z" }, - { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload_time = "2025-03-05T20:02:55.237Z" }, - { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload_time = "2025-03-05T20:03:39.41Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = 
"2025-03-05T20:02:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, + { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, + { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" }, + { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" }, + { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = 
"2025-03-05T20:02:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, + { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, ] [[package]] @@ -1128,16 +1263,16 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/4a/44d3c295350d776427904d73c189e10aeae66d7f555bb2feee16d1e4ba5a/wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065", size = 53425, upload_time = "2022-08-23T19:58:21.447Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/4a/44d3c295350d776427904d73c189e10aeae66d7f555bb2feee16d1e4ba5a/wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065", size = 53425, upload-time = "2022-08-23T19:58:21.447Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/58/e860788190eba3bcce367f74d29c4675466ce8dddfba85f7827588416f01/wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736", size = 24226, upload_time = "2022-08-23T19:58:19.96Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/58/e860788190eba3bcce367f74d29c4675466ce8dddfba85f7827588416f01/wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736", size = 24226, upload-time = "2022-08-23T19:58:19.96Z" }, ] [[package]] name = "xmod" version = "1.8.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/b2/e3edc608823348e628a919e1d7129e641997afadd946febdd704aecc5881/xmod-1.8.1.tar.gz", hash = "sha256:38c76486b9d672c546d57d8035df0beb7f4a9b088bc3fb2de5431ae821444377", size = 3988, upload_time = "2024-01-04T18:03:17.663Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/b2/e3edc608823348e628a919e1d7129e641997afadd946febdd704aecc5881/xmod-1.8.1.tar.gz", hash = "sha256:38c76486b9d672c546d57d8035df0beb7f4a9b088bc3fb2de5431ae821444377", size = 3988, upload-time = "2024-01-04T18:03:17.663Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/33/6b/0dc75b64a764ea1cb8e4c32d1fb273c147304d4e5483cd58be482dc62e45/xmod-1.8.1-py3-none-any.whl", hash = "sha256:a24e9458a4853489042522bdca9e50ee2eac5ab75c809a91150a8a7f40670d48", size = 4610, upload_time = "2024-01-04T18:03:16.078Z" }, + { url = "https://files.pythonhosted.org/packages/33/6b/0dc75b64a764ea1cb8e4c32d1fb273c147304d4e5483cd58be482dc62e45/xmod-1.8.1-py3-none-any.whl", hash = "sha256:a24e9458a4853489042522bdca9e50ee2eac5ab75c809a91150a8a7f40670d48", size = 4610, upload-time = "2024-01-04T18:03:16.078Z" }, ] From 104572dc3db62bc89a1a29a8bcb70237165778dd Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 4 May 2025 16:01:58 -0400 Subject: [PATCH 020/565] fix(docs): remove redundant phrase from usage note in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9d3d6568..31c9cfad 100644 --- a/README.md +++ b/README.md @@ -177,4 +177,4 @@ This project is licensed under the MIT License - see the LICENSE file 
for detail --- -**Note**: This tool is for personal use only. Use responsibly and in accordance with LinkedIn's terms of service. Web scraping may violate LinkedIn's terms of service in some cases. +**Note**: This tool is for personal use only. Use responsibly and in accordance with LinkedIn's terms of service. Web scraping may violate LinkedIn's terms of service. From c139b1966a2cfafffc120a278721d8205ad5c7fe Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 4 May 2025 20:55:44 -0400 Subject: [PATCH 021/565] chore(tasks): add 'Run main.py' / 'Run main.py (debug)' --- .vscode/tasks.json | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index d3fdce1f..e284e49c 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -18,10 +18,25 @@ "problemMatcher": [] }, { - "label": "Run main.py", + "label": "Run main.py (debug)", "type": "shell", "command": "uv", "args": ["run", "main.py", "--no-headless", "--no-lazy-init", "--debug"], + "group": { + "kind": "build", + "isDefault": false + }, + "presentation": { + "reveal": "always", + "panel": "new", + "focus": true + }, + "problemMatcher": [] + }, { + "label": "Run main.py", + "type": "shell", + "command": "uv", + "args": ["run", "main.py", "--no-headless", "--no-lazy-init"], "group": { "kind": "build", "isDefault": true From 8ef9f5b21c30515b309242fc51cfd2924fe3e2fa Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 4 May 2025 20:56:44 -0400 Subject: [PATCH 022/565] feat(config): implement new centralized configuration management and remove legacy argument parsing --- main.py | 36 ++-- src/linkedin_mcp_server/arguments.py | 64 ------- src/linkedin_mcp_server/cli.py | 20 +- src/linkedin_mcp_server/config/__init__.py | 47 +++++ src/linkedin_mcp_server/config/loaders.py | 139 ++++++++++++++ src/linkedin_mcp_server/config/providers.py | 96 ++++++++++ src/linkedin_mcp_server/config/schema.py | 41 +++++ src/linkedin_mcp_server/config/secrets.py 
| 59 ++++++ src/linkedin_mcp_server/drivers/chrome.py | 193 +++++++------------- src/linkedin_mcp_server/py.typed | 0 src/linkedin_mcp_server/secrets.py | 165 ----------------- 11 files changed, 477 insertions(+), 383 deletions(-) delete mode 100644 src/linkedin_mcp_server/arguments.py create mode 100644 src/linkedin_mcp_server/config/__init__.py create mode 100644 src/linkedin_mcp_server/config/loaders.py create mode 100644 src/linkedin_mcp_server/config/providers.py create mode 100644 src/linkedin_mcp_server/config/schema.py create mode 100644 src/linkedin_mcp_server/config/secrets.py delete mode 100644 src/linkedin_mcp_server/py.typed delete mode 100644 src/linkedin_mcp_server/secrets.py diff --git a/main.py b/main.py index 890283b4..894d171c 100644 --- a/main.py +++ b/main.py @@ -1,15 +1,15 @@ # main.py """ LinkedIn MCP Server - A Model Context Protocol server for LinkedIn integration. - -This is the main entry point that runs the LinkedIn MCP server. """ import sys import logging -import inquirer # type: ignore # third-party package without type stubs +import inquirer # type: ignore from typing import Literal, NoReturn -from linkedin_mcp_server.arguments import parse_arguments + +# Import the new centralized configuration +from linkedin_mcp_server.config import get_config from linkedin_mcp_server.cli import print_claude_config from linkedin_mcp_server.drivers.chrome import initialize_driver from linkedin_mcp_server.server import create_mcp_server, shutdown_handler @@ -37,30 +37,29 @@ def main() -> None: print("๐Ÿ”— LinkedIn MCP Server ๐Ÿ”—") print("=" * 40) - # Parse command-line arguments - args = parse_arguments() + # Get configuration using the new centralized system + config = get_config() # Configure logging - log_level = logging.DEBUG if args.debug else logging.ERROR + log_level = logging.DEBUG if config.server.debug else logging.ERROR logging.basicConfig( level=log_level, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", ) logger = 
logging.getLogger("linkedin_mcp_server") - logger.debug(f"Server arguments: {args}") + logger.debug(f"Server configuration: {config}") - # Initialize the driver - with lazy initialization if specified - initialize_driver(headless=args.headless, lazy_init=args.lazy_init) + # Initialize the driver with configuration + initialize_driver() # Decide transport - if args.setup: - transport: Literal["stdio", "sse"] = choose_transport_interactive() - else: - transport = "stdio" # Default to stdio without prompt + transport = config.server.transport + if config.server.setup: + transport = choose_transport_interactive() # Print configuration for Claude if in setup mode - if args.setup: + if config.server.setup: print_claude_config() # Create and run the MCP server @@ -72,12 +71,7 @@ def main() -> None: def exit_gracefully(exit_code: int = 0) -> NoReturn: - """ - Exit the application gracefully, cleaning up resources. - - Args: - exit_code: The exit code to use when terminating - """ + """Exit the application gracefully, cleaning up resources.""" print("\n๐Ÿ‘‹ Shutting down LinkedIn MCP server...") shutdown_handler() sys.exit(exit_code) diff --git a/src/linkedin_mcp_server/arguments.py b/src/linkedin_mcp_server/arguments.py deleted file mode 100644 index 01dc14c9..00000000 --- a/src/linkedin_mcp_server/arguments.py +++ /dev/null @@ -1,64 +0,0 @@ -# src/linkedin_mcp_server/arguments.py -""" -Command-line argument parsing for LinkedIn MCP server. - -This module handles parsing and validating command-line arguments. -""" - -import argparse -from dataclasses import dataclass - - -@dataclass -class ServerArguments: - """Command-line arguments for the LinkedIn MCP server.""" - - headless: bool - setup: bool - debug: bool - lazy_init: bool - - -def parse_arguments() -> ServerArguments: - """ - Parse command-line arguments for the LinkedIn MCP server. 
- - Returns: - ServerArguments: Parsed command-line arguments - """ - parser = argparse.ArgumentParser( - description="LinkedIn MCP Server - A Model Context Protocol server for LinkedIn integration" - ) - - parser.add_argument( - "--no-headless", - action="store_true", - help="Run Chrome with a visible browser window (useful for debugging)", - ) - - parser.add_argument( - "--debug", - action="store_true", - help="Enable debug mode with additional logging", - ) - - parser.add_argument( - "--no-setup", - action="store_true", - help="Skip printing configuration information and interactive setup", - ) - - parser.add_argument( - "--no-lazy-init", - action="store_true", - help="Initialize Chrome driver and login immediately (not recommended for most users)", - ) - - args = parser.parse_args() - - return ServerArguments( - headless=not args.no_headless, - setup=not args.no_setup, - debug=args.debug, - lazy_init=not args.no_lazy_init, # Default to lazy init - ) diff --git a/src/linkedin_mcp_server/cli.py b/src/linkedin_mcp_server/cli.py index f25a5161..e9833624 100644 --- a/src/linkedin_mcp_server/cli.py +++ b/src/linkedin_mcp_server/cli.py @@ -12,6 +12,8 @@ import logging import pyperclip # type: ignore +from linkedin_mcp_server.config import get_config + logger = logging.getLogger(__name__) @@ -22,6 +24,7 @@ def print_claude_config() -> None: This function generates the configuration needed for Claude Desktop and copies it to the clipboard for easy pasting. 
""" + config = get_config() current_dir = os.path.abspath( os.path.dirname(os.path.dirname(os.path.dirname(__file__))) ) @@ -45,7 +48,16 @@ def print_claude_config() -> None: "run", "main.py", "--no-setup", - ] # , "--no-lazy-init"] + ] + + # Add environment variables to the configuration + env_vars: Dict[str, str] = {} + if config.linkedin.email: + env_vars["LINKEDIN_EMAIL"] = config.linkedin.email + if config.linkedin.password: + env_vars["LINKEDIN_PASSWORD"] = config.linkedin.password + if config.chrome.chromedriver_path: + env_vars["CHROMEDRIVER"] = config.chrome.chromedriver_path config_json: Dict[str, Any] = { "mcpServers": { @@ -63,6 +75,10 @@ def print_claude_config() -> None: } } + # Add environment variables if available + if env_vars: + config_json["mcpServers"]["linkedin-scraper"]["env"] = env_vars + # Convert to string for clipboard config_str = json.dumps(config_json, indent=2) @@ -75,7 +91,7 @@ def print_claude_config() -> None: # Copy to clipboard try: - pyperclip.copy(config_str) # Only copy the JSON, not the comments + pyperclip.copy(config_str) print("โœ… Claude configuration copied to clipboard!") except ImportError: print( diff --git a/src/linkedin_mcp_server/config/__init__.py b/src/linkedin_mcp_server/config/__init__.py new file mode 100644 index 00000000..b9cbc289 --- /dev/null +++ b/src/linkedin_mcp_server/config/__init__.py @@ -0,0 +1,47 @@ +# src/linkedin_mcp_server/config/__init__.py +from typing import Optional +import logging +from .schema import AppConfig, ChromeConfig, LinkedInConfig, ServerConfig +from .loaders import load_config +from .providers import ( + get_credentials_from_keyring, + save_credentials_to_keyring, + clear_credentials_from_keyring, + get_keyring_name, +) + +logger = logging.getLogger(__name__) + +# Singleton pattern for configuration +_config: Optional[AppConfig] = None + + +def get_config() -> AppConfig: + """Get the application configuration, initializing it if needed.""" + global _config + if _config is None: + 
_config = load_config() + logger.debug("Configuration loaded") + return _config + + +def reset_config() -> None: + """Reset the configuration to force reloading.""" + global _config + _config = None + logger.debug("Configuration reset") + + +# Export schema classes for type annotations +__all__ = [ + "AppConfig", + "ChromeConfig", + "LinkedInConfig", + "ServerConfig", + "get_config", + "reset_config", + "get_credentials_from_keyring", + "save_credentials_to_keyring", + "clear_credentials_from_keyring", + "get_keyring_name", +] diff --git a/src/linkedin_mcp_server/config/loaders.py b/src/linkedin_mcp_server/config/loaders.py new file mode 100644 index 00000000..3871d4e9 --- /dev/null +++ b/src/linkedin_mcp_server/config/loaders.py @@ -0,0 +1,139 @@ +# src/linkedin_mcp_server/config/loaders.py +import os +import argparse +import logging +from typing import Optional +from .schema import AppConfig +from .providers import get_chromedriver_paths + +logger = logging.getLogger(__name__) + + +def find_chromedriver() -> Optional[str]: + """Find the ChromeDriver executable in common locations.""" + # First check environment variable + if path := os.getenv("CHROMEDRIVER"): + if os.path.exists(path): + return path + + # Check common locations + for path in get_chromedriver_paths(): + if os.path.exists(path) and (os.access(path, os.X_OK) or path.endswith(".exe")): + return path + + return None + + +def load_from_env(config: AppConfig) -> AppConfig: + """Load configuration from environment variables.""" + # LinkedIn credentials + if email := os.environ.get("LINKEDIN_EMAIL"): + config.linkedin.email = email + + if password := os.environ.get("LINKEDIN_PASSWORD"): + config.linkedin.password = password + + # ChromeDriver configuration + if chromedriver := os.environ.get("CHROMEDRIVER"): + config.chrome.chromedriver_path = chromedriver + + # Debug mode + if os.environ.get("DEBUG") in ("1", "true", "True", "yes", "Yes"): + config.server.debug = True + + # Headless mode + if 
os.environ.get("HEADLESS") in ("0", "false", "False", "no", "No"): + config.chrome.headless = False + + return config + + +def load_from_args(config: AppConfig) -> AppConfig: + """Load configuration from command line arguments.""" + parser = argparse.ArgumentParser( + description="LinkedIn MCP Server - A Model Context Protocol server for LinkedIn integration" + ) + + parser.add_argument( + "--no-headless", + action="store_true", + help="Run Chrome with a visible browser window (useful for debugging)", + ) + + parser.add_argument( + "--debug", + action="store_true", + help="Enable debug mode with additional logging", + ) + + parser.add_argument( + "--no-setup", + action="store_true", + help="Skip printing configuration information and interactive setup", + ) + + parser.add_argument( + "--no-lazy-init", + action="store_true", + help="Initialize Chrome driver and login immediately", + ) + + parser.add_argument( + "--transport", + choices=["stdio", "sse"], + default=None, + help="Specify the transport mode (stdio or sse)", + ) + + parser.add_argument( + "--chromedriver", + type=str, + help="Specify the path to the ChromeDriver executable", + ) + + args = parser.parse_args() + + # Update configuration with parsed arguments + if args.no_headless: + config.chrome.headless = False + + if args.debug: + config.server.debug = True + + if args.no_setup: + config.server.setup = False + + if args.no_lazy_init: + config.server.lazy_init = False + + if args.transport: + config.server.transport = args.transport + + if args.chromedriver: + config.chrome.chromedriver_path = args.chromedriver + + return config + + +def load_config() -> AppConfig: + """ + Load configuration from all sources with defined precedence: + 1. Command line arguments (highest priority) + 2. Environment variables + 3. 
Default values and auto-detection (lowest priority) + """ + # Start with default configuration + config = AppConfig() + + # Auto-detect ChromeDriver path + if chromedriver_path := find_chromedriver(): + config.chrome.chromedriver_path = chromedriver_path + logger.debug(f"Auto-detected ChromeDriver at: {chromedriver_path}") + + # Override with environment variables + config = load_from_env(config) + + # Override with command line arguments (highest priority) + config = load_from_args(config) + + return config diff --git a/src/linkedin_mcp_server/config/providers.py b/src/linkedin_mcp_server/config/providers.py new file mode 100644 index 00000000..31a39779 --- /dev/null +++ b/src/linkedin_mcp_server/config/providers.py @@ -0,0 +1,96 @@ +# src/linkedin_mcp_server/config/providers.py +from typing import Dict, Optional, List +import os +import platform +import logging +import keyring +from keyring.errors import KeyringError + +# Constants +SERVICE_NAME = "linkedin_mcp_server" +EMAIL_KEY = "linkedin_email" +PASSWORD_KEY = "linkedin_password" + +logger = logging.getLogger(__name__) + + +def get_keyring_name() -> str: + """Get the name of the current keyring backend.""" + system = platform.system() + if system == "Darwin": + return "macOS Keychain" + elif system == "Windows": + return "Windows Credential Locker" + else: + return keyring.get_keyring().__class__.__name__ + + +def get_secret_from_keyring(key: str) -> Optional[str]: + """Retrieve a secret from system keyring.""" + try: + secret = keyring.get_password(SERVICE_NAME, key) + return secret + except KeyringError as e: + logger.error(f"Error accessing keyring for {key}: {e}") + return None + + +def set_secret_in_keyring(key: str, value: str) -> bool: + """Store a secret in system keyring.""" + try: + keyring.set_password(SERVICE_NAME, key, value) + logger.debug(f"Secret '{key}' stored successfully in {get_keyring_name()}") + return True + except KeyringError as e: + logger.error(f"Error storing secret '{key}': {e}") 
+ return False + + +def get_credentials_from_keyring() -> Dict[str, Optional[str]]: + """Retrieve LinkedIn credentials from system keyring.""" + email = get_secret_from_keyring(EMAIL_KEY) + password = get_secret_from_keyring(PASSWORD_KEY) + + return {"email": email, "password": password} + + +def save_credentials_to_keyring(email: str, password: str) -> bool: + """Save LinkedIn credentials to system keyring.""" + email_saved = set_secret_in_keyring(EMAIL_KEY, email) + password_saved = set_secret_in_keyring(PASSWORD_KEY, password) + + return email_saved and password_saved + + +def clear_credentials_from_keyring() -> bool: + """Clear stored credentials from the keyring.""" + try: + keyring.delete_password(SERVICE_NAME, EMAIL_KEY) + keyring.delete_password(SERVICE_NAME, PASSWORD_KEY) + logger.info(f"Credentials removed from {get_keyring_name()}") + return True + except KeyringError as e: + logger.error(f"Error clearing credentials: {e}") + return False + + +def get_chromedriver_paths() -> List[str]: + """Get possible ChromeDriver paths based on the platform.""" + paths = [ + os.path.join(os.path.dirname(__file__), "../../../../drivers/chromedriver"), + os.path.join(os.path.expanduser("~"), "chromedriver"), + "/usr/local/bin/chromedriver", + "/usr/bin/chromedriver", + "/opt/homebrew/bin/chromedriver", + "/Applications/chromedriver", + ] + + if platform.system() == "Windows": + paths.extend( + [ + "C:\\Program Files\\chromedriver.exe", + "C:\\Program Files (x86)\\chromedriver.exe", + ] + ) + + return paths diff --git a/src/linkedin_mcp_server/config/schema.py b/src/linkedin_mcp_server/config/schema.py new file mode 100644 index 00000000..8d92585a --- /dev/null +++ b/src/linkedin_mcp_server/config/schema.py @@ -0,0 +1,41 @@ +# src/linkedin_mcp_server/config/schema.py +from dataclasses import dataclass, field +from typing import Optional, List, Literal + + +@dataclass +class ChromeConfig: + """Configuration for Chrome driver.""" + + headless: bool = True + 
chromedriver_path: Optional[str] = None + browser_args: List[str] = field(default_factory=list) + non_interactive: bool = False + + +@dataclass +class LinkedInConfig: + """LinkedIn connection configuration.""" + + email: Optional[str] = None + password: Optional[str] = None + use_keyring: bool = True + + +@dataclass +class ServerConfig: + """MCP server configuration.""" + + transport: Literal["stdio", "sse"] = "stdio" + lazy_init: bool = True + debug: bool = False + setup: bool = True + + +@dataclass +class AppConfig: + """Main application configuration.""" + + chrome: ChromeConfig = field(default_factory=ChromeConfig) + linkedin: LinkedInConfig = field(default_factory=LinkedInConfig) + server: ServerConfig = field(default_factory=ServerConfig) diff --git a/src/linkedin_mcp_server/config/secrets.py b/src/linkedin_mcp_server/config/secrets.py new file mode 100644 index 00000000..7e42010d --- /dev/null +++ b/src/linkedin_mcp_server/config/secrets.py @@ -0,0 +1,59 @@ +# src/linkedin_mcp_server/config/secrets.py +from typing import Dict, Optional +import logging +import inquirer # type: ignore +from linkedin_mcp_server.config import get_config +from .providers import ( + get_credentials_from_keyring, + save_credentials_to_keyring, + get_keyring_name, +) + +logger = logging.getLogger(__name__) + + +def get_credentials() -> Optional[Dict[str, str]]: + """Get LinkedIn credentials from config, keyring, or prompt.""" + config = get_config() + + # First, try configuration (includes environment variables) + if config.linkedin.email and config.linkedin.password: + print("Using LinkedIn credentials from configuration") + return {"email": config.linkedin.email, "password": config.linkedin.password} + + # Second, try keyring if enabled + if config.linkedin.use_keyring: + credentials = get_credentials_from_keyring() + if credentials["email"] and credentials["password"]: + print(f"Using LinkedIn credentials from {get_keyring_name()}") + return {"email": credentials["email"], 
"password": credentials["password"]} + + # If in non-interactive mode and no credentials found, return None + if config.chrome.non_interactive: + print("No credentials found in non-interactive mode") + return None + + # Otherwise, prompt for credentials + return prompt_for_credentials() + + +def prompt_for_credentials() -> Dict[str, str]: + """Prompt user for LinkedIn credentials and store them securely.""" + print(f"๐Ÿ”‘ LinkedIn credentials required (will be stored in {get_keyring_name()})") + questions = [ + inquirer.Text("email", message="LinkedIn Email"), + inquirer.Password("password", message="LinkedIn Password"), + ] + credentials = inquirer.prompt(questions) + + if not credentials: + raise KeyboardInterrupt("Credential input was cancelled") + + # Store credentials securely in keyring + if save_credentials_to_keyring(credentials["email"], credentials["password"]): + print(f"โœ… Credentials stored securely in {get_keyring_name()}") + else: + print("โš ๏ธ Warning: Could not store credentials in system keyring.") + print(" Your credentials will only be used for this session.") + + return credentials diff --git a/src/linkedin_mcp_server/drivers/chrome.py b/src/linkedin_mcp_server/drivers/chrome.py index 9cd7d324..4079e45b 100644 --- a/src/linkedin_mcp_server/drivers/chrome.py +++ b/src/linkedin_mcp_server/drivers/chrome.py @@ -5,76 +5,20 @@ This module handles the creation and management of Chrome WebDriver instances. 
""" -from typing import Dict, Optional, List, Any -import os import sys -import logging -import inquirer # type: ignore +from typing import Dict, Optional +import os from selenium import webdriver from selenium.webdriver.chrome.options import Options from selenium.webdriver.chrome.service import Service from selenium.common.exceptions import WebDriverException -from linkedin_mcp_server.secrets import get_credentials -from linkedin_scraper import actions +import inquirer # type: ignore +from linkedin_mcp_server.config import get_config +from linkedin_mcp_server.config.secrets import get_credentials +from linkedin_mcp_server.config.providers import clear_credentials_from_keyring # Global driver storage to reuse sessions active_drivers: Dict[str, webdriver.Chrome] = {} -is_initialized: bool = False -driver_config: Dict[str, Any] = { - "headless": True, - "non_interactive": False, -} - -logger = logging.getLogger(__name__) - - -def get_chromedriver_path() -> Optional[str]: - """ - Get the ChromeDriver path from environment variable or default locations. 
- - Returns: - Optional[str]: Path to the ChromeDriver executable if found, None otherwise - """ - # First check environment variable - chromedriver_path = os.getenv("CHROMEDRIVER") - if chromedriver_path and os.path.exists(chromedriver_path): - return chromedriver_path - - # Check common locations - possible_paths: List[str] = [ - os.path.join(os.path.dirname(__file__), "../../../drivers/chromedriver"), - os.path.join(os.path.expanduser("~"), "chromedriver"), - "/usr/local/bin/chromedriver", - "/usr/bin/chromedriver", - # Common MacOS paths - "/opt/homebrew/bin/chromedriver", - "/Applications/chromedriver", - # Common Windows paths - "C:\\Program Files\\chromedriver.exe", - "C:\\Program Files (x86)\\chromedriver.exe", - ] - - for path in possible_paths: - if os.path.exists(path) and (os.access(path, os.X_OK) or path.endswith(".exe")): - return path - - return None - - -def configure_driver(headless: bool = True, non_interactive: bool = False) -> None: - """ - Configure the driver settings without initializing it. 
- - Args: - headless: Whether to run Chrome in headless mode - non_interactive: Whether to run in non-interactive mode (for Docker/CI) - """ - global driver_config - driver_config["headless"] = headless - driver_config["non_interactive"] = non_interactive - logger.info( - f"Driver configured: headless={headless}, non_interactive={non_interactive}" - ) def get_or_create_driver() -> Optional[webdriver.Chrome]: @@ -88,23 +32,20 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: Raises: WebDriverException: If the driver cannot be created and not in non-interactive mode """ - global is_initialized + config = get_config() session_id = "default" # We use a single session for simplicity # Return existing driver if available if session_id in active_drivers: return active_drivers[session_id] - headless = driver_config["headless"] - non_interactive = driver_config["non_interactive"] - # Set up Chrome options chrome_options = Options() - if headless: - logger.debug("Running Chrome in headless mode") + print( + f"๐ŸŒ Running browser in {'headless' if config.chrome.headless else 'visible'} mode" + ) + if config.chrome.headless: chrome_options.add_argument("--headless=new") - else: - logger.debug("Running Chrome with visible browser window") # Add additional options for stability chrome_options.add_argument("--no-sandbox") @@ -115,56 +56,58 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36" ) + # Add any custom browser arguments from config + for arg in config.chrome.browser_args: + chrome_options.add_argument(arg) + # Initialize Chrome driver try: - chromedriver_path = get_chromedriver_path() - if chromedriver_path: - logger.debug(f"Using ChromeDriver at path: {chromedriver_path}") - service = Service(executable_path=chromedriver_path) + if config.chrome.chromedriver_path: + print(f"๐ŸŒ Using ChromeDriver at path: 
{config.chrome.chromedriver_path}") + service = Service(executable_path=config.chrome.chromedriver_path) driver = webdriver.Chrome(service=service, options=chrome_options) else: - logger.debug("Using auto-detected ChromeDriver") + print("๐ŸŒ Using auto-detected ChromeDriver") driver = webdriver.Chrome(options=chrome_options) # Add a page load timeout for safety driver.set_page_load_timeout(60) - # Try to log in if we haven't already - if not is_initialized: - if login_to_linkedin(driver, non_interactive): - is_initialized = True - elif non_interactive: - # In non-interactive mode, if login fails, return None - driver.quit() - return None + # Try to log in + if login_to_linkedin(driver): + print("Successfully logged in to LinkedIn") + elif config.chrome.non_interactive: + # In non-interactive mode, if login fails, return None + driver.quit() + return None active_drivers[session_id] = driver return driver except Exception as e: - error_msg = f"Error creating web driver: {e}" - logger.error(error_msg) + error_msg = f"๐Ÿ›‘ Error creating web driver: {e}" + print(error_msg) - if non_interactive: - logger.error("Failed to initialize driver in non-interactive mode") + if config.chrome.non_interactive: + print("๐Ÿ›‘ Failed to initialize driver in non-interactive mode") return None raise WebDriverException(error_msg) -# src/linkedin_mcp_server/drivers/chrome.py (update function) -def login_to_linkedin(driver: webdriver.Chrome, non_interactive: bool = False) -> bool: +def login_to_linkedin(driver: webdriver.Chrome) -> bool: """ Log in to LinkedIn using stored or provided credentials. 
Args: driver: Chrome WebDriver instance - non_interactive: Whether to run in non-interactive mode Returns: bool: True if login was successful, False otherwise """ - # Get credentials - credentials = get_credentials(non_interactive=non_interactive) + config = get_config() + + # Get LinkedIn credentials from config + credentials = get_credentials() if not credentials: print("โŒ No credentials available") @@ -174,6 +117,8 @@ def login_to_linkedin(driver: webdriver.Chrome, non_interactive: bool = False) - # Login to LinkedIn print("๐Ÿ”‘ Logging in to LinkedIn...") + from linkedin_scraper import actions # type: ignore + actions.login(driver, credentials["email"], credentials["password"]) print("โœ… Successfully logged in to LinkedIn") @@ -182,13 +127,13 @@ def login_to_linkedin(driver: webdriver.Chrome, non_interactive: bool = False) - error_msg = f"Failed to login: {str(e)}" print(f"โŒ {error_msg}") - if not non_interactive: + if not config.chrome.non_interactive: print( "โš ๏ธ You might need to confirm the login in your LinkedIn mobile app. " "Please try again and confirm the login." ) - if driver_config["headless"]: + if config.chrome.headless: print( "๐Ÿ” Try running with visible browser window to see what's happening: " "uv run main.py --no-headless" @@ -206,45 +151,33 @@ def login_to_linkedin(driver: webdriver.Chrome, non_interactive: bool = False) - if retry and retry.get("retry", False): # Clear credentials from keyring and try again - from linkedin_mcp_server.secrets import clear_credentials - - clear_credentials() + clear_credentials_from_keyring() # Try again with new credentials - return login_to_linkedin(driver, non_interactive) + return login_to_linkedin(driver) return False -def initialize_driver(headless: bool = True, lazy_init: bool = False) -> None: +def initialize_driver() -> None: """ - Initialize the driver configuration and optionally create driver and log in. 
- - Args: - headless: Whether to run Chrome in headless mode - lazy_init: If True, only configure the driver without creating it - (driver will be created on first tool call) + Initialize the driver based on global configuration. """ - # Always configure the driver - configure_driver(headless=headless, non_interactive=lazy_init) + config = get_config() - if lazy_init: - logger.info( - "Using lazy initialization - driver will be created on first tool call" - ) - if "LINKEDIN_EMAIL" in os.environ and "LINKEDIN_PASSWORD" in os.environ: - logger.info("LinkedIn credentials found in environment variables") + if config.server.lazy_init: + print("Using lazy initialization - driver will be created on first tool call") + if config.linkedin.email and config.linkedin.password: + print("LinkedIn credentials found in configuration") else: - logger.warning( - "No LinkedIn credentials in environment variables - will look for stored credentials on first use" + print( + "No LinkedIn credentials found - will look for stored credentials on first use" ) return # Validate chromedriver can be found - chromedriver_path = get_chromedriver_path() - - if chromedriver_path: - print(f"โœ… ChromeDriver found at: {chromedriver_path}") - os.environ["CHROMEDRIVER"] = chromedriver_path + if config.chrome.chromedriver_path: + print(f"โœ… ChromeDriver found at: {config.chrome.chromedriver_path}") + os.environ["CHROMEDRIVER"] = config.chrome.chromedriver_path else: print("โš ๏ธ ChromeDriver not found in common locations.") print("โšก Continuing with automatic detection...") @@ -257,23 +190,19 @@ def initialize_driver(headless: bool = True, lazy_init: bool = False) -> None: driver = get_or_create_driver() if driver: print("โœ… Web driver initialized successfully") - print( - f"๐ŸŒ Browser is running in {'headless' if headless else 'visible'} mode" - ) else: print("โŒ Failed to initialize web driver.") except WebDriverException as e: print(f"โŒ Failed to initialize web driver: {str(e)}") - 
handle_driver_error(headless) + handle_driver_error() -def handle_driver_error(headless: bool) -> None: +def handle_driver_error() -> None: """ Handle ChromeDriver initialization errors by providing helpful options. - - Args: - headless: Whether Chrome is running in headless mode """ + config = get_config() + questions = [ inquirer.List( "chromedriver_action", @@ -293,13 +222,15 @@ def handle_driver_error(headless: bool) -> None: )["custom_path"] if os.path.exists(path): + # Update config with the new path + config.chrome.chromedriver_path = path os.environ["CHROMEDRIVER"] = path print(f"โœ… ChromeDriver path set to: {path}") # Try again with the new path - initialize_driver(headless=headless) + initialize_driver() else: print(f"โš ๏ธ Warning: The specified path does not exist: {path}") - initialize_driver(headless=headless) + initialize_driver() elif answers["chromedriver_action"] == "help": print("\n๐Ÿ“‹ ChromeDriver Installation Guide:") @@ -316,7 +247,7 @@ def handle_driver_error(headless: bool) -> None: if inquirer.prompt( [inquirer.Confirm("try_again", message="Try again?", default=True)] )["try_again"]: - initialize_driver(headless=headless) + initialize_driver() print("โŒ ChromeDriver is required for this application to work properly.") sys.exit(1) diff --git a/src/linkedin_mcp_server/py.typed b/src/linkedin_mcp_server/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/src/linkedin_mcp_server/secrets.py b/src/linkedin_mcp_server/secrets.py deleted file mode 100644 index 5dc67bb3..00000000 --- a/src/linkedin_mcp_server/secrets.py +++ /dev/null @@ -1,165 +0,0 @@ -# src/linkedin_mcp_server/secrets.py -""" -Secure secrets management for LinkedIn MCP server. - -This module provides secure storage and retrieval of sensitive credentials -using the system's native keychain/credential manager. 
-""" - -from typing import Dict, Optional -import os -import platform -import logging -import keyring -from keyring.errors import KeyringError -import inquirer # type: ignore - -# Service name for the keyring -SERVICE_NAME = "linkedin_mcp_server" - -# Secret keys -EMAIL_KEY = "linkedin_email" -PASSWORD_KEY = "linkedin_password" - -logger = logging.getLogger(__name__) - - -def get_keyring_name() -> str: - """ - Get the name of the current keyring backend. - - Returns: - str: Human-readable name of the keyring backend based on platform - """ - system = platform.system() - if system == "Darwin": - return "macOS Keychain" - elif system == "Windows": - return "Windows Credential Locker" - else: - return keyring.get_keyring().__class__.__name__ - - -def get_secret(key: str) -> Optional[str]: - """ - Retrieve a secret from system keyring. - - Args: - key: The key identifier for the secret - - Returns: - Optional[str]: The secret value if found, None otherwise - """ - try: - secret = keyring.get_password(SERVICE_NAME, key) - return secret - except KeyringError as e: - logger.error(f"Error accessing keyring for {key}: {e}") - return None - - -def set_secret(key: str, value: str) -> bool: - """ - Store a secret in system keyring. - - Args: - key: The key identifier for the secret - value: The secret value to store - - Returns: - bool: True if successful, False otherwise - """ - try: - keyring.set_password(SERVICE_NAME, key, value) - logger.debug(f"Secret '{key}' stored successfully in {get_keyring_name()}") - return True - except KeyringError as e: - logger.error(f"Error storing secret '{key}': {e}") - return False - - -def get_credentials(non_interactive: bool = False) -> Optional[Dict[str, str]]: - """ - Get LinkedIn credentials from environment variables, keyring, or prompt. - - Args: - non_interactive: If True, only get credentials from environment or keyring, - without prompting the user. 
- - Returns: - Optional[Dict[str, str]]: Dictionary containing email and password, or None if - not available in non-interactive mode. - """ - # First, try environment variables - email = os.environ.get("LINKEDIN_EMAIL") - password = os.environ.get("LINKEDIN_PASSWORD") - - if email and password: - logger.info("Using LinkedIn credentials from environment variables") - return {"email": email, "password": password} - - # Second, try keyring - email = get_secret(EMAIL_KEY) - password = get_secret(PASSWORD_KEY) - - if email and password: - logger.info(f"Using LinkedIn credentials from {get_keyring_name()}") - return {"email": email, "password": password} - - # If in non-interactive mode and we haven't found credentials yet, return None - if non_interactive: - logger.error("No credentials found in non-interactive mode") - return None - - # Otherwise, prompt for credentials - return prompt_for_credentials() - - -def prompt_for_credentials() -> Dict[str, str]: - """ - Prompt user for LinkedIn credentials and store them securely. - - Returns: - Dict[str, str]: Dictionary containing email and password - """ - print(f"๐Ÿ”‘ LinkedIn credentials required (will be stored in {get_keyring_name()})") - questions = [ - inquirer.Text("email", message="LinkedIn Email"), - inquirer.Password("password", message="LinkedIn Password"), - ] - credentials = inquirer.prompt(questions) - - if not credentials: - raise KeyboardInterrupt("Credential input was cancelled") - - # Store credentials securely in keyring - if set_secret(EMAIL_KEY, credentials["email"]) and set_secret( - PASSWORD_KEY, credentials["password"] - ): - print(f"โœ… Credentials stored securely in {get_keyring_name()}") - else: - print("โš ๏ธ Warning: Could not store credentials in system keyring.") - print(" Your credentials will only be used for this session.") - - return credentials - - -def clear_credentials() -> bool: - """ - Clear stored credentials from the keyring. 
- - Returns: - bool: True if successful, False otherwise - """ - success = True - try: - # Delete both keys - keyring.delete_password(SERVICE_NAME, EMAIL_KEY) - keyring.delete_password(SERVICE_NAME, PASSWORD_KEY) - print(f"โœ… Credentials removed from {get_keyring_name()}") - except KeyringError as e: - success = False - logger.error(f"Error clearing credentials: {e}") - print(f"โŒ Error clearing credentials: {e}") - - return success From 20a0d4121caf21979fdc00c24e5174f17fababe4 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 4 May 2025 21:04:46 -0400 Subject: [PATCH 023/565] feat(docs): enhance README with configuration system details and secure credential storage instructions --- README.md | 63 ++++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 62 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 31c9cfad..60c6347c 100644 --- a/README.md +++ b/README.md @@ -97,6 +97,7 @@ uv run main.py --no-lazy-init --no-headless export LINKEDIN_EMAIL=your.email@example.com export LINKEDIN_PASSWORD=your_password ``` + - Alternatively, you can run the server once manually and you'll be prompted for credentials, which will then be stored securely in your system's keychain (macOS Keychain, Windows Credential Locker, etc.) 3. **Configure Claude Desktop**: - The server will display and copy to your clipboard the configuration needed for Claude Desktop @@ -120,6 +121,66 @@ Example Claude Desktop configuration: } ``` +## โš™๏ธ Configuration System + +### Configuration Hierarchy + +Configuration values are loaded with the following precedence (highest to lowest): + +1. **Command-line arguments**: + ```bash + uv run main.py --no-headless --debug + ``` + +2. **Environment variables**: + ```bash + export LINKEDIN_EMAIL=your.email@example.com + export LINKEDIN_PASSWORD=your_password + export CHROMEDRIVER=/path/to/chromedriver + ``` + *Note: Environment variables always override credentials stored in the system keychain* + +3. 
**System keychain**: Securely stored credentials from previous sessions + +4. **Default values**: Built-in fallback values + +### Command-line Options + +| Option | Description | +|--------|-------------| +| `--no-headless` | Run Chrome with a visible browser window | +| `--debug` | Enable debug mode with additional logging | +| `--no-setup` | Skip configuration setup prompts | +| `--no-lazy-init` | Initialize Chrome driver immediately (instead of on first use) | + +### Credential Storage + +Your LinkedIn credentials are stored securely using your system's native keychain/credential manager: + +- **macOS**: macOS Keychain +- **Windows**: Windows Credential Locker +- **Linux**: Native keyring (varies by distribution) + +Credentials are managed as follows: + +1. First, the application checks for credentials in environment variables +2. Next, it checks the system keychain for stored credentials +3. If no credentials are found, you'll be prompted to enter them (in interactive mode) +4. Entered credentials are securely stored in your system keychain for future use + +### Clearing Stored Credentials + +If you need to change your stored credentials, run the application with the `--no-lazy-init` flag and when prompted about login failure, select "Yes" to try with different credentials. + +### ChromeDriver Configuration + +The ChromeDriver path is found in this order: +1. From the `CHROMEDRIVER` environment variable +2. Auto-detected from common locations +3. Manually specified when prompted (if auto-detection fails) + +Once specified, the ChromeDriver path is used for the current session but not stored persistently. + ## ๐Ÿ”„ Using with Claude Desktop 1. 
**After adding the configuration** to Claude Desktop, restart the application @@ -134,7 +195,7 @@ Examples of what you can ask Claude: ## ๐Ÿ” Security and Privacy -- Your LinkedIn credentials can be provided through environment variables or stored locally at `~/.linkedin_mcp_credentials.json` with user-only permissions +- Your LinkedIn credentials are securely stored in your system's native keychain/credential manager with user-only permissions - Credentials are never exposed to Claude or any other AI and are only used for the LinkedIn login to scrape data - The server runs on your local machine, not in the cloud - All LinkedIn scraping happens through your account - be aware that profile visits are visible to other users From 454feca9711d74cb0bd4d156d2e801294000f8e6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Sun, 4 May 2025 21:09:28 -0400 Subject: [PATCH 024/565] Update README.md --- README.md | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index 60c6347c..31c29e6a 100644 --- a/README.md +++ b/README.md @@ -105,21 +105,21 @@ uv run main.py --no-lazy-init --no-headless - Paste the configuration provided by the server - Edit the configuration to include your LinkedIn credentials as environment variables -Example Claude Desktop configuration: -```json -{ - "mcpServers": { - "linkedin-scraper": { - "command": "/path/to/uv", - "args": ["--directory", "/path/to/project", "run", "main.py", "--no-setup"], - "env": { - "LINKEDIN_EMAIL": "your.email@example.com", - "LINKEDIN_PASSWORD": "your_password" - } - } - } -} -``` + Example Claude Desktop configuration: + ```json + { + "mcpServers": { + "linkedin-scraper": { + "command": "/path/to/uv", + "args": ["--directory", "/path/to/project", "run", "main.py", "--no-setup"], + "env": { + "LINKEDIN_EMAIL": "your.email@example.com", + "LINKEDIN_PASSWORD": "your_password" + } + } + } + } + ``` ## 
โš™๏ธ Configuration System @@ -185,7 +185,7 @@ Once specified, the ChromeDriver path is used for the current session but not st 1. **After adding the configuration** to Claude Desktop, restart the application 2. **Start a conversation** with Claude -3. **You'll see tools available** in the tools menu (hammer icon) +3. **You'll see tools available** in the tools menu (settings icon) 4. **You can now ask Claude** to retrieve LinkedIn profiles, search for jobs, etc. Examples of what you can ask Claude: From 2799b98d73b58eedd89cda839b6be4e4cdefb3c9 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 27 May 2025 15:21:55 -0400 Subject: [PATCH 025/565] chore(vscode): add settings for automatic formatting and import organization in Python files --- .vscode/settings.json | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..73ce5e12 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,16 @@ +{ + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.fixAll": "explicit", + "source.organizeImports": "explicit" + }, + "editor.defaultFormatter": "charliermarsh.ruff", + "[python]": { + "editor.defaultFormatter": "charliermarsh.ruff", + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.fixAll": "explicit", + "source.organizeImports.ruff": "explicit" + } + }, +} From d9fa499d8405468e8a05f7f364a54f23930f14d9 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 27 May 2025 15:23:56 -0400 Subject: [PATCH 026/565] chore(dependencies): update linkedin-scraper repository URL to original source instead of my fork as my pr was merged --- pyproject.toml | 2 +- uv.lock | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a22fe9c9..348f70a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,7 +17,7 @@ dependencies = [ ] [tool.uv.sources] 
-linkedin-scraper = { git = "https://github.com/stickerdaniel/linkedin_scraper.git" } +linkedin-scraper = { git = "https://github.com/joeyism/linkedin_scraper.git" } [tool.setuptools.package-data] linkedin_mcp_server = ["py.typed"] diff --git a/uv.lock b/uv.lock index d943e27b..ba2befcd 100644 --- a/uv.lock +++ b/uv.lock @@ -490,7 +490,7 @@ requires-dist = [ { name = "httpx", specifier = ">=0.28.1" }, { name = "inquirer", specifier = ">=3.4.0" }, { name = "keyring", specifier = ">=25.6.0" }, - { name = "linkedin-scraper", git = "https://github.com/stickerdaniel/linkedin_scraper.git" }, + { name = "linkedin-scraper", git = "https://github.com/joeyism/linkedin_scraper.git" }, { name = "mcp", extras = ["cli"], specifier = ">=1.6.0" }, { name = "mypy", specifier = ">=1.15.0" }, { name = "pre-commit", specifier = ">=4.2.0" }, @@ -500,7 +500,7 @@ requires-dist = [ [[package]] name = "linkedin-scraper" version = "2.11.5" -source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git#23ee0ece83c2ed2b97e697ead2aadc4708e8fa8a" } +source = { git = "https://github.com/joeyism/linkedin_scraper.git#44eafb893e691732474e37a20123c5cc9007e0ad" } dependencies = [ { name = "lxml" }, { name = "requests" }, From af2e0639425fde84d630be581b3d0f6fc7e4dbf2 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 27 May 2025 15:51:38 -0400 Subject: [PATCH 027/565] refactor: move to flat layout for clone-and-run usage - Move src/linkedin_mcp_server/ to root as linkedin_mcp_server/ - Remove unnecessary build system configuration - Simplify project structure for direct repository usage - Maintain all functionality with cleaner imports --- .gitignore | 187 +++++++++++++++++- .../__init__.py | 0 .../cli.py | 0 .../config/__init__.py | 0 .../config/loaders.py | 0 .../config/providers.py | 0 .../config/schema.py | 0 .../config/secrets.py | 0 .../drivers/__init__.py | 0 .../drivers/chrome.py | 0 .../server.py | 0 .../tools/__init__.py | 0 .../tools/company.py | 0 .../tools/job.py | 0 
.../tools/person.py | 0 pyproject.toml | 6 +- uv.lock | 158 +++++++-------- 17 files changed, 265 insertions(+), 86 deletions(-) rename {src/linkedin_mcp_server => linkedin_mcp_server}/__init__.py (100%) rename {src/linkedin_mcp_server => linkedin_mcp_server}/cli.py (100%) rename {src/linkedin_mcp_server => linkedin_mcp_server}/config/__init__.py (100%) rename {src/linkedin_mcp_server => linkedin_mcp_server}/config/loaders.py (100%) rename {src/linkedin_mcp_server => linkedin_mcp_server}/config/providers.py (100%) rename {src/linkedin_mcp_server => linkedin_mcp_server}/config/schema.py (100%) rename {src/linkedin_mcp_server => linkedin_mcp_server}/config/secrets.py (100%) rename {src/linkedin_mcp_server => linkedin_mcp_server}/drivers/__init__.py (100%) rename {src/linkedin_mcp_server => linkedin_mcp_server}/drivers/chrome.py (100%) rename {src/linkedin_mcp_server => linkedin_mcp_server}/server.py (100%) rename {src/linkedin_mcp_server => linkedin_mcp_server}/tools/__init__.py (100%) rename {src/linkedin_mcp_server => linkedin_mcp_server}/tools/company.py (100%) rename {src/linkedin_mcp_server => linkedin_mcp_server}/tools/job.py (100%) rename {src/linkedin_mcp_server => linkedin_mcp_server}/tools/person.py (100%) diff --git a/.gitignore b/.gitignore index 505a3b1c..907cba9c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,189 @@ -# Python-generated files +# Byte-compiled / optimized / DLL files __pycache__/ -*.py[oc] +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python build/ +develop-eggs/ dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ wheels/ -*.egg-info +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# UV +# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +#uv.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock -# Virtual environments +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. 
+# https://pdm.fming.dev/latest/usage/project/#working-with-version-control +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env .venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +# Visual Studio Code +# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore +# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore +# and can be added to the global gitignore or merged into this file. However, if you prefer, +# you could uncomment the following to ignore the enitre vscode folder +# .vscode/ + +# Ruff stuff: +.ruff_cache/ + +# PyPI configuration file +.pypirc + +# Cursor +# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to +# exclude from AI features like autocomplete and code analysis. 
Recommended for sensitive data +# refer to https://docs.cursor.com/context/ignore-files +.cursorignore +.cursorindexingignore +.cursor diff --git a/src/linkedin_mcp_server/__init__.py b/linkedin_mcp_server/__init__.py similarity index 100% rename from src/linkedin_mcp_server/__init__.py rename to linkedin_mcp_server/__init__.py diff --git a/src/linkedin_mcp_server/cli.py b/linkedin_mcp_server/cli.py similarity index 100% rename from src/linkedin_mcp_server/cli.py rename to linkedin_mcp_server/cli.py diff --git a/src/linkedin_mcp_server/config/__init__.py b/linkedin_mcp_server/config/__init__.py similarity index 100% rename from src/linkedin_mcp_server/config/__init__.py rename to linkedin_mcp_server/config/__init__.py diff --git a/src/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py similarity index 100% rename from src/linkedin_mcp_server/config/loaders.py rename to linkedin_mcp_server/config/loaders.py diff --git a/src/linkedin_mcp_server/config/providers.py b/linkedin_mcp_server/config/providers.py similarity index 100% rename from src/linkedin_mcp_server/config/providers.py rename to linkedin_mcp_server/config/providers.py diff --git a/src/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py similarity index 100% rename from src/linkedin_mcp_server/config/schema.py rename to linkedin_mcp_server/config/schema.py diff --git a/src/linkedin_mcp_server/config/secrets.py b/linkedin_mcp_server/config/secrets.py similarity index 100% rename from src/linkedin_mcp_server/config/secrets.py rename to linkedin_mcp_server/config/secrets.py diff --git a/src/linkedin_mcp_server/drivers/__init__.py b/linkedin_mcp_server/drivers/__init__.py similarity index 100% rename from src/linkedin_mcp_server/drivers/__init__.py rename to linkedin_mcp_server/drivers/__init__.py diff --git a/src/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py similarity index 100% rename from 
src/linkedin_mcp_server/drivers/chrome.py rename to linkedin_mcp_server/drivers/chrome.py diff --git a/src/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py similarity index 100% rename from src/linkedin_mcp_server/server.py rename to linkedin_mcp_server/server.py diff --git a/src/linkedin_mcp_server/tools/__init__.py b/linkedin_mcp_server/tools/__init__.py similarity index 100% rename from src/linkedin_mcp_server/tools/__init__.py rename to linkedin_mcp_server/tools/__init__.py diff --git a/src/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py similarity index 100% rename from src/linkedin_mcp_server/tools/company.py rename to linkedin_mcp_server/tools/company.py diff --git a/src/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py similarity index 100% rename from src/linkedin_mcp_server/tools/job.py rename to linkedin_mcp_server/tools/job.py diff --git a/src/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py similarity index 100% rename from src/linkedin_mcp_server/tools/person.py rename to linkedin_mcp_server/tools/person.py diff --git a/pyproject.toml b/pyproject.toml index 348f70a5..9967b8e7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,8 +16,8 @@ dependencies = [ "pyperclip>=1.9.0", ] -[tool.uv.sources] -linkedin-scraper = { git = "https://github.com/joeyism/linkedin_scraper.git" } - [tool.setuptools.package-data] linkedin_mcp_server = ["py.typed"] + +[tool.uv.sources] +linkedin-scraper = { git = "https://github.com/joeyism/linkedin_scraper.git" } diff --git a/uv.lock b/uv.lock index ba2befcd..498df0e3 100644 --- a/uv.lock +++ b/uv.lock @@ -59,11 +59,11 @@ wheels = [ [[package]] name = "certifi" -version = "2025.1.31" +version = "2025.4.26" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = 
"sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577, upload-time = "2025-01-31T02:16:47.166Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393, upload-time = "2025-01-31T02:16:45.015Z" }, + { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" }, ] [[package]] @@ -106,37 +106,37 @@ wheels = [ [[package]] name = "charset-normalizer" -version = "3.4.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188, upload-time = "2024-12-24T18:12:35.43Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105, upload-time = "2024-12-24T18:10:38.83Z" }, - { url = 
"https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404, upload-time = "2024-12-24T18:10:44.272Z" }, - { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423, upload-time = "2024-12-24T18:10:45.492Z" }, - { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184, upload-time = "2024-12-24T18:10:47.898Z" }, - { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268, upload-time = "2024-12-24T18:10:50.589Z" }, - { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601, upload-time = "2024-12-24T18:10:52.541Z" }, - { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098, upload-time = "2024-12-24T18:10:53.789Z" }, - { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520, upload-time = "2024-12-24T18:10:55.048Z" }, - { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852, upload-time = "2024-12-24T18:10:57.647Z" }, - { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488, upload-time = "2024-12-24T18:10:59.43Z" }, - { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192, upload-time = "2024-12-24T18:11:00.676Z" }, - { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550, upload-time = "2024-12-24T18:11:01.952Z" }, - { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785, 
upload-time = "2024-12-24T18:11:03.142Z" }, - { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698, upload-time = "2024-12-24T18:11:05.834Z" }, - { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162, upload-time = "2024-12-24T18:11:07.064Z" }, - { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263, upload-time = "2024-12-24T18:11:08.374Z" }, - { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966, upload-time = "2024-12-24T18:11:09.831Z" }, - { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992, upload-time = "2024-12-24T18:11:12.03Z" }, - { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162, upload-time = "2024-12-24T18:11:13.372Z" }, - { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972, upload-time = "2024-12-24T18:11:14.628Z" }, - { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095, upload-time = "2024-12-24T18:11:17.672Z" }, - { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668, upload-time = "2024-12-24T18:11:18.989Z" }, - { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073, upload-time = "2024-12-24T18:11:21.507Z" }, - { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732, upload-time = "2024-12-24T18:11:22.774Z" }, - { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", 
size = 95391, upload-time = "2024-12-24T18:11:24.139Z" }, - { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702, upload-time = "2024-12-24T18:11:26.535Z" }, - { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767, upload-time = "2024-12-24T18:12:32.852Z" }, +version = "3.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, + { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, + { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, + { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, + { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, + { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, + { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, + { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, + { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, + { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, + { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, + { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, + { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, + { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, + { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, + { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, + { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, + { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, + { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, + { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, 
upload-time = "2025-05-02T08:33:18.753Z" }, + { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, ] [[package]] @@ -471,7 +471,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" version = "0.1.0" -source = { virtual = "." } +source = { editable = "." } dependencies = [ { name = "fastapi", extra = ["standard"] }, { name = "httpx" }, @@ -509,44 +509,44 @@ dependencies = [ [[package]] name = "lxml" -version = "5.3.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/80/61/d3dc048cd6c7be6fe45b80cedcbdd4326ba4d550375f266d9f4246d0f4bc/lxml-5.3.2.tar.gz", hash = "sha256:773947d0ed809ddad824b7b14467e1a481b8976e87278ac4a730c2f7c7fcddc1", size = 3679948, upload-time = "2025-04-05T18:31:58.757Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/7e/c749257a7fabc712c4df57927b0f703507f316e9f2c7e3219f8f76d36145/lxml-5.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:16b3897691ec0316a1aa3c6585f61c8b7978475587c5b16fc1d2c28d283dc1b0", size = 8193212, upload-time = "2025-04-05T18:26:42.692Z" }, - { url = "https://files.pythonhosted.org/packages/a8/50/17e985ba162c9f1ca119f4445004b58f9e5ef559ded599b16755e9bfa260/lxml-5.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a8d4b34a0eeaf6e73169dcfd653c8d47f25f09d806c010daf074fba2db5e2d3f", size = 4451439, upload-time = "2025-04-05T18:26:46.468Z" }, - { url = "https://files.pythonhosted.org/packages/c2/b5/4960ba0fcca6ce394ed4a2f89ee13083e7fcbe9641a91166e8e9792fedb1/lxml-5.3.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cd7a959396da425022e1e4214895b5cfe7de7035a043bcc2d11303792b67554", size = 5052146, upload-time = 
"2025-04-05T18:26:49.737Z" }, - { url = "https://files.pythonhosted.org/packages/5f/d1/184b04481a5d1f5758916de087430752a7b229bddbd6c1d23405078c72bd/lxml-5.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cac5eaeec3549c5df7f8f97a5a6db6963b91639389cdd735d5a806370847732b", size = 4789082, upload-time = "2025-04-05T18:26:52.295Z" }, - { url = "https://files.pythonhosted.org/packages/7d/75/1a19749d373e9a3d08861addccdf50c92b628c67074b22b8f3c61997cf5a/lxml-5.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29b5f7d77334877c2146e7bb8b94e4df980325fab0a8af4d524e5d43cd6f789d", size = 5312300, upload-time = "2025-04-05T18:26:54.923Z" }, - { url = "https://files.pythonhosted.org/packages/fb/00/9d165d4060d3f347e63b219fcea5c6a3f9193e9e2868c6801e18e5379725/lxml-5.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13f3495cfec24e3d63fffd342cc8141355d1d26ee766ad388775f5c8c5ec3932", size = 4836655, upload-time = "2025-04-05T18:26:57.488Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/06720a33cc155966448a19677f079100517b6629a872382d22ebd25e48aa/lxml-5.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e70ad4c9658beeff99856926fd3ee5fde8b519b92c693f856007177c36eb2e30", size = 4961795, upload-time = "2025-04-05T18:27:00.126Z" }, - { url = "https://files.pythonhosted.org/packages/2d/57/4540efab2673de2904746b37ef7f74385329afd4643ed92abcc9ec6e00ca/lxml-5.3.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:507085365783abd7879fa0a6fa55eddf4bdd06591b17a2418403bb3aff8a267d", size = 4779791, upload-time = "2025-04-05T18:27:03.061Z" }, - { url = "https://files.pythonhosted.org/packages/99/ad/6056edf6c9f4fa1d41e6fbdae52c733a4a257fd0d7feccfa26ae051bb46f/lxml-5.3.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:5bb304f67cbf5dfa07edad904732782cbf693286b9cd85af27059c5779131050", size = 5346807, upload-time = "2025-04-05T18:27:05.877Z" }, - { url = 
"https://files.pythonhosted.org/packages/a1/fa/5be91fc91a18f3f705ea5533bc2210b25d738c6b615bf1c91e71a9b2f26b/lxml-5.3.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:3d84f5c093645c21c29a4e972b84cb7cf682f707f8706484a5a0c7ff13d7a988", size = 4909213, upload-time = "2025-04-05T18:27:08.588Z" }, - { url = "https://files.pythonhosted.org/packages/f3/74/71bb96a3b5ae36b74e0402f4fa319df5559a8538577f8c57c50f1b57dc15/lxml-5.3.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:bdc13911db524bd63f37b0103af014b7161427ada41f1b0b3c9b5b5a9c1ca927", size = 4987694, upload-time = "2025-04-05T18:27:11.66Z" }, - { url = "https://files.pythonhosted.org/packages/08/c2/3953a68b0861b2f97234b1838769269478ccf872d8ea7a26e911238220ad/lxml-5.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ec944539543f66ebc060ae180d47e86aca0188bda9cbfadff47d86b0dc057dc", size = 4862865, upload-time = "2025-04-05T18:27:14.194Z" }, - { url = "https://files.pythonhosted.org/packages/e0/9a/52e48f7cfd5a5e61f44a77e679880580dfb4f077af52d6ed5dd97e3356fe/lxml-5.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:59d437cc8a7f838282df5a199cf26f97ef08f1c0fbec6e84bd6f5cc2b7913f6e", size = 5423383, upload-time = "2025-04-05T18:27:16.988Z" }, - { url = "https://files.pythonhosted.org/packages/17/67/42fe1d489e4dcc0b264bef361aef0b929fbb2b5378702471a3043bc6982c/lxml-5.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e275961adbd32e15672e14e0cc976a982075208224ce06d149c92cb43db5b93", size = 5286864, upload-time = "2025-04-05T18:27:19.703Z" }, - { url = "https://files.pythonhosted.org/packages/29/e4/03b1d040ee3aaf2bd4e1c2061de2eae1178fe9a460d3efc1ea7ef66f6011/lxml-5.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:038aeb6937aa404480c2966b7f26f1440a14005cb0702078c173c028eca72c31", size = 5056819, upload-time = "2025-04-05T18:27:22.814Z" }, - { url = 
"https://files.pythonhosted.org/packages/83/b3/e2ec8a6378e4d87da3af9de7c862bcea7ca624fc1a74b794180c82e30123/lxml-5.3.2-cp312-cp312-win32.whl", hash = "sha256:3c2c8d0fa3277147bff180e3590be67597e17d365ce94beb2efa3138a2131f71", size = 3486177, upload-time = "2025-04-05T18:27:25.078Z" }, - { url = "https://files.pythonhosted.org/packages/d5/8a/6a08254b0bab2da9573735725caab8302a2a1c9b3818533b41568ca489be/lxml-5.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:77809fcd97dfda3f399102db1794f7280737b69830cd5c961ac87b3c5c05662d", size = 3817134, upload-time = "2025-04-05T18:27:27.481Z" }, - { url = "https://files.pythonhosted.org/packages/19/fe/904fd1b0ba4f42ed5a144fcfff7b8913181892a6aa7aeb361ee783d441f8/lxml-5.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:77626571fb5270ceb36134765f25b665b896243529eefe840974269b083e090d", size = 8173598, upload-time = "2025-04-05T18:27:31.229Z" }, - { url = "https://files.pythonhosted.org/packages/97/e8/5e332877b3ce4e2840507b35d6dbe1cc33b17678ece945ba48d2962f8c06/lxml-5.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:78a533375dc7aa16d0da44af3cf6e96035e484c8c6b2b2445541a5d4d3d289ee", size = 4441586, upload-time = "2025-04-05T18:27:33.883Z" }, - { url = "https://files.pythonhosted.org/packages/de/f4/8fe2e6d8721803182fbce2325712e98f22dbc478126070e62731ec6d54a0/lxml-5.3.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6f62b2404b3f3f0744bbcabb0381c5fe186fa2a9a67ecca3603480f4846c585", size = 5038447, upload-time = "2025-04-05T18:27:36.426Z" }, - { url = "https://files.pythonhosted.org/packages/a6/ac/fa63f86a1a4b1ba8b03599ad9e2f5212fa813223ac60bfe1155390d1cc0c/lxml-5.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ea918da00091194526d40c30c4996971f09dacab032607581f8d8872db34fbf", size = 4783583, upload-time = "2025-04-05T18:27:39.492Z" }, - { url = 
"https://files.pythonhosted.org/packages/1a/7a/08898541296a02c868d4acc11f31a5839d80f5b21d4a96f11d4c0fbed15e/lxml-5.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c35326f94702a7264aa0eea826a79547d3396a41ae87a70511b9f6e9667ad31c", size = 5305684, upload-time = "2025-04-05T18:27:42.16Z" }, - { url = "https://files.pythonhosted.org/packages/0b/be/9a6d80b467771b90be762b968985d3de09e0d5886092238da65dac9c1f75/lxml-5.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3bef90af21d31c4544bc917f51e04f94ae11b43156356aff243cdd84802cbf2", size = 4830797, upload-time = "2025-04-05T18:27:45.071Z" }, - { url = "https://files.pythonhosted.org/packages/8d/1c/493632959f83519802637f7db3be0113b6e8a4e501b31411fbf410735a75/lxml-5.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52fa7ba11a495b7cbce51573c73f638f1dcff7b3ee23697467dc063f75352a69", size = 4950302, upload-time = "2025-04-05T18:27:47.979Z" }, - { url = "https://files.pythonhosted.org/packages/c7/13/01aa3b92a6b93253b90c061c7527261b792f5ae7724b420cded733bfd5d6/lxml-5.3.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ad131e2c4d2c3803e736bb69063382334e03648de2a6b8f56a878d700d4b557d", size = 4775247, upload-time = "2025-04-05T18:27:51.174Z" }, - { url = "https://files.pythonhosted.org/packages/60/4a/baeb09fbf5c84809e119c9cf8e2e94acec326a9b45563bf5ae45a234973b/lxml-5.3.2-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:00a4463ca409ceacd20490a893a7e08deec7870840eff33dc3093067b559ce3e", size = 5338824, upload-time = "2025-04-05T18:27:54.15Z" }, - { url = "https://files.pythonhosted.org/packages/69/c7/a05850f169ad783ed09740ac895e158b06d25fce4b13887a8ac92a84d61c/lxml-5.3.2-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:87e8d78205331cace2b73ac8249294c24ae3cba98220687b5b8ec5971a2267f1", size = 4899079, upload-time = "2025-04-05T18:27:57.03Z" }, - { url = 
"https://files.pythonhosted.org/packages/de/48/18ca583aba5235582db0e933ed1af6540226ee9ca16c2ee2d6f504fcc34a/lxml-5.3.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bf6389133bb255e530a4f2f553f41c4dd795b1fbb6f797aea1eff308f1e11606", size = 4978041, upload-time = "2025-04-05T18:27:59.918Z" }, - { url = "https://files.pythonhosted.org/packages/b6/55/6968ddc88554209d1dba0dca196360c629b3dfe083bc32a3370f9523a0c4/lxml-5.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b3709fc752b42fb6b6ffa2ba0a5b9871646d97d011d8f08f4d5b3ee61c7f3b2b", size = 4859761, upload-time = "2025-04-05T18:28:02.83Z" }, - { url = "https://files.pythonhosted.org/packages/2e/52/d2d3baa1e0b7d04a729613160f1562f466fb1a0e45085a33acb0d6981a2b/lxml-5.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:abc795703d0de5d83943a4badd770fbe3d1ca16ee4ff3783d7caffc252f309ae", size = 5418209, upload-time = "2025-04-05T18:28:05.851Z" }, - { url = "https://files.pythonhosted.org/packages/d3/50/6005b297ba5f858a113d6e81ccdb3a558b95a615772e7412d1f1cbdf22d7/lxml-5.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98050830bb6510159f65d9ad1b8aca27f07c01bb3884ba95f17319ccedc4bcf9", size = 5274231, upload-time = "2025-04-05T18:28:08.849Z" }, - { url = "https://files.pythonhosted.org/packages/fb/33/6f40c09a5f7d7e7fcb85ef75072e53eba3fbadbf23e4991ca069ab2b1abb/lxml-5.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6ba465a91acc419c5682f8b06bcc84a424a7aa5c91c220241c6fd31de2a72bc6", size = 5051899, upload-time = "2025-04-05T18:28:11.729Z" }, - { url = "https://files.pythonhosted.org/packages/8b/3a/673bc5c0d5fb6596ee2963dd016fdaefaed2c57ede82c7634c08cbda86c1/lxml-5.3.2-cp313-cp313-win32.whl", hash = "sha256:56a1d56d60ea1ec940f949d7a309e0bff05243f9bd337f585721605670abb1c1", size = 3485315, upload-time = "2025-04-05T18:28:14.815Z" }, - { url = "https://files.pythonhosted.org/packages/8c/be/cab8dd33b0dbe3af5b5d4d24137218f79ea75d540f74eb7d8581195639e0/lxml-5.3.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:1a580dc232c33d2ad87d02c8a3069d47abbcdce974b9c9cc82a79ff603065dbe", size = 3814639, upload-time = "2025-04-05T18:28:17.268Z" }, +version = "5.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/76/3d/14e82fc7c8fb1b7761f7e748fd47e2ec8276d137b6acfe5a4bb73853e08f/lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd", size = 3679479, upload-time = "2025-04-23T01:50:29.322Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/4c/d101ace719ca6a4ec043eb516fcfcb1b396a9fccc4fcd9ef593df34ba0d5/lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4", size = 8127392, upload-time = "2025-04-23T01:46:04.09Z" }, + { url = "https://files.pythonhosted.org/packages/11/84/beddae0cec4dd9ddf46abf156f0af451c13019a0fa25d7445b655ba5ccb7/lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d", size = 4415103, upload-time = "2025-04-23T01:46:07.227Z" }, + { url = "https://files.pythonhosted.org/packages/d0/25/d0d93a4e763f0462cccd2b8a665bf1e4343dd788c76dcfefa289d46a38a9/lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779", size = 5024224, upload-time = "2025-04-23T01:46:10.237Z" }, + { url = "https://files.pythonhosted.org/packages/31/ce/1df18fb8f7946e7f3388af378b1f34fcf253b94b9feedb2cec5969da8012/lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e", size = 4769913, upload-time = "2025-04-23T01:46:12.757Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/62/f4a6c60ae7c40d43657f552f3045df05118636be1165b906d3423790447f/lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9", size = 5290441, upload-time = "2025-04-23T01:46:16.037Z" }, + { url = "https://files.pythonhosted.org/packages/9e/aa/04f00009e1e3a77838c7fc948f161b5d2d5de1136b2b81c712a263829ea4/lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5", size = 4820165, upload-time = "2025-04-23T01:46:19.137Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/e0b2f61fa2404bf0f1fdf1898377e5bd1b74cc9b2cf2c6ba8509b8f27990/lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5", size = 4932580, upload-time = "2025-04-23T01:46:21.963Z" }, + { url = "https://files.pythonhosted.org/packages/24/a2/8263f351b4ffe0ed3e32ea7b7830f845c795349034f912f490180d88a877/lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4", size = 4759493, upload-time = "2025-04-23T01:46:24.316Z" }, + { url = "https://files.pythonhosted.org/packages/05/00/41db052f279995c0e35c79d0f0fc9f8122d5b5e9630139c592a0b58c71b4/lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e", size = 5324679, upload-time = "2025-04-23T01:46:27.097Z" }, + { url = "https://files.pythonhosted.org/packages/1d/be/ee99e6314cdef4587617d3b3b745f9356d9b7dd12a9663c5f3b5734b64ba/lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7", size = 4890691, upload-time = "2025-04-23T01:46:30.009Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/36/239820114bf1d71f38f12208b9c58dec033cbcf80101cde006b9bde5cffd/lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079", size = 4955075, upload-time = "2025-04-23T01:46:32.33Z" }, + { url = "https://files.pythonhosted.org/packages/d4/e1/1b795cc0b174efc9e13dbd078a9ff79a58728a033142bc6d70a1ee8fc34d/lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20", size = 4838680, upload-time = "2025-04-23T01:46:34.852Z" }, + { url = "https://files.pythonhosted.org/packages/72/48/3c198455ca108cec5ae3662ae8acd7fd99476812fd712bb17f1b39a0b589/lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8", size = 5391253, upload-time = "2025-04-23T01:46:37.608Z" }, + { url = "https://files.pythonhosted.org/packages/d6/10/5bf51858971c51ec96cfc13e800a9951f3fd501686f4c18d7d84fe2d6352/lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f", size = 5261651, upload-time = "2025-04-23T01:46:40.183Z" }, + { url = "https://files.pythonhosted.org/packages/2b/11/06710dd809205377da380546f91d2ac94bad9ff735a72b64ec029f706c85/lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc", size = 5024315, upload-time = "2025-04-23T01:46:43.333Z" }, + { url = "https://files.pythonhosted.org/packages/f5/b0/15b6217834b5e3a59ebf7f53125e08e318030e8cc0d7310355e6edac98ef/lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f", size = 3486149, upload-time = "2025-04-23T01:46:45.684Z" }, + { url = "https://files.pythonhosted.org/packages/91/1e/05ddcb57ad2f3069101611bd5f5084157d90861a2ef460bf42f45cced944/lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2", size = 3817095, upload-time = "2025-04-23T01:46:48.521Z" }, + { url = "https://files.pythonhosted.org/packages/87/cb/2ba1e9dd953415f58548506fa5549a7f373ae55e80c61c9041b7fd09a38a/lxml-5.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:773e27b62920199c6197130632c18fb7ead3257fce1ffb7d286912e56ddb79e0", size = 8110086, upload-time = "2025-04-23T01:46:52.218Z" }, + { url = "https://files.pythonhosted.org/packages/b5/3e/6602a4dca3ae344e8609914d6ab22e52ce42e3e1638c10967568c5c1450d/lxml-5.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ce9c671845de9699904b1e9df95acfe8dfc183f2310f163cdaa91a3535af95de", size = 4404613, upload-time = "2025-04-23T01:46:55.281Z" }, + { url = "https://files.pythonhosted.org/packages/4c/72/bf00988477d3bb452bef9436e45aeea82bb40cdfb4684b83c967c53909c7/lxml-5.4.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9454b8d8200ec99a224df8854786262b1bd6461f4280064c807303c642c05e76", size = 5012008, upload-time = "2025-04-23T01:46:57.817Z" }, + { url = "https://files.pythonhosted.org/packages/92/1f/93e42d93e9e7a44b2d3354c462cd784dbaaf350f7976b5d7c3f85d68d1b1/lxml-5.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccd007d5c95279e529c146d095f1d39ac05139de26c098166c4beb9374b0f4d", size = 4760915, upload-time = "2025-04-23T01:47:00.745Z" }, + { url = "https://files.pythonhosted.org/packages/45/0b/363009390d0b461cf9976a499e83b68f792e4c32ecef092f3f9ef9c4ba54/lxml-5.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fce1294a0497edb034cb416ad3e77ecc89b313cff7adbee5334e4dc0d11f422", size = 5283890, upload-time = "2025-04-23T01:47:04.702Z" }, + { url = "https://files.pythonhosted.org/packages/19/dc/6056c332f9378ab476c88e301e6549a0454dbee8f0ae16847414f0eccb74/lxml-5.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:24974f774f3a78ac12b95e3a20ef0931795ff04dbb16db81a90c37f589819551", size = 4812644, upload-time = "2025-04-23T01:47:07.833Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8a/f8c66bbb23ecb9048a46a5ef9b495fd23f7543df642dabeebcb2eeb66592/lxml-5.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:497cab4d8254c2a90bf988f162ace2ddbfdd806fce3bda3f581b9d24c852e03c", size = 4921817, upload-time = "2025-04-23T01:47:10.317Z" }, + { url = "https://files.pythonhosted.org/packages/04/57/2e537083c3f381f83d05d9b176f0d838a9e8961f7ed8ddce3f0217179ce3/lxml-5.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e794f698ae4c5084414efea0f5cc9f4ac562ec02d66e1484ff822ef97c2cadff", size = 4753916, upload-time = "2025-04-23T01:47:12.823Z" }, + { url = "https://files.pythonhosted.org/packages/d8/80/ea8c4072109a350848f1157ce83ccd9439601274035cd045ac31f47f3417/lxml-5.4.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:2c62891b1ea3094bb12097822b3d44b93fc6c325f2043c4d2736a8ff09e65f60", size = 5289274, upload-time = "2025-04-23T01:47:15.916Z" }, + { url = "https://files.pythonhosted.org/packages/b3/47/c4be287c48cdc304483457878a3f22999098b9a95f455e3c4bda7ec7fc72/lxml-5.4.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:142accb3e4d1edae4b392bd165a9abdee8a3c432a2cca193df995bc3886249c8", size = 4874757, upload-time = "2025-04-23T01:47:19.793Z" }, + { url = "https://files.pythonhosted.org/packages/2f/04/6ef935dc74e729932e39478e44d8cfe6a83550552eaa072b7c05f6f22488/lxml-5.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1a42b3a19346e5601d1b8296ff6ef3d76038058f311902edd574461e9c036982", size = 4947028, upload-time = "2025-04-23T01:47:22.401Z" }, + { url = "https://files.pythonhosted.org/packages/cb/f9/c33fc8daa373ef8a7daddb53175289024512b6619bc9de36d77dca3df44b/lxml-5.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4291d3c409a17febf817259cb37bc62cb7eb398bcc95c1356947e2871911ae61", size = 4834487, upload-time = 
"2025-04-23T01:47:25.513Z" }, + { url = "https://files.pythonhosted.org/packages/8d/30/fc92bb595bcb878311e01b418b57d13900f84c2b94f6eca9e5073ea756e6/lxml-5.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f5322cf38fe0e21c2d73901abf68e6329dc02a4994e483adbcf92b568a09a54", size = 5381688, upload-time = "2025-04-23T01:47:28.454Z" }, + { url = "https://files.pythonhosted.org/packages/43/d1/3ba7bd978ce28bba8e3da2c2e9d5ae3f8f521ad3f0ca6ea4788d086ba00d/lxml-5.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0be91891bdb06ebe65122aa6bf3fc94489960cf7e03033c6f83a90863b23c58b", size = 5242043, upload-time = "2025-04-23T01:47:31.208Z" }, + { url = "https://files.pythonhosted.org/packages/ee/cd/95fa2201041a610c4d08ddaf31d43b98ecc4b1d74b1e7245b1abdab443cb/lxml-5.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:15a665ad90054a3d4f397bc40f73948d48e36e4c09f9bcffc7d90c87410e478a", size = 5021569, upload-time = "2025-04-23T01:47:33.805Z" }, + { url = "https://files.pythonhosted.org/packages/2d/a6/31da006fead660b9512d08d23d31e93ad3477dd47cc42e3285f143443176/lxml-5.4.0-cp313-cp313-win32.whl", hash = "sha256:d5663bc1b471c79f5c833cffbc9b87d7bf13f87e055a5c86c363ccd2348d7e82", size = 3485270, upload-time = "2025-04-23T01:47:36.133Z" }, + { url = "https://files.pythonhosted.org/packages/fc/14/c115516c62a7d2499781d2d3d7215218c0731b2c940753bf9f9b7b73924d/lxml-5.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:bcb7a1096b4b6b24ce1ac24d4942ad98f983cd3810f9711bcd0293f43a9d8b9f", size = 3814606, upload-time = "2025-04-23T01:47:39.028Z" }, ] [[package]] @@ -956,7 +956,7 @@ wheels = [ [[package]] name = "selenium" -version = "4.31.0" +version = "4.33.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -966,9 +966,9 @@ dependencies = [ { name = "urllib3", extra = ["socks"] }, { name = "websocket-client" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/e0/bf/642cce8b5a9edad8e4880fdefbeb24f69bec2086b1121c63f883c412b797/selenium-4.31.0.tar.gz", hash = "sha256:441cffc436a2e6659fe3cfb012692435652efd38b0d368d16f661a5db47825f5", size = 855418, upload-time = "2025-04-05T00:43:06.447Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5f/7e/4145666dd275760b56d0123a9439915af167932dd6caa19b5f8b281ae297/selenium-4.33.0.tar.gz", hash = "sha256:d90974db95d2cdeb34d2fb1b13f03dc904f53e6c5d228745b0635ada10cd625d", size = 882387, upload-time = "2025-05-23T17:45:22.046Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/53/212db779d2481b0a8428365960596f8d5a4d482ae12c441d0507fd54aaf2/selenium-4.31.0-py3-none-any.whl", hash = "sha256:7b8b8d5e424d7133cb7aa656263b19ac505ec26d65c0f921a696e7e2c5ccd95b", size = 9350584, upload-time = "2025-04-05T00:43:04.04Z" }, + { url = "https://files.pythonhosted.org/packages/7e/c0/092fde36918574e144613de73ba43c36ab8d31e7d36bb44c35261909452d/selenium-4.33.0-py3-none-any.whl", hash = "sha256:af9ea757813918bddfe05cc677bf63c8a0cd277ebf8474b3dd79caa5727fca85", size = 9370835, upload-time = "2025-05-23T17:45:19.448Z" }, ] [[package]] @@ -1034,7 +1034,7 @@ wheels = [ [[package]] name = "trio" -version = "0.29.0" +version = "0.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -1044,9 +1044,9 @@ dependencies = [ { name = "sniffio" }, { name = "sortedcontainers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/47/f62e62a1a6f37909aed0bf8f5d5411e06fa03846cfcb64540cd1180ccc9f/trio-0.29.0.tar.gz", hash = "sha256:ea0d3967159fc130acb6939a0be0e558e364fee26b5deeecc893a6b08c361bdf", size = 588952, upload-time = "2025-02-14T07:13:50.724Z" } +sdist = { url = "https://files.pythonhosted.org/packages/01/c1/68d582b4d3a1c1f8118e18042464bb12a7c1b75d64d75111b297687041e3/trio-0.30.0.tar.gz", hash = "sha256:0781c857c0c81f8f51e0089929a26b5bb63d57f927728a5586f7e36171f064df", size = 593776, upload-time = 
"2025-04-21T00:48:19.507Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/55/c4d9bea8b3d7937901958f65124123512419ab0eb73695e5f382521abbfb/trio-0.29.0-py3-none-any.whl", hash = "sha256:d8c463f1a9cc776ff63e331aba44c125f423a5a13c684307e828d930e625ba66", size = 492920, upload-time = "2025-02-14T07:13:48.696Z" }, + { url = "https://files.pythonhosted.org/packages/69/8e/3f6dfda475ecd940e786defe6df6c500734e686c9cd0a0f8ef6821e9b2f2/trio-0.30.0-py3-none-any.whl", hash = "sha256:3bf4f06b8decf8d3cf00af85f40a89824669e2d033bb32469d34840edcfc22a5", size = 499194, upload-time = "2025-04-21T00:48:17.167Z" }, ] [[package]] From a98555648c3ab056aa3df4d2c60e8d3c1d8927ef Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 27 May 2025 15:52:59 -0400 Subject: [PATCH 028/565] chore(uv.lock): update source field for linkedin-mcp-server --- uv.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/uv.lock b/uv.lock index 498df0e3..f78f0237 100644 --- a/uv.lock +++ b/uv.lock @@ -471,7 +471,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" version = "0.1.0" -source = { editable = "." } +source = { virtual = "." } dependencies = [ { name = "fastapi", extra = ["standard"] }, { name = "httpx" }, From 9ea839d27112ff2641d3cd858dd5e9c369104b8a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 27 May 2025 16:19:27 -0400 Subject: [PATCH 029/565] fix(job): clarify job URL format in docstring so we always make the right toolcall even if its an url like https://www.linkedin.com/jobs/collections/recommended/?currentJobId=4153484181 --- linkedin_mcp_server/tools/job.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 9df4e3aa..0e243894 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -5,9 +5,10 @@ This module provides tools for scraping LinkedIn job postings and searches. 
""" -from typing import Dict, Any, List -from mcp.server.fastmcp import FastMCP +from typing import Any, Dict, List + from linkedin_scraper import Job, JobSearch +from mcp.server.fastmcp import FastMCP from linkedin_mcp_server.drivers.chrome import get_or_create_driver @@ -25,11 +26,22 @@ async def get_job_details(job_url: str) -> Dict[str, Any]: """ Scrape job details from a LinkedIn job posting. + IMPORTANT: Only use direct LinkedIn job URLs in the format: + https://www.linkedin.com/jobs/view/[JOB_ID] + + DO NOT use collection URLs like: + - /collections/recommended/?currentJobId= + - /jobs/search/?keywords= + + If you have a collection URL, extract the job ID and convert it to the direct format. + Example: If you see currentJobId=1234567890, use https://www.linkedin.com/jobs/view/1234567890 + Args: - job_url (str): The LinkedIn URL of the job posting + job_url (str): The direct LinkedIn job URL (must be /jobs/view/[ID] format) Returns: - Dict[str, Any]: Structured data from the job posting + Dict[str, Any]: Structured job data including title, company, location, posting date, + application count, and job description (may be empty if content is protected) """ driver = get_or_create_driver() From 98977ba658fe04dfaa9fd80fe36325d74824baa5 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 27 May 2025 16:22:14 -0400 Subject: [PATCH 030/565] feat(docs): update project description and features in README to set right expectations --- README.md | 52 +++++++++++++++++++++++++++++++++++++++++--------- pyproject.toml | 2 +- 2 files changed, 44 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 31c29e6a..e14c6dff 100644 --- a/README.md +++ b/README.md @@ -8,11 +8,18 @@ A Model Context Protocol (MCP) server that enables interaction with LinkedIn thr https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 -## ๐Ÿ“‹ Features +## ๐Ÿ“‹ Features & Tool Status -- **Profile Scraping**: Get detailed information from LinkedIn profiles 
-- **Company Analysis**: Extract company information, including employees if desired -- **Job Search**: Search for jobs and get recommended positions +### โœ… **Working Tools** +- **Profile Scraping** (`get_person_profile`): Get detailed information from LinkedIn profiles including work history, education, skills, and connections +- **Company Analysis** (`get_company_profile`): Extract company information with comprehensive details (employees optional) +- **Job Details** (`get_job_details`): Retrieve specific job posting details using direct LinkedIn job URLs +- **Session Management** (`close_session`): Properly close browser sessions and clean up resources + +### โš ๏ธ **Tools with Known Issues** +- **Job Search** (`search_jobs`): Currently experiencing ChromeDriver compatibility issues with LinkedIn's search interface +- **Recommended Jobs** (`get_recommended_jobs`): Has Selenium method compatibility issues due to outdated scraping methods +- **Company Profiles**: Some companies may have restricted access or may return empty results (need further investigation) ## ๐Ÿ”ง Installation @@ -186,12 +193,17 @@ Once specified, the ChromeDriver path is used for the current session but not st 1. **After adding the configuration** to Claude Desktop, restart the application 2. **Start a conversation** with Claude 3. **You'll see tools available** in the tools menu (settings icon) -4. **You can now ask Claude** to retrieve LinkedIn profiles, search for jobs, etc. +4. **You can now ask Claude** to retrieve LinkedIn profiles, companies, and job details + +### โœ… **Recommended Usage Examples** +- "Can you tell me about Daniel's work experience? 
His LinkedIn profile is https://www.linkedin.com/in/stickerdaniel/" +- "Get details about this job posting: https://www.linkedin.com/jobs/view/1234567890" +- "Tell me about Electric Mind as a company based on their LinkedIn page" +- "Get the profile information for this LinkedIn user: [profile URL]" -Examples of what you can ask Claude: -- "Can you tell me about Daniels work experience? His LinkedIn profile is https://www.linkedin.com/in/stickerdaniel/" -- "Search for machine learning engineer jobs on LinkedIn" -- "Tell me about Google as a company based on their LinkedIn page" +### โš ๏ธ **Usage Examples with Known Issues** +- ~~"Search for machine learning engineer jobs on LinkedIn"~~ (currently not working due to browser automation issues) +- ~~"Show me recommended jobs from LinkedIn"~~ (has compatibility issues) ## ๐Ÿ” Security and Privacy @@ -202,6 +214,28 @@ Examples of what you can ask Claude: ## โš ๏ธ Troubleshooting +### Tool-Specific Issues + +**Job Search (`search_jobs`) Not Working:** +- This tool currently has ChromeDriver compatibility issues +- Use direct job URLs with `get_job_details` instead +- LinkedIn's search interface has anti-automation measures + +**Recommended Jobs (`get_recommended_jobs`) Errors:** +- Contains outdated Selenium methods (`find_elements_by_class_name`) +- LinkedIn has updated their DOM structure +- Use manual job discovery and direct URLs as workaround + +**Empty Job Descriptions:** +- Job content may be dynamically loaded +- Some job postings have protected content +- Try accessing the job URL directly in a browser first + +**Company Profile Access Issues:** +- Some companies restrict automated access +- Corporate LinkedIn pages may have different structures +- Try with well-known company URLs first + ### ChromeDriver Issues If you encounter ChromeDriver errors: diff --git a/pyproject.toml b/pyproject.toml index 9967b8e7..a4eddb49 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [project] name = 
"linkedin-mcp-server" version = "0.1.0" -description = "Add your description here" +description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" dependencies = [ From 4f9b5d43d1406485db010479fa703cce94e5c078 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 27 May 2025 16:42:51 -0400 Subject: [PATCH 031/565] docs(readme): new installation instructions with right UV project workflow - Replace manual dependency installation with uv sync - Separate dev dependencies --- README.md | 98 +++++++----- pyproject.toml | 12 +- uv.lock | 412 +++++++++++++++++-------------------------------- 3 files changed, 212 insertions(+), 310 deletions(-) diff --git a/README.md b/README.md index e14c6dff..68d2be79 100644 --- a/README.md +++ b/README.md @@ -25,47 +25,46 @@ https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 ### Prerequisites -- Python 3.8 or higher +- Python 3.12 or higher - Chrome browser installed -- ChromeDriver matching your Chrome version +- ChromeDriver matching your Chrome version (we'll help you set this up) - A LinkedIn account -### Step 1: Clone or Download the Repository +### Quick Start (Recommended) ```bash +# 1. Clone the repository git clone https://github.com/stickerdaniel/linkedin-mcp-server cd linkedin-mcp-server -``` - -Or download and extract the zip file. - -### Step 2: Set Up a Virtual Environment -Using `uv` (recommended): - -```bash -# Install uv if you don't have it +# 2. Install UV if you don't have it curl -LsSf https://astral.sh/uv/install.sh | sh -# Create and activate virtual environment -uv venv -source .venv/bin/activate # On macOS/Linux -# OR -.venv\Scripts\activate # On Windows +# 3. Install the project and all dependencies +uv sync + +# 4. 
Run the server for initial setup (this will prompt you for credentials) +uv run main.py --no-lazy-init --no-headless ``` -### Step 3: Install Dependencies +That's it! UV will: +- Automatically create a virtual environment +- Install all dependencies from `pyproject.toml` +- Handle the [LinkedIn scraper](https://github.com/joeyism/linkedin_scraper) git dependency + +#### For Development -Using `uv`: +If you want to contribute or modify the code: ```bash -uv add "mcp[cli]" selenium httpx inquirer pyperclip -uv add "git+https://github.com/stickerdaniel/linkedin_scraper.git" -uv pip install -e . -pre-commit install +# Install with development dependencies +uv sync --group dev + +# Install pre-commit hooks +uv run pre-commit install ``` -### Step 4: Install ChromeDriver +### ChromeDriver Setup ChromeDriver is required for Selenium to interact with Chrome. You need to install the version that matches your Chrome browser. @@ -90,35 +89,45 @@ ChromeDriver is required for Selenium to interact with Chrome. You need to insta ## ๐Ÿš€ Running the Server -1. **Start the server once manually**: +### Quick Start + +After installation, simply run: ```bash -# Using uv (recommended) +# Start the server (first time setup) uv run main.py --no-lazy-init --no-headless ``` -2. **Lazy initialization (default behavior)**: - - The server uses lazy initialization, meaning it will only create the Chrome driver and log in when a tool is actually used - - You can set environment variables for non-interactive use: - ```bash - export LINKEDIN_EMAIL=your.email@example.com - export LINKEDIN_PASSWORD=your_password - ``` - - Alternatively, you can run the server once manually and you'll be prompted for credentials, which will then be stored securely in your system's keychain (macOS Keychain, Windows Credential Locker, etc.) 
+### Running Options + +```bash +# Normal operation (lazy initialization) +uv run main.py + +# Debug mode with visible browser +uv run main.py --no-headless --debug + +# Skip setup prompts (for automation) +uv run main.py --no-setup +``` + +### Configuration for Claude Desktop + +1. **The server will automatically**: + - Display the configuration needed for Claude Desktop + - Copy it to your clipboard for easy pasting -3. **Configure Claude Desktop**: - - The server will display and copy to your clipboard the configuration needed for Claude Desktop +2. **Add to Claude Desktop**: - Open Claude Desktop and go to Settings > Developer > Edit Config - Paste the configuration provided by the server - - Edit the configuration to include your LinkedIn credentials as environment variables Example Claude Desktop configuration: ```json { "mcpServers": { "linkedin-scraper": { - "command": "/path/to/uv", - "args": ["--directory", "/path/to/project", "run", "main.py", "--no-setup"], + "command": "uv", + "args": ["--directory", "/path/to/linkedin-mcp-server", "run", "main.py", "--no-setup"], "env": { "LINKEDIN_EMAIL": "your.email@example.com", "LINKEDIN_PASSWORD": "your_password" @@ -128,6 +137,17 @@ uv run main.py --no-lazy-init --no-headless } ``` +### Credential Management + +- **Lazy initialization (default behavior)**: + - The server uses lazy initialization, meaning it will only create the Chrome driver and log in when a tool is actually used + - You can set environment variables for non-interactive use: + ```bash + export LINKEDIN_EMAIL=your.email@example.com + export LINKEDIN_PASSWORD=your_password + ``` + - Alternatively, you can run the server once manually and you'll be prompted for credentials, which will then be stored securely in your system's keychain (macOS Keychain, Windows Credential Locker, etc.) 
+ ## โš™๏ธ Configuration System ### Configuration Hierarchy diff --git a/pyproject.toml b/pyproject.toml index a4eddb49..2fbbd933 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,14 +5,11 @@ description = "MCP server for LinkedIn profile, company, and job scraping with C readme = "README.md" requires-python = ">=3.12" dependencies = [ - "fastapi[standard]>=0.115.12", "httpx>=0.28.1", "inquirer>=3.4.0", "keyring>=25.6.0", "linkedin-scraper", "mcp[cli]>=1.6.0", - "mypy>=1.15.0", - "pre-commit>=4.2.0", "pyperclip>=1.9.0", ] @@ -21,3 +18,12 @@ linkedin_mcp_server = ["py.typed"] [tool.uv.sources] linkedin-scraper = { git = "https://github.com/joeyism/linkedin_scraper.git" } + +[dependency-groups] +dev = [ + "mypy>=1.15.0", + "pre-commit>=4.2.0", + "pytest>=8.3.5", + "pytest-cov>=6.1.1", + "ruff>=0.11.11", +] diff --git a/uv.lock b/uv.lock index f78f0237..2b383462 100644 --- a/uv.lock +++ b/uv.lock @@ -160,6 +160,48 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] +[[package]] +name = "coverage" +version = "7.8.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/07/998afa4a0ecdf9b1981ae05415dad2d4e7716e1b1f00abbd91691ac09ac9/coverage-7.8.2.tar.gz", hash = "sha256:a886d531373a1f6ff9fad2a2ba4a045b68467b779ae729ee0b3b10ac20033b27", size = 812759, upload-time = "2025-05-23T11:39:57.856Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/2a/1da1ada2e3044fcd4a3254fb3576e160b8fe5b36d705c8a31f793423f763/coverage-7.8.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2f6fe3654468d061942591aef56686131335b7a8325684eda85dacdf311356c", size = 211876, upload-time = "2025-05-23T11:38:29.01Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/e9/3d715ffd5b6b17a8be80cd14a8917a002530a99943cc1939ad5bb2aa74b9/coverage-7.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76090fab50610798cc05241bf83b603477c40ee87acd358b66196ab0ca44ffa1", size = 212130, upload-time = "2025-05-23T11:38:30.675Z" }, + { url = "https://files.pythonhosted.org/packages/a0/02/fdce62bb3c21649abfd91fbdcf041fb99be0d728ff00f3f9d54d97ed683e/coverage-7.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd0a0a5054be160777a7920b731a0570284db5142abaaf81bcbb282b8d99279", size = 246176, upload-time = "2025-05-23T11:38:32.395Z" }, + { url = "https://files.pythonhosted.org/packages/a7/52/decbbed61e03b6ffe85cd0fea360a5e04a5a98a7423f292aae62423b8557/coverage-7.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da23ce9a3d356d0affe9c7036030b5c8f14556bd970c9b224f9c8205505e3b99", size = 243068, upload-time = "2025-05-23T11:38:33.989Z" }, + { url = "https://files.pythonhosted.org/packages/38/6c/d0e9c0cce18faef79a52778219a3c6ee8e336437da8eddd4ab3dbd8fadff/coverage-7.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9392773cffeb8d7e042a7b15b82a414011e9d2b5fdbbd3f7e6a6b17d5e21b20", size = 245328, upload-time = "2025-05-23T11:38:35.568Z" }, + { url = "https://files.pythonhosted.org/packages/f0/70/f703b553a2f6b6c70568c7e398ed0789d47f953d67fbba36a327714a7bca/coverage-7.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:876cbfd0b09ce09d81585d266c07a32657beb3eaec896f39484b631555be0fe2", size = 245099, upload-time = "2025-05-23T11:38:37.627Z" }, + { url = "https://files.pythonhosted.org/packages/ec/fb/4cbb370dedae78460c3aacbdad9d249e853f3bc4ce5ff0e02b1983d03044/coverage-7.8.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3da9b771c98977a13fbc3830f6caa85cae6c9c83911d24cb2d218e9394259c57", size = 243314, upload-time = "2025-05-23T11:38:39.238Z" 
}, + { url = "https://files.pythonhosted.org/packages/39/9f/1afbb2cb9c8699b8bc38afdce00a3b4644904e6a38c7bf9005386c9305ec/coverage-7.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a990f6510b3292686713bfef26d0049cd63b9c7bb17e0864f133cbfd2e6167f", size = 244489, upload-time = "2025-05-23T11:38:40.845Z" }, + { url = "https://files.pythonhosted.org/packages/79/fa/f3e7ec7d220bff14aba7a4786ae47043770cbdceeea1803083059c878837/coverage-7.8.2-cp312-cp312-win32.whl", hash = "sha256:bf8111cddd0f2b54d34e96613e7fbdd59a673f0cf5574b61134ae75b6f5a33b8", size = 214366, upload-time = "2025-05-23T11:38:43.551Z" }, + { url = "https://files.pythonhosted.org/packages/54/aa/9cbeade19b7e8e853e7ffc261df885d66bf3a782c71cba06c17df271f9e6/coverage-7.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:86a323a275e9e44cdf228af9b71c5030861d4d2610886ab920d9945672a81223", size = 215165, upload-time = "2025-05-23T11:38:45.148Z" }, + { url = "https://files.pythonhosted.org/packages/c4/73/e2528bf1237d2448f882bbebaec5c3500ef07301816c5c63464b9da4d88a/coverage-7.8.2-cp312-cp312-win_arm64.whl", hash = "sha256:820157de3a589e992689ffcda8639fbabb313b323d26388d02e154164c57b07f", size = 213548, upload-time = "2025-05-23T11:38:46.74Z" }, + { url = "https://files.pythonhosted.org/packages/1a/93/eb6400a745ad3b265bac36e8077fdffcf0268bdbbb6c02b7220b624c9b31/coverage-7.8.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ea561010914ec1c26ab4188aef8b1567272ef6de096312716f90e5baa79ef8ca", size = 211898, upload-time = "2025-05-23T11:38:49.066Z" }, + { url = "https://files.pythonhosted.org/packages/1b/7c/bdbf113f92683024406a1cd226a199e4200a2001fc85d6a6e7e299e60253/coverage-7.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cb86337a4fcdd0e598ff2caeb513ac604d2f3da6d53df2c8e368e07ee38e277d", size = 212171, upload-time = "2025-05-23T11:38:51.207Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/22/594513f9541a6b88eb0dba4d5da7d71596dadef6b17a12dc2c0e859818a9/coverage-7.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26a4636ddb666971345541b59899e969f3b301143dd86b0ddbb570bd591f1e85", size = 245564, upload-time = "2025-05-23T11:38:52.857Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f4/2860fd6abeebd9f2efcfe0fd376226938f22afc80c1943f363cd3c28421f/coverage-7.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5040536cf9b13fb033f76bcb5e1e5cb3b57c4807fef37db9e0ed129c6a094257", size = 242719, upload-time = "2025-05-23T11:38:54.529Z" }, + { url = "https://files.pythonhosted.org/packages/89/60/f5f50f61b6332451520e6cdc2401700c48310c64bc2dd34027a47d6ab4ca/coverage-7.8.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc67994df9bcd7e0150a47ef41278b9e0a0ea187caba72414b71dc590b99a108", size = 244634, upload-time = "2025-05-23T11:38:57.326Z" }, + { url = "https://files.pythonhosted.org/packages/3b/70/7f4e919039ab7d944276c446b603eea84da29ebcf20984fb1fdf6e602028/coverage-7.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e6c86888fd076d9e0fe848af0a2142bf606044dc5ceee0aa9eddb56e26895a0", size = 244824, upload-time = "2025-05-23T11:38:59.421Z" }, + { url = "https://files.pythonhosted.org/packages/26/45/36297a4c0cea4de2b2c442fe32f60c3991056c59cdc3cdd5346fbb995c97/coverage-7.8.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:684ca9f58119b8e26bef860db33524ae0365601492e86ba0b71d513f525e7050", size = 242872, upload-time = "2025-05-23T11:39:01.049Z" }, + { url = "https://files.pythonhosted.org/packages/a4/71/e041f1b9420f7b786b1367fa2a375703889ef376e0d48de9f5723fb35f11/coverage-7.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8165584ddedb49204c4e18da083913bdf6a982bfb558632a79bdaadcdafd0d48", size = 244179, upload-time = 
"2025-05-23T11:39:02.709Z" }, + { url = "https://files.pythonhosted.org/packages/bd/db/3c2bf49bdc9de76acf2491fc03130c4ffc51469ce2f6889d2640eb563d77/coverage-7.8.2-cp313-cp313-win32.whl", hash = "sha256:34759ee2c65362163699cc917bdb2a54114dd06d19bab860725f94ef45a3d9b7", size = 214393, upload-time = "2025-05-23T11:39:05.457Z" }, + { url = "https://files.pythonhosted.org/packages/c6/dc/947e75d47ebbb4b02d8babb1fad4ad381410d5bc9da7cfca80b7565ef401/coverage-7.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:2f9bc608fbafaee40eb60a9a53dbfb90f53cc66d3d32c2849dc27cf5638a21e3", size = 215194, upload-time = "2025-05-23T11:39:07.171Z" }, + { url = "https://files.pythonhosted.org/packages/90/31/a980f7df8a37eaf0dc60f932507fda9656b3a03f0abf188474a0ea188d6d/coverage-7.8.2-cp313-cp313-win_arm64.whl", hash = "sha256:9fe449ee461a3b0c7105690419d0b0aba1232f4ff6d120a9e241e58a556733f7", size = 213580, upload-time = "2025-05-23T11:39:08.862Z" }, + { url = "https://files.pythonhosted.org/packages/8a/6a/25a37dd90f6c95f59355629417ebcb74e1c34e38bb1eddf6ca9b38b0fc53/coverage-7.8.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8369a7c8ef66bded2b6484053749ff220dbf83cba84f3398c84c51a6f748a008", size = 212734, upload-time = "2025-05-23T11:39:11.109Z" }, + { url = "https://files.pythonhosted.org/packages/36/8b/3a728b3118988725f40950931abb09cd7f43b3c740f4640a59f1db60e372/coverage-7.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:159b81df53a5fcbc7d45dae3adad554fdbde9829a994e15227b3f9d816d00b36", size = 212959, upload-time = "2025-05-23T11:39:12.751Z" }, + { url = "https://files.pythonhosted.org/packages/53/3c/212d94e6add3a3c3f412d664aee452045ca17a066def8b9421673e9482c4/coverage-7.8.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6fcbbd35a96192d042c691c9e0c49ef54bd7ed865846a3c9d624c30bb67ce46", size = 257024, upload-time = "2025-05-23T11:39:15.569Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/40/afc03f0883b1e51bbe804707aae62e29c4e8c8bbc365c75e3e4ddeee9ead/coverage-7.8.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05364b9cc82f138cc86128dc4e2e1251c2981a2218bfcd556fe6b0fbaa3501be", size = 252867, upload-time = "2025-05-23T11:39:17.64Z" }, + { url = "https://files.pythonhosted.org/packages/18/a2/3699190e927b9439c6ded4998941a3c1d6fa99e14cb28d8536729537e307/coverage-7.8.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46d532db4e5ff3979ce47d18e2fe8ecad283eeb7367726da0e5ef88e4fe64740", size = 255096, upload-time = "2025-05-23T11:39:19.328Z" }, + { url = "https://files.pythonhosted.org/packages/b4/06/16e3598b9466456b718eb3e789457d1a5b8bfb22e23b6e8bbc307df5daf0/coverage-7.8.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4000a31c34932e7e4fa0381a3d6deb43dc0c8f458e3e7ea6502e6238e10be625", size = 256276, upload-time = "2025-05-23T11:39:21.077Z" }, + { url = "https://files.pythonhosted.org/packages/a7/d5/4b5a120d5d0223050a53d2783c049c311eea1709fa9de12d1c358e18b707/coverage-7.8.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:43ff5033d657cd51f83015c3b7a443287250dc14e69910577c3e03bd2e06f27b", size = 254478, upload-time = "2025-05-23T11:39:22.838Z" }, + { url = "https://files.pythonhosted.org/packages/ba/85/f9ecdb910ecdb282b121bfcaa32fa8ee8cbd7699f83330ee13ff9bbf1a85/coverage-7.8.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94316e13f0981cbbba132c1f9f365cac1d26716aaac130866ca812006f662199", size = 255255, upload-time = "2025-05-23T11:39:24.644Z" }, + { url = "https://files.pythonhosted.org/packages/50/63/2d624ac7d7ccd4ebbd3c6a9eba9d7fc4491a1226071360d59dd84928ccb2/coverage-7.8.2-cp313-cp313t-win32.whl", hash = "sha256:3f5673888d3676d0a745c3d0e16da338c5eea300cb1f4ada9c872981265e76d8", size = 215109, upload-time = "2025-05-23T11:39:26.722Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/5e/7053b71462e970e869111c1853afd642212568a350eba796deefdfbd0770/coverage-7.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:2c08b05ee8d7861e45dc5a2cc4195c8c66dca5ac613144eb6ebeaff2d502e73d", size = 216268, upload-time = "2025-05-23T11:39:28.429Z" }, + { url = "https://files.pythonhosted.org/packages/07/69/afa41aa34147655543dbe96994f8a246daf94b361ccf5edfd5df62ce066a/coverage-7.8.2-cp313-cp313t-win_arm64.whl", hash = "sha256:1e1448bb72b387755e1ff3ef1268a06617afd94188164960dba8d0245a46004b", size = 214071, upload-time = "2025-05-23T11:39:30.55Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1a/0b9c32220ad694d66062f571cc5cedfa9997b64a591e8a500bb63de1bd40/coverage-7.8.2-py3-none-any.whl", hash = "sha256:726f32ee3713f7359696331a18daf0c3b3a70bb0ae71141b9d3c52be7c595e32", size = 203623, upload-time = "2025-05-23T11:39:53.846Z" }, +] + [[package]] name = "cryptography" version = "44.0.3" @@ -198,15 +240,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973, upload-time = "2024-10-09T18:35:44.272Z" }, ] -[[package]] -name = "dnspython" -version = "2.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, -] - [[package]] name = 
"editor" version = "1.6.6" @@ -220,62 +253,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1b/c2/4bc8cd09b14e28ce3f406a8b05761bed0d785d1ca8c2a5c6684d884c66a2/editor-1.6.6-py3-none-any.whl", hash = "sha256:e818e6913f26c2a81eadef503a2741d7cca7f235d20e217274a009ecd5a74abf", size = 4017, upload-time = "2024-01-25T10:44:58.66Z" }, ] -[[package]] -name = "email-validator" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "dnspython" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967, upload-time = "2024-06-20T11:30:30.034Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521, upload-time = "2024-06-20T11:30:28.248Z" }, -] - -[[package]] -name = "fastapi" -version = "0.115.12" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pydantic" }, - { name = "starlette" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f4/55/ae499352d82338331ca1e28c7f4a63bfd09479b16395dce38cf50a39e2c2/fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681", size = 295236, upload-time = "2025-03-23T22:55:43.822Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/50/b3/b51f09c2ba432a576fe63758bddc81f78f0c6309d9e5c10d194313bf021e/fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d", size = 95164, upload-time = "2025-03-23T22:55:42.101Z" }, -] - -[package.optional-dependencies] -standard = [ - { 
name = "email-validator" }, - { name = "fastapi-cli", extra = ["standard"] }, - { name = "httpx" }, - { name = "jinja2" }, - { name = "python-multipart" }, - { name = "uvicorn", extra = ["standard"] }, -] - -[[package]] -name = "fastapi-cli" -version = "0.0.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "rich-toolkit" }, - { name = "typer" }, - { name = "uvicorn", extra = ["standard"] }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fe/73/82a5831fbbf8ed75905bacf5b2d9d3dfd6f04d6968b29fe6f72a5ae9ceb1/fastapi_cli-0.0.7.tar.gz", hash = "sha256:02b3b65956f526412515907a0793c9094abd4bfb5457b389f645b0ea6ba3605e", size = 16753, upload-time = "2024-12-15T14:28:10.028Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/e6/5daefc851b514ce2287d8f5d358ae4341089185f78f3217a69d0ce3a390c/fastapi_cli-0.0.7-py3-none-any.whl", hash = "sha256:d549368ff584b2804336c61f192d86ddea080c11255f375959627911944804f4", size = 10705, upload-time = "2024-12-15T14:28:06.18Z" }, -] - -[package.optional-dependencies] -standard = [ - { name = "uvicorn", extra = ["standard"] }, -] - [[package]] name = "filelock" version = "3.18.0" @@ -307,28 +284,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/18/8d/f052b1e336bb2c1fc7ed1aaed898aa570c0b61a09707b108979d9fc6e308/httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be", size = 78732, upload-time = "2025-04-11T14:42:44.896Z" }, ] -[[package]] -name = "httptools" -version = "0.6.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload-time = "2024-10-16T19:45:08.902Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683, upload-time = "2024-10-16T19:44:30.175Z" }, - { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337, upload-time = "2024-10-16T19:44:31.786Z" }, - { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796, upload-time = "2024-10-16T19:44:32.825Z" }, - { url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837, upload-time = "2024-10-16T19:44:33.974Z" }, - { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289, upload-time = "2024-10-16T19:44:35.111Z" }, - { url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779, upload-time = "2024-10-16T19:44:36.253Z" }, - { url = 
"https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634, upload-time = "2024-10-16T19:44:37.357Z" }, - { url = "https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214, upload-time = "2024-10-16T19:44:38.738Z" }, - { url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431, upload-time = "2024-10-16T19:44:39.818Z" }, - { url = "https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121, upload-time = "2024-10-16T19:44:41.189Z" }, - { url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805, upload-time = "2024-10-16T19:44:42.384Z" }, - { url = "https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858, upload-time = "2024-10-16T19:44:43.959Z" }, - { url = 
"https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042, upload-time = "2024-10-16T19:44:45.071Z" }, - { url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682, upload-time = "2024-10-16T19:44:46.46Z" }, -] - [[package]] name = "httpx" version = "0.28.1" @@ -355,11 +310,11 @@ wheels = [ [[package]] name = "identify" -version = "2.6.9" +version = "2.6.12" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9b/98/a71ab060daec766acc30fb47dfca219d03de34a70d616a79a38c6066c5bf/identify-2.6.9.tar.gz", hash = "sha256:d40dfe3142a1421d8518e3d3985ef5ac42890683e32306ad614a29490abeb6bf", size = 99249, upload-time = "2025-03-08T15:54:13.632Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6", size = 99254, upload-time = "2025-05-23T20:37:53.3Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/ce/0845144ed1f0e25db5e7a79c2354c1da4b5ce392b8966449d5db8dca18f1/identify-2.6.9-py2.py3-none-any.whl", hash = "sha256:c98b4322da415a8e5a70ff6e51fbc2d2932c015532d77e9f8537b4ba7813b150", size = 99101, upload-time = "2025-03-08T15:54:12.026Z" }, + { url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2", size = 99145, upload-time = "2025-05-23T20:37:51.495Z" }, ] 
[[package]] @@ -371,6 +326,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + [[package]] name = "inquirer" version = "3.4.0" @@ -427,18 +391,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b2/a3/e137168c9c44d18eff0376253da9f1e9234d0239e0ee230d2fee6cea8e55/jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683", size = 49010, upload-time = "2025-02-27T18:51:00.104Z" }, ] -[[package]] -name = "jinja2" -version = "3.1.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markupsafe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", 
hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, -] - [[package]] name = "jinxed" version = "1.3.0" @@ -473,28 +425,40 @@ name = "linkedin-mcp-server" version = "0.1.0" source = { virtual = "." } dependencies = [ - { name = "fastapi", extra = ["standard"] }, { name = "httpx" }, { name = "inquirer" }, { name = "keyring" }, { name = "linkedin-scraper" }, { name = "mcp", extra = ["cli"] }, + { name = "pyperclip" }, +] + +[package.dev-dependencies] +dev = [ { name = "mypy" }, { name = "pre-commit" }, - { name = "pyperclip" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "ruff" }, ] [package.metadata] requires-dist = [ - { name = "fastapi", extras = ["standard"], specifier = ">=0.115.12" }, { name = "httpx", specifier = ">=0.28.1" }, { name = "inquirer", specifier = ">=3.4.0" }, { name = "keyring", specifier = ">=25.6.0" }, { name = "linkedin-scraper", git = "https://github.com/joeyism/linkedin_scraper.git" }, { name = "mcp", extras = ["cli"], specifier = ">=1.6.0" }, + { name = "pyperclip", specifier = ">=1.9.0" }, +] + +[package.metadata.requires-dev] +dev = [ { name = "mypy", specifier = ">=1.15.0" }, { name = "pre-commit", specifier = ">=4.2.0" }, - { name = "pyperclip", specifier = ">=1.9.0" }, + { name = "pytest", specifier = ">=8.3.5" }, + { name = "pytest-cov", specifier = ">=6.1.1" }, + { name = "ruff", specifier = ">=0.11.11" }, ] [[package]] @@ -561,44 +525,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, ] -[[package]] -name = "markupsafe" -version = "3.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, - { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, - { url = 
"https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, - { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, - { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, - { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, - { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, - { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, - { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, - { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, - { url = 
"https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, - { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, - { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, - { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, - { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, - { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, - { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, - { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, - { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, - { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, - { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, - { url = 
"https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, -] - [[package]] name = "mcp" version = "1.6.0" @@ -669,11 +595,11 @@ wheels = [ [[package]] name = "mypy-extensions" -version = "1.0.0" +version = "1.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433, upload-time = "2023-02-04T12:11:27.157Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695, upload-time = "2023-02-04T12:11:25.002Z" }, + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, ] [[package]] @@ -697,13 +623,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/55/8b/5ab7257531a5d830fc8000c476e63c935488d74609b50f9384a643ec0a62/outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b", size = 10692, 
upload-time = "2023-10-26T04:26:02.532Z" }, ] +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + [[package]] name = "platformdirs" -version = "4.3.7" +version = "4.3.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b6/2d/7d512a3913d60623e7eb945c6d1b4f0bddf1d0b7ada5225274c87e5b53d1/platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351", size = 21291, upload-time = "2025-03-19T20:36:10.989Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6d/45/59578566b3275b8fd9157885918fcd0c4d74162928a5310926887b856a51/platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94", size = 18499, upload-time = "2025-03-19T20:36:09.038Z" }, + { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", 
size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] [[package]] @@ -826,21 +770,40 @@ wheels = [ ] [[package]] -name = "python-dotenv" -version = "1.1.0" +name = "pytest" +version = "8.3.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920, upload-time = "2025-03-25T10:14:56.835Z" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891, upload-time = "2025-03-02T12:54:54.503Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload-time = "2025-03-25T10:14:55.034Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634, upload-time = "2025-03-02T12:54:52.069Z" }, +] + +[[package]] +name = "pytest-cov" +version = "6.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/25/69/5f1e57f6c5a39f81411b550027bf72842c4567ff5fd572bed1edc9e4b5d9/pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a", size = 66857, upload-time = "2025-04-05T14:07:51.592Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/28/d0/def53b4a790cfb21483016430ed828f64830dd981ebe1089971cd10cab25/pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde", size = 23841, upload-time = "2025-04-05T14:07:49.641Z" }, ] [[package]] -name = "python-multipart" -version = "0.0.20" +name = "python-dotenv" +version = "1.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } +sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920, upload-time = "2025-03-25T10:14:56.835Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = 
"sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, + { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload-time = "2025-03-25T10:14:55.034Z" }, ] [[package]] @@ -916,17 +879,28 @@ wheels = [ ] [[package]] -name = "rich-toolkit" -version = "0.14.4" +name = "ruff" +version = "0.11.11" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "rich" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e1/33/18332e1359803ae6407a1e605a6bdb253a426ffe931555f1299f9e39eece/rich_toolkit-0.14.4.tar.gz", hash = "sha256:db256cf45165cae381c9bbf3b48a0fd4d99a07c80155cc655c80212a62e28fe1", size = 104487, upload-time = "2025-04-29T19:43:36.904Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/53/ae4857030d59286924a8bdb30d213d6ff22d8f0957e738d0289990091dd8/ruff-0.11.11.tar.gz", hash = "sha256:7774173cc7c1980e6bf67569ebb7085989a78a103922fb83ef3dfe230cd0687d", size = 4186707, upload-time = "2025-05-22T19:19:34.363Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/44/48/c6d43d4c56c45c0171c771b2b73deeec493efb57795b651319201e7c4638/rich_toolkit-0.14.4-py3-none-any.whl", hash = "sha256:cc71ebee83eaa122d8e42882408bc5a4bf0240bbf1e368811ee56d249b3d742a", size = 24258, upload-time = "2025-04-29T19:43:35.502Z" }, + { url = "https://files.pythonhosted.org/packages/b1/14/f2326676197bab099e2a24473158c21656fbf6a207c65f596ae15acb32b9/ruff-0.11.11-py3-none-linux_armv6l.whl", hash = "sha256:9924e5ae54125ed8958a4f7de320dab7380f6e9fa3195e3dc3b137c6842a0092", size = 10229049, upload-time = "2025-05-22T19:18:45.516Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/f3/bff7c92dd66c959e711688b2e0768e486bbca46b2f35ac319bb6cce04447/ruff-0.11.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c8a93276393d91e952f790148eb226658dd275cddfde96c6ca304873f11d2ae4", size = 11053601, upload-time = "2025-05-22T19:18:49.269Z" }, + { url = "https://files.pythonhosted.org/packages/e2/38/8e1a3efd0ef9d8259346f986b77de0f62c7a5ff4a76563b6b39b68f793b9/ruff-0.11.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d6e333dbe2e6ae84cdedefa943dfd6434753ad321764fd937eef9d6b62022bcd", size = 10367421, upload-time = "2025-05-22T19:18:51.754Z" }, + { url = "https://files.pythonhosted.org/packages/b4/50/557ad9dd4fb9d0bf524ec83a090a3932d284d1a8b48b5906b13b72800e5f/ruff-0.11.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7885d9a5e4c77b24e8c88aba8c80be9255fa22ab326019dac2356cff42089fc6", size = 10581980, upload-time = "2025-05-22T19:18:54.011Z" }, + { url = "https://files.pythonhosted.org/packages/c4/b2/e2ed82d6e2739ece94f1bdbbd1d81b712d3cdaf69f0a1d1f1a116b33f9ad/ruff-0.11.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1b5ab797fcc09121ed82e9b12b6f27e34859e4227080a42d090881be888755d4", size = 10089241, upload-time = "2025-05-22T19:18:56.041Z" }, + { url = "https://files.pythonhosted.org/packages/3d/9f/b4539f037a5302c450d7c695c82f80e98e48d0d667ecc250e6bdeb49b5c3/ruff-0.11.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e231ff3132c1119ece836487a02785f099a43992b95c2f62847d29bace3c75ac", size = 11699398, upload-time = "2025-05-22T19:18:58.248Z" }, + { url = "https://files.pythonhosted.org/packages/61/fb/32e029d2c0b17df65e6eaa5ce7aea5fbeaed22dddd9fcfbbf5fe37c6e44e/ruff-0.11.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a97c9babe1d4081037a90289986925726b802d180cca784ac8da2bbbc335f709", size = 12427955, upload-time = "2025-05-22T19:19:00.981Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/e3/160488dbb11f18c8121cfd588e38095ba779ae208292765972f7732bfd95/ruff-0.11.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8c4ddcbe8a19f59f57fd814b8b117d4fcea9bee7c0492e6cf5fdc22cfa563c8", size = 12069803, upload-time = "2025-05-22T19:19:03.258Z" }, + { url = "https://files.pythonhosted.org/packages/ff/16/3b006a875f84b3d0bff24bef26b8b3591454903f6f754b3f0a318589dcc3/ruff-0.11.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6224076c344a7694c6fbbb70d4f2a7b730f6d47d2a9dc1e7f9d9bb583faf390b", size = 11242630, upload-time = "2025-05-22T19:19:05.871Z" }, + { url = "https://files.pythonhosted.org/packages/65/0d/0338bb8ac0b97175c2d533e9c8cdc127166de7eb16d028a43c5ab9e75abd/ruff-0.11.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:882821fcdf7ae8db7a951df1903d9cb032bbe838852e5fc3c2b6c3ab54e39875", size = 11507310, upload-time = "2025-05-22T19:19:08.584Z" }, + { url = "https://files.pythonhosted.org/packages/6f/bf/d7130eb26174ce9b02348b9f86d5874eafbf9f68e5152e15e8e0a392e4a3/ruff-0.11.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:dcec2d50756463d9df075a26a85a6affbc1b0148873da3997286caf1ce03cae1", size = 10441144, upload-time = "2025-05-22T19:19:13.621Z" }, + { url = "https://files.pythonhosted.org/packages/b3/f3/4be2453b258c092ff7b1761987cf0749e70ca1340cd1bfb4def08a70e8d8/ruff-0.11.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:99c28505ecbaeb6594701a74e395b187ee083ee26478c1a795d35084d53ebd81", size = 10081987, upload-time = "2025-05-22T19:19:15.821Z" }, + { url = "https://files.pythonhosted.org/packages/6c/6e/dfa4d2030c5b5c13db158219f2ec67bf333e8a7748dccf34cfa2a6ab9ebc/ruff-0.11.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9263f9e5aa4ff1dec765e99810f1cc53f0c868c5329b69f13845f699fe74f639", size = 11073922, upload-time = "2025-05-22T19:19:18.104Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/f4/f7b0b0c3d32b593a20ed8010fa2c1a01f2ce91e79dda6119fcc51d26c67b/ruff-0.11.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:64ac6f885e3ecb2fdbb71de2701d4e34526651f1e8503af8fb30d4915a3fe345", size = 11568537, upload-time = "2025-05-22T19:19:20.889Z" }, + { url = "https://files.pythonhosted.org/packages/d2/46/0e892064d0adc18bcc81deed9aaa9942a27fd2cd9b1b7791111ce468c25f/ruff-0.11.11-py3-none-win32.whl", hash = "sha256:1adcb9a18802268aaa891ffb67b1c94cd70578f126637118e8099b8e4adcf112", size = 10536492, upload-time = "2025-05-22T19:19:23.642Z" }, + { url = "https://files.pythonhosted.org/packages/1b/d9/232e79459850b9f327e9f1dc9c047a2a38a6f9689e1ec30024841fc4416c/ruff-0.11.11-py3-none-win_amd64.whl", hash = "sha256:748b4bb245f11e91a04a4ff0f96e386711df0a30412b9fe0c74d5bdc0e4a531f", size = 11612562, upload-time = "2025-05-22T19:19:27.013Z" }, + { url = "https://files.pythonhosted.org/packages/ce/eb/09c132cff3cc30b2e7244191dcce69437352d6d6709c0adf374f3e6f476e/ruff-0.11.11-py3-none-win_arm64.whl", hash = "sha256:6c51f136c0364ab1b774767aa8b86331bd8e9d414e2d107db7a2189f35ea1f7b", size = 10735951, upload-time = "2025-05-22T19:19:30.043Z" }, ] [[package]] @@ -1126,85 +1100,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315, upload-time = "2024-12-15T13:33:27.467Z" }, ] -[package.optional-dependencies] -standard = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "httptools" }, - { name = "python-dotenv" }, - { name = "pyyaml" }, - { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, - { name = "watchfiles" }, - { name = "websockets" }, -] - -[[package]] -name = "uvloop" -version = "0.21.0" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741, upload-time = "2024-10-14T23:38:35.489Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284, upload-time = "2024-10-14T23:37:47.833Z" }, - { url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349, upload-time = "2024-10-14T23:37:50.149Z" }, - { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089, upload-time = "2024-10-14T23:37:51.703Z" }, - { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770, upload-time = "2024-10-14T23:37:54.122Z" }, - { url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321, upload-time = "2024-10-14T23:37:55.766Z" }, - { url = 
"https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022, upload-time = "2024-10-14T23:37:58.195Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123, upload-time = "2024-10-14T23:38:00.688Z" }, - { url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325, upload-time = "2024-10-14T23:38:02.309Z" }, - { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806, upload-time = "2024-10-14T23:38:04.711Z" }, - { url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068, upload-time = "2024-10-14T23:38:06.385Z" }, - { url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428, upload-time = "2024-10-14T23:38:08.416Z" }, - { url = 
"https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018, upload-time = "2024-10-14T23:38:10.888Z" }, -] - [[package]] name = "virtualenv" -version = "20.30.0" +version = "20.31.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/e0/633e369b91bbc664df47dcb5454b6c7cf441e8f5b9d0c250ce9f0546401e/virtualenv-20.30.0.tar.gz", hash = "sha256:800863162bcaa5450a6e4d721049730e7f2dae07720e0902b0e4040bd6f9ada8", size = 4346945, upload-time = "2025-03-31T16:33:29.185Z" } +sdist = { url = "https://files.pythonhosted.org/packages/56/2c/444f465fb2c65f40c3a104fd0c495184c4f2336d65baf398e3c75d72ea94/virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af", size = 6076316, upload-time = "2025-05-08T17:58:23.811Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4c/ed/3cfeb48175f0671ec430ede81f628f9fb2b1084c9064ca67ebe8c0ed6a05/virtualenv-20.30.0-py3-none-any.whl", hash = "sha256:e34302959180fca3af42d1800df014b35019490b119eba981af27f2fa486e5d6", size = 4329461, upload-time = "2025-03-31T16:33:26.758Z" }, -] - -[[package]] -name = "watchfiles" -version = "1.0.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/03/e2/8ed598c42057de7aa5d97c472254af4906ff0a59a66699d426fc9ef795d7/watchfiles-1.0.5.tar.gz", hash = "sha256:b7529b5dcc114679d43827d8c35a07c493ad6f083633d573d81c660abc5979e9", size = 94537, upload-time = "2025-04-08T10:36:26.722Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2a/8c/4f0b9bdb75a1bfbd9c78fad7d8854369283f74fe7cf03eb16be77054536d/watchfiles-1.0.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5eb568c2aa6018e26da9e6c86f3ec3fd958cee7f0311b35c2630fa4217d17f2", size = 401511, upload-time = "2025-04-08T10:35:17.956Z" }, - { url = "https://files.pythonhosted.org/packages/dc/4e/7e15825def77f8bd359b6d3f379f0c9dac4eb09dd4ddd58fd7d14127179c/watchfiles-1.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0a04059f4923ce4e856b4b4e5e783a70f49d9663d22a4c3b3298165996d1377f", size = 392715, upload-time = "2025-04-08T10:35:19.202Z" }, - { url = "https://files.pythonhosted.org/packages/58/65/b72fb817518728e08de5840d5d38571466c1b4a3f724d190cec909ee6f3f/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e380c89983ce6e6fe2dd1e1921b9952fb4e6da882931abd1824c092ed495dec", size = 454138, upload-time = "2025-04-08T10:35:20.586Z" }, - { url = "https://files.pythonhosted.org/packages/3e/a4/86833fd2ea2e50ae28989f5950b5c3f91022d67092bfec08f8300d8b347b/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fe43139b2c0fdc4a14d4f8d5b5d967f7a2777fd3d38ecf5b1ec669b0d7e43c21", size = 458592, upload-time = "2025-04-08T10:35:21.87Z" }, - { url = "https://files.pythonhosted.org/packages/38/7e/42cb8df8be9a37e50dd3a818816501cf7a20d635d76d6bd65aae3dbbff68/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee0822ce1b8a14fe5a066f93edd20aada932acfe348bede8aa2149f1a4489512", size = 487532, upload-time = "2025-04-08T10:35:23.143Z" }, - { url = "https://files.pythonhosted.org/packages/fc/fd/13d26721c85d7f3df6169d8b495fcac8ab0dc8f0945ebea8845de4681dab/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0dbcb1c2d8f2ab6e0a81c6699b236932bd264d4cef1ac475858d16c403de74d", size = 522865, upload-time = "2025-04-08T10:35:24.702Z" }, - { url = 
"https://files.pythonhosted.org/packages/a1/0d/7f9ae243c04e96c5455d111e21b09087d0eeaf9a1369e13a01c7d3d82478/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2014a2b18ad3ca53b1f6c23f8cd94a18ce930c1837bd891262c182640eb40a6", size = 499887, upload-time = "2025-04-08T10:35:25.969Z" }, - { url = "https://files.pythonhosted.org/packages/8e/0f/a257766998e26aca4b3acf2ae97dff04b57071e991a510857d3799247c67/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f6ae86d5cb647bf58f9f655fcf577f713915a5d69057a0371bc257e2553234", size = 454498, upload-time = "2025-04-08T10:35:27.353Z" }, - { url = "https://files.pythonhosted.org/packages/81/79/8bf142575a03e0af9c3d5f8bcae911ee6683ae93a625d349d4ecf4c8f7df/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1a7bac2bde1d661fb31f4d4e8e539e178774b76db3c2c17c4bb3e960a5de07a2", size = 630663, upload-time = "2025-04-08T10:35:28.685Z" }, - { url = "https://files.pythonhosted.org/packages/f1/80/abe2e79f610e45c63a70d271caea90c49bbf93eb00fa947fa9b803a1d51f/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ab626da2fc1ac277bbf752446470b367f84b50295264d2d313e28dc4405d663", size = 625410, upload-time = "2025-04-08T10:35:30.42Z" }, - { url = "https://files.pythonhosted.org/packages/91/6f/bc7fbecb84a41a9069c2c6eb6319f7f7df113adf113e358c57fc1aff7ff5/watchfiles-1.0.5-cp312-cp312-win32.whl", hash = "sha256:9f4571a783914feda92018ef3901dab8caf5b029325b5fe4558c074582815249", size = 277965, upload-time = "2025-04-08T10:35:32.023Z" }, - { url = "https://files.pythonhosted.org/packages/99/a5/bf1c297ea6649ec59e935ab311f63d8af5faa8f0b86993e3282b984263e3/watchfiles-1.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:360a398c3a19672cf93527f7e8d8b60d8275119c5d900f2e184d32483117a705", size = 291693, upload-time = "2025-04-08T10:35:33.225Z" }, - { url = 
"https://files.pythonhosted.org/packages/7f/7b/fd01087cc21db5c47e5beae507b87965db341cce8a86f9eb12bf5219d4e0/watchfiles-1.0.5-cp312-cp312-win_arm64.whl", hash = "sha256:1a2902ede862969077b97523987c38db28abbe09fb19866e711485d9fbf0d417", size = 283287, upload-time = "2025-04-08T10:35:34.568Z" }, - { url = "https://files.pythonhosted.org/packages/c7/62/435766874b704f39b2fecd8395a29042db2b5ec4005bd34523415e9bd2e0/watchfiles-1.0.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0b289572c33a0deae62daa57e44a25b99b783e5f7aed81b314232b3d3c81a11d", size = 401531, upload-time = "2025-04-08T10:35:35.792Z" }, - { url = "https://files.pythonhosted.org/packages/6e/a6/e52a02c05411b9cb02823e6797ef9bbba0bfaf1bb627da1634d44d8af833/watchfiles-1.0.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a056c2f692d65bf1e99c41045e3bdcaea3cb9e6b5a53dcaf60a5f3bd95fc9763", size = 392417, upload-time = "2025-04-08T10:35:37.048Z" }, - { url = "https://files.pythonhosted.org/packages/3f/53/c4af6819770455932144e0109d4854437769672d7ad897e76e8e1673435d/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9dca99744991fc9850d18015c4f0438865414e50069670f5f7eee08340d8b40", size = 453423, upload-time = "2025-04-08T10:35:38.357Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d1/8e88df58bbbf819b8bc5cfbacd3c79e01b40261cad0fc84d1e1ebd778a07/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:894342d61d355446d02cd3988a7326af344143eb33a2fd5d38482a92072d9563", size = 458185, upload-time = "2025-04-08T10:35:39.708Z" }, - { url = "https://files.pythonhosted.org/packages/ff/70/fffaa11962dd5429e47e478a18736d4e42bec42404f5ee3b92ef1b87ad60/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab44e1580924d1ffd7b3938e02716d5ad190441965138b4aa1d1f31ea0877f04", size = 486696, upload-time = "2025-04-08T10:35:41.469Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/db/723c0328e8b3692d53eb273797d9a08be6ffb1d16f1c0ba2bdbdc2a3852c/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6f9367b132078b2ceb8d066ff6c93a970a18c3029cea37bfd7b2d3dd2e5db8f", size = 522327, upload-time = "2025-04-08T10:35:43.289Z" }, - { url = "https://files.pythonhosted.org/packages/cd/05/9fccc43c50c39a76b68343484b9da7b12d42d0859c37c61aec018c967a32/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2e55a9b162e06e3f862fb61e399fe9f05d908d019d87bf5b496a04ef18a970a", size = 499741, upload-time = "2025-04-08T10:35:44.574Z" }, - { url = "https://files.pythonhosted.org/packages/23/14/499e90c37fa518976782b10a18b18db9f55ea73ca14641615056f8194bb3/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0125f91f70e0732a9f8ee01e49515c35d38ba48db507a50c5bdcad9503af5827", size = 453995, upload-time = "2025-04-08T10:35:46.336Z" }, - { url = "https://files.pythonhosted.org/packages/61/d9/f75d6840059320df5adecd2c687fbc18960a7f97b55c300d20f207d48aef/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:13bb21f8ba3248386337c9fa51c528868e6c34a707f729ab041c846d52a0c69a", size = 629693, upload-time = "2025-04-08T10:35:48.161Z" }, - { url = "https://files.pythonhosted.org/packages/fc/17/180ca383f5061b61406477218c55d66ec118e6c0c51f02d8142895fcf0a9/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:839ebd0df4a18c5b3c1b890145b5a3f5f64063c2a0d02b13c76d78fe5de34936", size = 624677, upload-time = "2025-04-08T10:35:49.65Z" }, - { url = "https://files.pythonhosted.org/packages/bf/15/714d6ef307f803f236d69ee9d421763707899d6298d9f3183e55e366d9af/watchfiles-1.0.5-cp313-cp313-win32.whl", hash = "sha256:4a8ec1e4e16e2d5bafc9ba82f7aaecfeec990ca7cd27e84fb6f191804ed2fcfc", size = 277804, upload-time = "2025-04-08T10:35:51.093Z" }, - { url = 
"https://files.pythonhosted.org/packages/a8/b4/c57b99518fadf431f3ef47a610839e46e5f8abf9814f969859d1c65c02c7/watchfiles-1.0.5-cp313-cp313-win_amd64.whl", hash = "sha256:f436601594f15bf406518af922a89dcaab416568edb6f65c4e5bbbad1ea45c11", size = 291087, upload-time = "2025-04-08T10:35:52.458Z" }, + { url = "https://files.pythonhosted.org/packages/f3/40/b1c265d4b2b62b58576588510fc4d1fe60a86319c8de99fd8e9fec617d2c/virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11", size = 6057982, upload-time = "2025-05-08T17:58:21.15Z" }, ] [[package]] @@ -1225,37 +1132,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload-time = "2024-04-23T22:16:14.422Z" }, ] -[[package]] -name = "websockets" -version = "15.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, - { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, - { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, - { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, - { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, - { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = 
"2025-03-05T20:02:30.291Z" }, - { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, - { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, - { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, - { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, - { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, - { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, - { url = 
"https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, - { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, - { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" }, - { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, - { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" }, - { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = 
"2025-03-05T20:02:51.561Z" }, - { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, - { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, - { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, -] - [[package]] name = "wsproto" version = "1.2.0" From 567036d12e608af62175698703788dac226991ae Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Tue, 27 May 2025 17:24:57 -0400 Subject: [PATCH 032/565] Update README.md --- README.md | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index 68d2be79..6b92e81c 100644 --- a/README.md +++ b/README.md @@ -42,16 +42,8 @@ curl -LsSf https://astral.sh/uv/install.sh | sh # 3. Install the project and all dependencies uv sync - -# 4. Run the server for initial setup (this will prompt you for credentials) -uv run main.py --no-lazy-init --no-headless ``` -That's it! 
UV will: -- Automatically create a virtual environment -- Install all dependencies from `pyproject.toml` -- Handle the [LinkedIn scraper](https://github.com/joeyism/linkedin_scraper) git dependency - #### For Development If you want to contribute or modify the code: @@ -91,7 +83,7 @@ ChromeDriver is required for Selenium to interact with Chrome. You need to insta ### Quick Start -After installation, simply run: +After installation, run: ```bash # Start the server (first time setup) @@ -104,8 +96,8 @@ uv run main.py --no-lazy-init --no-headless # Normal operation (lazy initialization) uv run main.py -# Debug mode with visible browser -uv run main.py --no-headless --debug +# Debug mode with visible browser and direct startup +uv run main.py --no-headless --debug --no-lazy-init # Skip setup prompts (for automation) uv run main.py --no-setup @@ -146,7 +138,7 @@ uv run main.py --no-setup export LINKEDIN_EMAIL=your.email@example.com export LINKEDIN_PASSWORD=your_password ``` - - Alternatively, you can run the server once manually and you'll be prompted for credentials, which will then be stored securely in your system's keychain (macOS Keychain, Windows Credential Locker, etc.) + - Alternatively, you can run the server once manually. You'll be prompted for credentials, which will then be stored securely in your system's keychain (macOS Keychain, Windows Credential Locker, etc.). ## โš™๏ธ Configuration System @@ -210,7 +202,7 @@ Once specified, the ChromeDriver path is used for the current session but not st ## ๐Ÿ”„ Using with Claude Desktop -1. **After adding the configuration** to Claude Desktop, restart the application +1. **After adding the configuration** to Claude Desktop, restart Claude Desktop. The tools should be listed in the settings icon menu. 2. **Start a conversation** with Claude 3. **You'll see tools available** in the tools menu (settings icon) 4. 
**You can now ask Claude** to retrieve LinkedIn profiles, companies, and job details From eb103d052ba17f3dfda7b32d38f0365a8e2a7872 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Thu, 29 May 2025 20:24:42 -0400 Subject: [PATCH 033/565] Update README.md --- README.md | 40 ++++++++++++---------------------------- 1 file changed, 12 insertions(+), 28 deletions(-) diff --git a/README.md b/README.md index 6b92e81c..1ecb1481 100644 --- a/README.md +++ b/README.md @@ -8,20 +8,20 @@ A Model Context Protocol (MCP) server that enables interaction with LinkedIn thr https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 -## ๐Ÿ“‹ Features & Tool Status +## Features & Tool Status -### โœ… **Working Tools** +### Working Tools - **Profile Scraping** (`get_person_profile`): Get detailed information from LinkedIn profiles including work history, education, skills, and connections - **Company Analysis** (`get_company_profile`): Extract company information with comprehensive details (employees optional) - **Job Details** (`get_job_details`): Retrieve specific job posting details using direct LinkedIn job URLs - **Session Management** (`close_session`): Properly close browser sessions and clean up resources -### โš ๏ธ **Tools with Known Issues** +### Tools with Known Issues - **Job Search** (`search_jobs`): Currently experiencing ChromeDriver compatibility issues with LinkedIn's search interface - **Recommended Jobs** (`get_recommended_jobs`): Has Selenium method compatibility issues due to outdated scraping methods - **Company Profiles**: Some companies may have restricted access or may return empty results (need further investigation) -## ๐Ÿ”ง Installation +## Installation ### Prerequisites @@ -79,7 +79,7 @@ ChromeDriver is required for Selenium to interact with Chrome. 
You need to insta ``` - **Option 3**: The server will attempt to auto-detect or prompt you for the path when run -## ๐Ÿš€ Running the Server +## Running the Server ### Quick Start @@ -140,7 +140,7 @@ uv run main.py --no-setup ``` - Alternatively, you can run the server once manually. You'll be prompted for credentials, which will then be stored securely in your system's keychain (macOS Keychain, Windows Credential Locker, etc.). -## โš™๏ธ Configuration System +## Configuration System ### Configuration Hierarchy @@ -200,31 +200,26 @@ The ChromeDriver path is found in this order: Once specified, the ChromeDriver path is used for the current session but not stored persistently. -## ๐Ÿ”„ Using with Claude Desktop +## Using with Claude Desktop 1. **After adding the configuration** to Claude Desktop, restart Claude Desktop. The tools should be listed in the settings icon menu. 2. **Start a conversation** with Claude 3. **You'll see tools available** in the tools menu (settings icon) 4. **You can now ask Claude** to retrieve LinkedIn profiles, companies, and job details -### โœ… **Recommended Usage Examples** +### Recommended Usage Examples - "Can you tell me about Daniel's work experience? His LinkedIn profile is https://www.linkedin.com/in/stickerdaniel/" - "Get details about this job posting: https://www.linkedin.com/jobs/view/1234567890" -- "Tell me about Electric Mind as a company based on their LinkedIn page" -- "Get the profile information for this LinkedIn user: [profile URL]" +- "Tell me about the company Google based on their LinkedIn page." 
-### โš ๏ธ **Usage Examples with Known Issues** -- ~~"Search for machine learning engineer jobs on LinkedIn"~~ (currently not working due to browser automation issues) -- ~~"Show me recommended jobs from LinkedIn"~~ (has compatibility issues) - -## ๐Ÿ” Security and Privacy +## Security and Privacy - Your LinkedIn credentials are securely stored in your system's native keychain/credential manager with user-only permissions - Credentials are never exposed to Claude or any other AI and are only used for the LinkedIn login to scrape data - The server runs on your local machine, not in the cloud - All LinkedIn scraping happens through your account - be aware that profile visits are visible to other users -## โš ๏ธ Troubleshooting +## Troubleshooting ### Tool-Specific Issues @@ -236,17 +231,6 @@ Once specified, the ChromeDriver path is used for the current session but not st **Recommended Jobs (`get_recommended_jobs`) Errors:** - Contains outdated Selenium methods (`find_elements_by_class_name`) - LinkedIn has updated their DOM structure -- Use manual job discovery and direct URLs as workaround - -**Empty Job Descriptions:** -- Job content may be dynamically loaded -- Some job postings have protected content -- Try accessing the job URL directly in a browser first - -**Company Profile Access Issues:** -- Some companies restrict automated access -- Corporate LinkedIn pages may have different structures -- Try with well-known company URLs first ### ChromeDriver Issues @@ -275,7 +259,7 @@ If Claude cannot connect to the server: ## License -This project is licensed under the MIT License - see the LICENSE file for details. 
+This project is licensed under the MIT License ## Acknowledgements From 55c69146babdef60de38fe46a260e99af07b1e61 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Sun, 1 Jun 2025 01:41:04 -0400 Subject: [PATCH 034/565] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1ecb1481..6e4f5bb5 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 ### Working Tools - **Profile Scraping** (`get_person_profile`): Get detailed information from LinkedIn profiles including work history, education, skills, and connections -- **Company Analysis** (`get_company_profile`): Extract company information with comprehensive details (employees optional) +- **Company Analysis** (`get_company_profile`): Extract company information with comprehensive details - **Job Details** (`get_job_details`): Retrieve specific job posting details using direct LinkedIn job URLs - **Session Management** (`close_session`): Properly close browser sessions and clean up resources From a2ce57cf13a9dc6177e8c57ead295bbcb9d1ea14 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Sat, 28 Jun 2025 14:35:58 -0400 Subject: [PATCH 035/565] Update issue templates --- .github/ISSUE_TEMPLATE/bug_report.md | 38 +++++++++++++++++++++++ .github/ISSUE_TEMPLATE/feature_request.md | 20 ++++++++++++ 2 files changed, 58 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000..099f1aee --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,38 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: bug +assignees: stickerdaniel + 
+--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + - OS: [e.g. iOS] + - Browser [e.g. chrome, safari] + - Version [e.g. 22] + +**Smartphone (please complete the following information):** + - Device: [e.g. iPhone6] + - OS: [e.g. iOS8.1] + - Browser [e.g. stock browser, safari] + - Version [e.g. 22] + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 00000000..bbcbbe7d --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. 
From 81b8a961728e523e00a9196a321e825ba8e67050 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 28 Jun 2025 16:09:51 -0400 Subject: [PATCH 036/565] feat(issue-templates): enhance bug report and feature request templates for LinkedIn MCP server --- .github/ISSUE_TEMPLATE/bug_report.md | 109 ++++++++++++++---- .github/ISSUE_TEMPLATE/config.yml | 15 +++ .github/ISSUE_TEMPLATE/documentation_issue.md | 53 +++++++++ .github/ISSUE_TEMPLATE/feature_request.md | 27 +++-- .vscode/settings.json | 3 + 5 files changed, 171 insertions(+), 36 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/config.yml create mode 100644 .github/ISSUE_TEMPLATE/documentation_issue.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 099f1aee..ca4d2122 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,38 +1,99 @@ --- -name: Bug report -about: Create a report to help us improve -title: '' -labels: bug -assignees: stickerdaniel +name: Bug Report +about: Create a report to help us improve the LinkedIn MCP server +title: '[BUG] ' +labels: ['bug'] +assignees: '' --- +## Bug Description **Describe the bug** A clear and concise description of what the bug is. -**To Reproduce** -Steps to reproduce the behavior: -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - **Expected behavior** A clear and concise description of what you expected to happen. -**Screenshots** -If applicable, add screenshots to help explain your problem. +**Actual behavior** +What actually happened instead. 
+ +## MCP Configuration & Client Info +**MCP Client Used** +- [ ] Claude Desktop +- [ ] Other MCP client (specify): ___________ + +**Claude Desktop Configuration** +Please share your MCP configuration from Claude Desktop settings (remove sensitive info): +```json +{ + "mcpServers": { + "linkedin-scraper": { + // Your configuration here + } + } +} +``` + +**Transport Mode** +- [ ] stdio +- [ ] sse + +## Environment Details +**Operating System** +- [ ] macOS +- [ ] Windows +- [ ] Linux + +**Python Version** +- Python version: ___________ + +**Package Manager used** +- [ ] UV (recommended) +- [ ] pip +- [ ] Other: ___________ + +**ChromeDriver Info** +- ChromeDriver location: ___________ +- Installation method: + - [ ] Auto-detected + - [ ] Manual path specified + - [ ] Environment variable + +## Tool & LinkedIn Context +**Tool Used** +- [ ] get_person_profile +- [ ] get_company_profile +- [ ] get_job_details +- [ ] search_jobs +- [ ] get_recommended_jobs +- [ ] close_session + +**LinkedIn Context** (if applicable) +- Account type: [ ] Free [ ] Premium [ ] Sales Navigator +- Two-factor authentication enabled: [ ] Yes [ ] No +- Corporate/VPN network: [ ] Yes [ ] No + +## Error Details +**Error Messages** +``` +Paste any error messages here +``` + +**Console Output/Logs** +``` +Paste relevant console output or logs here +``` + +## Steps to Reproduce +1. Go to '...' +2. Send message '....' +3. Scroll down to '....' +4. See error -**Desktop (please complete the following information):** - - OS: [e.g. iOS] - - Browser [e.g. chrome, safari] - - Version [e.g. 22] +## Screenshots/Videos +If applicable, add screenshots or videos to help explain your problem. -**Smartphone (please complete the following information):** - - Device: [e.g. iPhone6] - - OS: [e.g. iOS8.1] - - Browser [e.g. stock browser, safari] - - Version [e.g. 
22] +## Additional Context +- Issue also occurs in `--no-headless` mode: [ ] Yes [ ] No -**Additional context** Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000..1b148b56 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,15 @@ +# .github/ISSUE_TEMPLATE/config.yml +blank_issues_enabled: false +contact_links: + - name: ๐Ÿ—ฃ๏ธ General Questions & Discussion + url: https://github.com/stickerdaniel/linkedin-mcp-server/discussions + about: Ask questions about setup, usage, or get help from the community + - name: ๐Ÿ“š Share Your Setup & Get Help with Configuration + url: https://github.com/stickerdaniel/linkedin-mcp-server/discussions/categories/q-a + about: Share how you set up the MCP in your favorite client or get help with configuration + - name: ๐Ÿ’ก Ideas & Suggestions + url: https://github.com/stickerdaniel/linkedin-mcp-server/discussions/categories/ideas + about: Share ideas for new features or improvements (before creating a formal feature request) + - name: ๐Ÿš€ Show and Tell + url: https://github.com/stickerdaniel/linkedin-mcp-server/discussions/categories/show-and-tell + about: I would love to see how you're using the LinkedIn MCP server and what you're building with it! 
diff --git a/.github/ISSUE_TEMPLATE/documentation_issue.md b/.github/ISSUE_TEMPLATE/documentation_issue.md new file mode 100644 index 00000000..0147b010 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/documentation_issue.md @@ -0,0 +1,53 @@ +--- +name: Documentation Issue +about: Report problems with README, setup instructions, or other documentation +title: '[DOCS] ' +labels: ['documentation'] +assignees: '' + +--- + +## Documentation Problem +**What documentation issue did you find?** +- [ ] Incorrect/outdated setup instructions +- [ ] Missing information +- [ ] Unclear/confusing explanations +- [ ] Broken links +- [ ] Example code doesn't work +- [ ] Missing prerequisites +- [ ] Inconsistent information +- [ ] Typos/grammar issues +- [ ] Other: ___________ + +## Location +**Where is the documentation issue?** +- [ ] README.md +- [ ] Installation section +- [ ] Configuration section +- [ ] Troubleshooting section +- [ ] Code comments +- [ ] Error messages +- [ ] CLI help text +- [ ] Other: ___________ + +**Specific section/line:** +___________ + +## Current Documentation +**What does the documentation currently say?** +``` +Paste the current text or link to the specific section +``` + +## Problem Description +**What's wrong or confusing about it?** +A clear description of why this documentation is problematic. + +## Suggested Fix +**What should it say instead?** +``` +Suggested replacement text or improvements +``` + +## Additional Context +Add any other context, screenshots, or examples that would help improve the documentation. 
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index bbcbbe7d..ed4490f7 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,20 +1,23 @@ --- -name: Feature request -about: Suggest an idea for this project -title: '' -labels: '' +name: Feature Request +about: Suggest an idea for the LinkedIn MCP server +title: '[FEATURE] ' +labels: ['enhancement'] assignees: '' --- -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] +## What feature would you like? +Describe what you want to happen and why it would be useful. -**Describe the solution you'd like** -A clear and concise description of what you want to happen. +## Use case +How would you use this feature? +``` +Example: "Claude, get me the skills from this LinkedIn profile: [URL]" +``` -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. +## Suggested implementation +If you have a specific idea for how to implement this feature, please describe it here. -**Additional context** -Add any other context or screenshots about the feature request here. +## Additional context +Add any other details that would help. 
diff --git a/.vscode/settings.json b/.vscode/settings.json index 73ce5e12..d4683ead 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -13,4 +13,7 @@ "source.organizeImports.ruff": "explicit" } }, + "yaml.schemas": { + "https://www.schemastore.org/github-issue-config.json": "file:///Users/daniel/Documents/development/python/linkedin-mcp-server/.github/ISSUE_TEMPLATE/config.yml" + }, } From ea8c3a755b7544e4eaed034345e5de6fbacbf020 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 28 Jun 2025 16:11:32 -0400 Subject: [PATCH 037/565] fix(issue-template): update label for general questions to improve clarity --- .github/ISSUE_TEMPLATE/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 1b148b56..30b94366 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,7 +1,7 @@ # .github/ISSUE_TEMPLATE/config.yml blank_issues_enabled: false contact_links: - - name: ๐Ÿ—ฃ๏ธ General Questions & Discussion + - name: ๐Ÿ’ฌ General Questions & Discussion url: https://github.com/stickerdaniel/linkedin-mcp-server/discussions about: Ask questions about setup, usage, or get help from the community - name: ๐Ÿ“š Share Your Setup & Get Help with Configuration From 541bc5ea7a17d06212a02460a3db5a3ed41a16b0 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 28 Jun 2025 16:25:45 -0400 Subject: [PATCH 038/565] fix(issue-template): rename "Show and Tell" label for better engagement --- .github/ISSUE_TEMPLATE/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 30b94366..fea277f1 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -10,6 +10,6 @@ contact_links: - name: ๐Ÿ’ก Ideas & Suggestions url: https://github.com/stickerdaniel/linkedin-mcp-server/discussions/categories/ideas about: Share ideas for 
new features or improvements (before creating a formal feature request) - - name: ๐Ÿš€ Show and Tell + - name: ๐Ÿ™Œ Show and Tell url: https://github.com/stickerdaniel/linkedin-mcp-server/discussions/categories/show-and-tell about: I would love to see how you're using the LinkedIn MCP server and what you're building with it! From 3d587d66e0076f3a8885ce798d85bb5f2fff6c2c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 29 Jun 2025 15:21:35 -0400 Subject: [PATCH 039/565] feat(pre-commit): add additional hooks for YAML validation, large file checks, merge conflict detection, and debug statement removal --- .pre-commit-config.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 33ab114b..f38af8f0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,6 +5,10 @@ repos: hooks: - id: trailing-whitespace - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files + - id: check-merge-conflict + - id: debug-statements - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. 
From 6f93a0706b47ed3836db96b0bc801cbdb4de13f1 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 29 Jun 2025 15:21:39 -0400 Subject: [PATCH 040/565] fix(issue-template): update discussion links for better navigation and clarity --- .github/ISSUE_TEMPLATE/config.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index fea277f1..0e2cd75e 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -2,13 +2,13 @@ blank_issues_enabled: false contact_links: - name: ๐Ÿ’ฌ General Questions & Discussion - url: https://github.com/stickerdaniel/linkedin-mcp-server/discussions + url: https://github.com/stickerdaniel/linkedin-mcp-server/discussions/categories/general-questions-discussion about: Ask questions about setup, usage, or get help from the community - name: ๐Ÿ“š Share Your Setup & Get Help with Configuration - url: https://github.com/stickerdaniel/linkedin-mcp-server/discussions/categories/q-a + url: https://github.com/stickerdaniel/linkedin-mcp-server/discussions/categories/share-your-setup-get-help-with-configuration about: Share how you set up the MCP in your favorite client or get help with configuration - name: ๐Ÿ’ก Ideas & Suggestions - url: https://github.com/stickerdaniel/linkedin-mcp-server/discussions/categories/ideas + url: https://github.com/stickerdaniel/linkedin-mcp-server/discussions/categories/ideas-suggestions about: Share ideas for new features or improvements (before creating a formal feature request) - name: ๐Ÿ™Œ Show and Tell url: https://github.com/stickerdaniel/linkedin-mcp-server/discussions/categories/show-and-tell From 22f3546e98c67e94a7e93fd8e81820fcd2cf1bec Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 29 Jun 2025 15:50:39 -0400 Subject: [PATCH 041/565] feat(type-checking): switch from mypy to ty alpha --- .claude/settings.local.json | 9 ++++ .pre-commit-config.yaml | 10 ++-- 
linkedin_mcp_server/config/__init__.py | 3 +- linkedin_mcp_server/tools/job.py | 2 +- main.py | 4 +- pyproject.toml | 2 +- uv.lock | 63 +++++++++++--------------- 7 files changed, 49 insertions(+), 44 deletions(-) create mode 100644 .claude/settings.local.json diff --git a/.claude/settings.local.json b/.claude/settings.local.json new file mode 100644 index 00000000..b9b05067 --- /dev/null +++ b/.claude/settings.local.json @@ -0,0 +1,9 @@ +{ + "permissions": { + "allow": [ + "WebFetch(domain:github.com)", + "Bash(ty check:*)" + ], + "deny": [] + } +} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f38af8f0..1ee7ed44 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,7 +18,11 @@ repos: args: [--fix] - id: ruff-format -- repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.15.0 +- repo: local hooks: - - id: mypy + - id: ty + name: ty + entry: uv run ty check + language: system + types: [python] + pass_filenames: false diff --git a/linkedin_mcp_server/config/__init__.py b/linkedin_mcp_server/config/__init__.py index b9cbc289..caa61510 100644 --- a/linkedin_mcp_server/config/__init__.py +++ b/linkedin_mcp_server/config/__init__.py @@ -22,7 +22,8 @@ def get_config() -> AppConfig: if _config is None: _config = load_config() logger.debug("Configuration loaded") - return _config + # At this point _config is guaranteed to be AppConfig, not None + return _config # type: ignore[return-value] def reset_config() -> None: diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 0e243894..88c3f027 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -99,7 +99,7 @@ async def get_recommended_jobs() -> List[Dict[str, Any]]: ) # Get recommended jobs and convert to dictionaries - if hasattr(job_search, "recommended_jobs"): + if hasattr(job_search, "recommended_jobs") and job_search.recommended_jobs: return [job.to_dict() for job in job_search.recommended_jobs] else: return 
[] diff --git a/main.py b/main.py index 894d171c..351dc3ea 100644 --- a/main.py +++ b/main.py @@ -6,7 +6,7 @@ import sys import logging import inquirer # type: ignore -from typing import Literal, NoReturn +from typing import Literal # Import the new centralized configuration from linkedin_mcp_server.config import get_config @@ -70,7 +70,7 @@ def main() -> None: mcp.run(transport=transport) -def exit_gracefully(exit_code: int = 0) -> NoReturn: +def exit_gracefully(exit_code: int = 0) -> None: """Exit the application gracefully, cleaning up resources.""" print("\n๐Ÿ‘‹ Shutting down LinkedIn MCP server...") shutdown_handler() diff --git a/pyproject.toml b/pyproject.toml index 2fbbd933..6453b6a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,9 +21,9 @@ linkedin-scraper = { git = "https://github.com/joeyism/linkedin_scraper.git" } [dependency-groups] dev = [ - "mypy>=1.15.0", "pre-commit>=4.2.0", "pytest>=8.3.5", "pytest-cov>=6.1.1", "ruff>=0.11.11", + "ty>=0.0.1a12", ] diff --git a/uv.lock b/uv.lock index 2b383462..5a5635e3 100644 --- a/uv.lock +++ b/uv.lock @@ -435,11 +435,11 @@ dependencies = [ [package.dev-dependencies] dev = [ - { name = "mypy" }, { name = "pre-commit" }, { name = "pytest" }, { name = "pytest-cov" }, { name = "ruff" }, + { name = "ty" }, ] [package.metadata] @@ -454,11 +454,11 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ - { name = "mypy", specifier = ">=1.15.0" }, { name = "pre-commit", specifier = ">=4.2.0" }, { name = "pytest", specifier = ">=8.3.5" }, { name = "pytest-cov", specifier = ">=6.1.1" }, { name = "ruff", specifier = ">=0.11.11" }, + { name = "ty", specifier = ">=0.0.1a12" }, ] [[package]] @@ -568,40 +568,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2b/9f/7ba6f94fc1e9ac3d2b853fdff3035fb2fa5afbed898c4a72b8a020610594/more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e", size = 65278, upload-time = "2025-04-22T14:17:40.49Z" 
}, ] -[[package]] -name = "mypy" -version = "1.15.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mypy-extensions" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ce/43/d5e49a86afa64bd3839ea0d5b9c7103487007d728e1293f52525d6d5486a/mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", size = 3239717, upload-time = "2025-02-05T03:50:34.655Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/98/3a/03c74331c5eb8bd025734e04c9840532226775c47a2c39b56a0c8d4f128d/mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", size = 10793981, upload-time = "2025-02-05T03:50:28.25Z" }, - { url = "https://files.pythonhosted.org/packages/f0/1a/41759b18f2cfd568848a37c89030aeb03534411eef981df621d8fad08a1d/mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", size = 9749175, upload-time = "2025-02-05T03:50:13.411Z" }, - { url = "https://files.pythonhosted.org/packages/12/7e/873481abf1ef112c582db832740f4c11b2bfa510e829d6da29b0ab8c3f9c/mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", size = 11455675, upload-time = "2025-02-05T03:50:31.421Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", size = 12410020, upload-time = "2025-02-05T03:48:48.705Z" }, - { url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", size = 12498582, upload-time = "2025-02-05T03:49:03.628Z" }, - { url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", size = 9366614, upload-time = "2025-02-05T03:50:00.313Z" }, - { url = "https://files.pythonhosted.org/packages/6a/9b/fd2e05d6ffff24d912f150b87db9e364fa8282045c875654ce7e32fffa66/mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", size = 10788592, upload-time = "2025-02-05T03:48:55.789Z" }, - { url = "https://files.pythonhosted.org/packages/74/37/b246d711c28a03ead1fd906bbc7106659aed7c089d55fe40dd58db812628/mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", size = 9753611, upload-time = "2025-02-05T03:48:44.581Z" }, - { url = "https://files.pythonhosted.org/packages/a6/ac/395808a92e10cfdac8003c3de9a2ab6dc7cde6c0d2a4df3df1b815ffd067/mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", size = 11438443, upload-time = "2025-02-05T03:49:25.514Z" }, - { url = "https://files.pythonhosted.org/packages/d2/8b/801aa06445d2de3895f59e476f38f3f8d610ef5d6908245f07d002676cbf/mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", size = 12402541, upload-time = "2025-02-05T03:49:57.623Z" }, - { url = "https://files.pythonhosted.org/packages/c7/67/5a4268782eb77344cc613a4cf23540928e41f018a9a1ec4c6882baf20ab8/mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", size = 12494348, upload-time = "2025-02-05T03:48:52.361Z" }, - { url = "https://files.pythonhosted.org/packages/83/3e/57bb447f7bbbfaabf1712d96f9df142624a386d98fb026a761532526057e/mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", size = 9373648, upload-time = "2025-02-05T03:49:11.395Z" }, - { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777, upload-time = "2025-02-05T03:50:08.348Z" }, -] - -[[package]] -name = "mypy-extensions" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, -] - [[package]] name = "nodeenv" version = "1.9.1" @@ -1037,6 +1003,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/19/eb640a397bba49ba49ef9dbe2e7e5c04202ba045b6ce2ec36e9cadc51e04/trio_websocket-0.12.2-py3-none-any.whl", hash = "sha256:df605665f1db533f4a386c94525870851096a223adcb97f72a07e8b4beba45b6", size = 21221, upload-time = "2025-02-25T05:16:57.545Z" }, ] +[[package]] +name = "ty" +version = "0.0.1a12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/70/91/be7dfada3aec20ee06ed350d508f00a2fd47dfdcda61d76875e7cb80abfd/ty-0.0.1a12.tar.gz", hash = "sha256:41dfc8eac0b4fb735d5e101cde8c8734a3c13f670eeebc975760e6414882b702", size = 3127188, upload-time = "2025-06-25T11:50:06.358Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/93/b70c7f54f55de52c6a01cc16fbb0880e80b2bf4ce80f73722fe05d3db630/ty-0.0.1a12-py3-none-linux_armv6l.whl", hash = "sha256:acb0959ac54853e677a44a10bbb7b209389eac5ec4f3084705c8065625badfa3", size = 6718708, upload-time = "2025-06-25T11:49:33.281Z" }, + { url = "https://files.pythonhosted.org/packages/f6/79/5dff5e35e9c00a1ea632ef3d1844b989e0674a2871d30314cab147e51618/ty-0.0.1a12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:65da32147fac319ee4ca08af25e363ba8ebe461268e13dc3b09fcdd74974e338", size = 6837580, upload-time = "2025-06-25T11:49:35.287Z" }, + { url = "https://files.pythonhosted.org/packages/60/8c/800e21ee673a00a6360519946f45266791ffc5fd40ad3ff3ea36d1d689a6/ty-0.0.1a12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f8522efca591a621f19af89d639176f329b46d6db475510333fca92e4bc8279e", size = 6468202, upload-time = "2025-06-25T11:49:36.97Z" }, + { url = "https://files.pythonhosted.org/packages/a2/48/f3913803eb5f7a99fc449dd047b1e61735e917dc59294ff55b4637ca69d1/ty-0.0.1a12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d6627db2a8ebc12a28acf55d017f8a11e06a87f55dae4dee5677ea02dc72702", size = 6596703, upload-time = "2025-06-25T11:49:38.739Z" }, + { url = "https://files.pythonhosted.org/packages/17/bc/ad290112a7cbe4bfae9e33611971d3228314ca8bd5dc8faeb33dd015c427/ty-0.0.1a12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2876e6e947d5696511d8b185f3b45dc3f8a96c409e3fe1c05533cef0fcd9541b", size = 6577838, upload-time = "2025-06-25T11:49:40.713Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/f5/2c4c26b1ebc2e0ddea6d7309133f4f48d3530b2fda14021a73aa2d596357/ty-0.0.1a12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fc31000e0f0e054c8aba92db67f1fcd73c588dab598b020789699f23fd61eff", size = 7349544, upload-time = "2025-06-25T11:49:42.517Z" }, + { url = "https://files.pythonhosted.org/packages/d4/b5/5985ad9e2a17fdf7df950a824c3b00a04086f4b4cd42a4c982d7dd2b43da/ty-0.0.1a12-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e6d214ad154ab9c265b268257703f46c6d4a3a5901e0e9bcbc879760a6118041", size = 7791984, upload-time = "2025-06-25T11:49:44.32Z" }, + { url = "https://files.pythonhosted.org/packages/a7/e0/db803dbbbcefc0ff343e46330e68ec42b6963eb2dcf30bb4f00fcdd2aa1c/ty-0.0.1a12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d60a61fe04acefafc8fcd1ef2dc1383ec2cad53c4409d4223817f85a1cb3ef8a", size = 7448658, upload-time = "2025-06-25T11:49:46.114Z" }, + { url = "https://files.pythonhosted.org/packages/47/45/cffd60dd22c8ec8a56a9de1a69ae9e7b0924f90994b6ef6d55a63656438e/ty-0.0.1a12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ad7dff29bb96bda0dec80dc494946e6cda3e377ebda755ff2d453db0211228e", size = 7323623, upload-time = "2025-06-25T11:49:48.229Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d4/153cbdb64ee712872959b9421bc1a4d06636946edf0aa984011dc7601230/ty-0.0.1a12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdd258c97f076de6e289cb41b3b24812ab5a562d4d0e98573bf38c195d564d92", size = 7130031, upload-time = "2025-06-25T11:49:50.21Z" }, + { url = "https://files.pythonhosted.org/packages/7e/c6/4f20e75ec37782434b44d0b59be0011fa4ddcd2d0f2f91e7d53fb88e3e6c/ty-0.0.1a12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4b2fe76b8e7b4a066a962e839993f3422ce1391b2261afe0384b3560efce8f80", size = 6499534, upload-time = "2025-06-25T11:49:52.244Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/ce/915232248ac9000f9122c477410b731d6c9c23e1da1c9002091f25270cd1/ty-0.0.1a12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b19ae81024646350a3bb1031c61608ed836e8cf05e8b6e1d3b6ab465abeeff80", size = 6608484, upload-time = "2025-06-25T11:49:54.305Z" }, + { url = "https://files.pythonhosted.org/packages/11/30/073cb624440173cfb40196fae9be87125e40b2d224fa9ced68df60a401cf/ty-0.0.1a12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:04cefeccc934a6389c21fd41426c271a95751e88544eb70f64953a8caa5306f8", size = 7012818, upload-time = "2025-06-25T11:49:56.228Z" }, + { url = "https://files.pythonhosted.org/packages/d3/13/cae962003ffd8c56cafcb1e466bb262692abef1a4a5ad95b602111ff410c/ty-0.0.1a12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1f2e07a927134f7287142ff264b8862025175d2329fa2293aedddb58ac59014d", size = 7191992, upload-time = "2025-06-25T11:49:58.411Z" }, + { url = "https://files.pythonhosted.org/packages/05/cb/fe2a5bdf0f3b013798042b568893bd5a61387f03a6b4e7986b75d4a4d7ac/ty-0.0.1a12-py3-none-win32.whl", hash = "sha256:8f1571a10b5ff16eeaa91ed240ec880b2c008d9fcd106426fc904bddfc126fbc", size = 6381651, upload-time = "2025-06-25T11:50:00.112Z" }, + { url = "https://files.pythonhosted.org/packages/62/22/f7037027c07335d3f89663d196490542ff6c58191326a2c330b34cd3bf28/ty-0.0.1a12-py3-none-win_amd64.whl", hash = "sha256:6b3d8f787ef8247f5564cd86fdb182157bc99c220677988ef7f66cc6502ae83a", size = 6957120, upload-time = "2025-06-25T11:50:01.899Z" }, + { url = "https://files.pythonhosted.org/packages/72/80/82c4aca7b4246f3805a8d62b3650d574b18a848cf3f696c1d4bbdb5e613c/ty-0.0.1a12-py3-none-win_arm64.whl", hash = "sha256:5983f745cc40d15c77434d188dbce7218e2baceba88f1b8f1108763cedad81b4", size = 6572157, upload-time = "2025-06-25T11:50:03.986Z" }, +] + [[package]] name = "typer" version = "0.15.2" From edb1ca79159868b770b528734bbb0eb08c5cf449 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 29 Jun 2025 15:50:45 -0400 Subject: [PATCH 
042/565] feat(vscode): update task labels and details for clarity and improved usability --- .vscode/tasks.json | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index e284e49c..6f0ca7c4 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -2,7 +2,8 @@ "version": "2.0.0", "tasks": [ { - "label": "Run pre-commit", + "label": "uv run pre-commit run --all-files", + "detail": "Run pre-commit hooks on all files", "type": "shell", "command": "uv", "args": ["run", "pre-commit", "run", "--all-files"], @@ -18,10 +19,11 @@ "problemMatcher": [] }, { - "label": "Run main.py (debug)", + "label": "uv run main.py --debug --no-headless --no-lazy-init", + "detail": "Run main.py in debug mode with visible window and login immediately", "type": "shell", "command": "uv", - "args": ["run", "main.py", "--no-headless", "--no-lazy-init", "--debug"], + "args": ["run", "main.py", "--debug", "--no-headless", "--no-lazy-init"], "group": { "kind": "build", "isDefault": false @@ -33,7 +35,8 @@ }, "problemMatcher": [] }, { - "label": "Run main.py", + "label": "uv run main.py --no-headless --no-lazy-init", + "detail": "Run main.py with visible window and login immediately", "type": "shell", "command": "uv", "args": ["run", "main.py", "--no-headless", "--no-lazy-init"], @@ -49,7 +52,8 @@ "problemMatcher": [] }, { - "label": "Follow Logs", + "label": "uv run tail -n 20 -F ~/Library/Logs/Claude/mcp*.log", + "detail": "Follow Claude Desktop logs", "type": "shell", "command": "tail", "args": ["-n", "20", "-F", "~/Library/Logs/Claude/mcp*.log"], From 73c50c564d0f129db174eae89378ad2d1b5db41b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 30 Jun 2025 05:21:21 -0400 Subject: [PATCH 043/565] feat(docker): Docker setup with Selenium Grid --- .claude/settings.local.json | 1 + .dockerignore | 154 ++++++++++++++++++++++++++ .env.example | 6 + Dockerfile | 45 +++----- README.md | 45 ++++++-- docker-compose.yml | 36 ++++++ 
linkedin_mcp_server/drivers/chrome.py | 51 ++++++--- 7 files changed, 285 insertions(+), 53 deletions(-) create mode 100644 .dockerignore create mode 100644 .env.example create mode 100644 docker-compose.yml diff --git a/.claude/settings.local.json b/.claude/settings.local.json index b9b05067..dea752b9 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -2,6 +2,7 @@ "permissions": { "allow": [ "WebFetch(domain:github.com)", + "WebFetch(domain:docs.astral.sh)", "Bash(ty check:*)" ], "deny": [] diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..f36c54d1 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,154 @@ +# Based on .gitignore with Docker-specific additions + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pipenv +#Pipfile.lock + +# poetry +#poetry.lock + +# pdm +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582 +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings 
+.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# Ruff stuff: +.ruff_cache/ + +# PyPI configuration file +.pypirc + +# Cursor +.cursorignore +.cursorindexingignore +.cursor + +# Docker-specific exclusions +.git +.github +README.md +.DS_Store diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..0ee40f97 --- /dev/null +++ b/.env.example @@ -0,0 +1,6 @@ +# LinkedIn credentials +LINKEDIN_EMAIL=your.email@example.com +LINKEDIN_PASSWORD=your_password + +# Selenium configuration (for Docker) +SELENIUM_REMOTE_URL=http://selenium:4444/wd/hub diff --git a/Dockerfile b/Dockerfile index 58302a68..3c05bce1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,35 +1,26 @@ -# Generated by https://smithery.ai. See: https://smithery.ai/docs/config#dockerfile -FROM python:3.12-slim +FROM python:3.12-alpine -# Install Chrome dependencies -RUN apt-get update && apt-get install -y \ - wget \ - unzip \ - libnss3 \ - libgconf-2-4 \ - libxi6 \ - libgdk-pixbuf2.0-0 \ - libxrandr2 \ - ca-certificates \ - fonts-liberation \ - libappindicator3-1 \ - libasound2 \ - libatk-bridge2.0-0 \ - libatk1.0-0 \ - libgtk-3-0 \ - && rm -rf /var/lib/apt/lists/* +# Install system dependencies +RUN apk add --no-cache \ + git \ + curl -# Set work directory +# Install uv from official image +COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ + +# Set working directory WORKDIR /app -# Copy the project files +# Copy project files COPY . /app -# Upgrade pip and install build dependencies -RUN pip install --upgrade pip \ - && pip install --no-cache-dir . 
+# Sync dependencies and install project +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --frozen -# Expose any ports if necessary (MCP likely communicates via stdio so no port exposure) +# Create a non-root user +RUN adduser -D -u 1000 mcpuser && chown -R mcpuser:mcpuser /app +USER mcpuser -# Set default command to run the MCP server -CMD ["python", "main.py", "--no-setup"] +# Default command +CMD ["uv", "run", "python", "main.py", "--no-setup"] diff --git a/README.md b/README.md index 6e4f5bb5..03227f0f 100644 --- a/README.md +++ b/README.md @@ -23,30 +23,55 @@ https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 ## Installation -### Prerequisites +Choose your installation method: -- Python 3.12 or higher -- Chrome browser installed -- ChromeDriver matching your Chrome version (we'll help you set this up) +**๐Ÿ“ฆ [Docker Installation](#docker-installation-recommended)** - No ChromeDriver setup needed +**๐Ÿ”ง [Local Installation](#local-installation-with-chromedriver)** - Install ChromeDriver manually + +--- + +### Docker Installation (Recommended) + +No ChromeDriver setup required - uses Selenium Grid in containers. + +```bash +# 1. Clone and setup +git clone https://github.com/stickerdaniel/linkedin-mcp-server +cd linkedin-mcp-server +cp .env.example .env + +# 2. Add your LinkedIn credentials to .env + +# 3. Start services +docker-compose up --build +``` + +--- + +### Local Installation (with ChromeDriver) + +**Prerequisites:** +- [Chrome browser](https://www.google.com/chrome/) installed - A LinkedIn account -### Quick Start (Recommended) +**Setup:** ```bash # 1. Clone the repository git clone https://github.com/stickerdaniel/linkedin-mcp-server cd linkedin-mcp-server -# 2. Install UV if you don't have it +# 2.1 Install UV if you don't have it curl -LsSf https://astral.sh/uv/install.sh | sh +# 2.2 Install python if you don't have it +uv python install # 3. 
Install the project and all dependencies uv sync ``` #### For Development - -If you want to contribute or modify the code: +If you plan to modify the code and contribute (feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues?q=sort%3Aupdated-desc+is%3Aissue+is%3Aopen) / [PR](https://github.com/stickerdaniel/linkedin-mcp-server/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen)!): ```bash # Install with development dependencies @@ -99,7 +124,7 @@ uv run main.py # Debug mode with visible browser and direct startup uv run main.py --no-headless --debug --no-lazy-init -# Skip setup prompts (for automation) +# Skip setup prompts (for your mcp client to start the server after you've configured it once) uv run main.py --no-setup ``` @@ -161,8 +186,6 @@ Configuration values are loaded with the following precedence (highest to lowest 3. **System keychain**: Securely stored credentials from previous sessions -4. **Default values**: Built-in fallback values - ### Command-line Options | Option | Description | diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..471a5ca4 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,36 @@ +# Docker Compose configuration for LinkedIn MCP Server with Selenium Grid + +services: + selenium: + image: selenium/standalone-chromium:latest + container_name: linkedin-selenium + ports: + - "4444:4444" # Selenium Grid + - "7900:7900" # VNC (optional for debugging) + shm_size: 2g + environment: + - SE_NODE_MAX_SESSIONS=1 + - SE_NODE_SESSION_TIMEOUT=300 + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:4444/wd/hub/status"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + + linkedin-mcp: + build: . 
+ container_name: linkedin-mcp-server + depends_on: + selenium: + condition: service_healthy + environment: + - SELENIUM_REMOTE_URL=http://selenium:4444/wd/hub + - LINKEDIN_EMAIL=${LINKEDIN_EMAIL} + - LINKEDIN_PASSWORD=${LINKEDIN_PASSWORD} + volumes: + - .:/app + working_dir: /app + command: ["uv", "run", "python", "main.py", "--no-setup"] + stdin_open: true + tty: true diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 4079e45b..a1f62e78 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -6,7 +6,7 @@ """ import sys -from typing import Dict, Optional +from typing import Dict, Optional, Union import os from selenium import webdriver from selenium.webdriver.chrome.options import Options @@ -18,16 +18,16 @@ from linkedin_mcp_server.config.providers import clear_credentials_from_keyring # Global driver storage to reuse sessions -active_drivers: Dict[str, webdriver.Chrome] = {} +active_drivers: Dict[str, Union[webdriver.Chrome, webdriver.Remote]] = {} -def get_or_create_driver() -> Optional[webdriver.Chrome]: +def get_or_create_driver() -> Optional[Union[webdriver.Chrome, webdriver.Remote]]: """ Get existing driver or create a new one using the configured settings. 
Returns: - Optional[webdriver.Chrome]: Chrome WebDriver instance or None if initialization fails - in non-interactive mode + Optional[Union[webdriver.Chrome, webdriver.Remote]]: WebDriver instance or None if initialization fails + in non-interactive mode Raises: WebDriverException: If the driver cannot be created and not in non-interactive mode @@ -47,11 +47,13 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: if config.chrome.headless: chrome_options.add_argument("--headless=new") - # Add additional options for stability + # Add essential options for stability (compatible with both Grid and direct) chrome_options.add_argument("--no-sandbox") chrome_options.add_argument("--disable-dev-shm-usage") chrome_options.add_argument("--disable-gpu") chrome_options.add_argument("--window-size=1920,1080") + chrome_options.add_argument("--disable-extensions") + chrome_options.add_argument("--disable-background-timer-throttling") chrome_options.add_argument( "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36" ) @@ -62,13 +64,32 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: # Initialize Chrome driver try: - if config.chrome.chromedriver_path: - print(f"๐ŸŒ Using ChromeDriver at path: {config.chrome.chromedriver_path}") - service = Service(executable_path=config.chrome.chromedriver_path) - driver = webdriver.Chrome(service=service, options=chrome_options) - else: - print("๐ŸŒ Using auto-detected ChromeDriver") - driver = webdriver.Chrome(options=chrome_options) + # Check for remote Selenium URL (Docker environment) + selenium_url = os.environ.get( + "SELENIUM_REMOTE_URL", "http://localhost:4444/wd/hub" + ) + + # First, try to connect to Selenium Grid (Docker or remote) + try: + print(f"๐ŸŒ Attempting to connect to Selenium Grid at {selenium_url}...") + driver = webdriver.Remote( + command_executor=selenium_url, options=chrome_options + ) + print("โœ… Connected to Selenium 
Grid successfully") + except Exception as grid_error: + print(f"โš ๏ธ Selenium Grid not available at {selenium_url}: {grid_error}") + print("๐ŸŒ Falling back to local ChromeDriver...") + + # Fallback to local ChromeDriver + if config.chrome.chromedriver_path: + print( + f"๐ŸŒ Using ChromeDriver at path: {config.chrome.chromedriver_path}" + ) + service = Service(executable_path=config.chrome.chromedriver_path) + driver = webdriver.Chrome(service=service, options=chrome_options) + else: + print("๐ŸŒ Using auto-detected ChromeDriver") + driver = webdriver.Chrome(options=chrome_options) # Add a page load timeout for safety driver.set_page_load_timeout(60) @@ -94,12 +115,12 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: raise WebDriverException(error_msg) -def login_to_linkedin(driver: webdriver.Chrome) -> bool: +def login_to_linkedin(driver: Union[webdriver.Chrome, webdriver.Remote]) -> bool: """ Log in to LinkedIn using stored or provided credentials. Args: - driver: Chrome WebDriver instance + driver: WebDriver instance (Chrome or Remote) Returns: bool: True if login was successful, False otherwise From 0db47fc69674de74c03eb9bc668472bc4dd5d329 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Mon, 30 Jun 2025 15:29:03 -0400 Subject: [PATCH 044/565] Update README.md --- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index 03227f0f..85143973 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,5 @@ # LinkedIn MCP Server -[![smithery badge](https://smithery.ai/badge/@stickerdaniel/linkedin-mcp-server)](https://smithery.ai/server/@stickerdaniel/linkedin-mcp-server) - A Model Context Protocol (MCP) server that enables interaction with LinkedIn through Claude and other AI assistants. This server allows you to scrape LinkedIn profiles, companies, jobs, and perform job searches. 
From e3e359a327a524de19183f30e3fc77006cad0f86 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Mon, 30 Jun 2025 15:29:38 -0400 Subject: [PATCH 045/565] Update README.md --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 85143973..4705d9d5 100644 --- a/README.md +++ b/README.md @@ -24,6 +24,7 @@ https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 Choose your installation method: **๐Ÿ“ฆ [Docker Installation](#docker-installation-recommended)** - No ChromeDriver setup needed + **๐Ÿ”ง [Local Installation](#local-installation-with-chromedriver)** - Install ChromeDriver manually --- From 92174c109aef8ace889b7522e544393f421ecafd Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Mon, 30 Jun 2025 15:29:48 -0400 Subject: [PATCH 046/565] Update README.md --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index 4705d9d5..85143973 100644 --- a/README.md +++ b/README.md @@ -24,7 +24,6 @@ https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 Choose your installation method: **๐Ÿ“ฆ [Docker Installation](#docker-installation-recommended)** - No ChromeDriver setup needed - **๐Ÿ”ง [Local Installation](#local-installation-with-chromedriver)** - Install ChromeDriver manually --- From 6dabc9d64bf263298720ef409fefade4bf7cdb1c Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Mon, 30 Jun 2025 15:30:02 -0400 Subject: [PATCH 047/565] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 85143973..3b6485a2 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 Choose your installation method: -**๐Ÿ“ฆ [Docker Installation](#docker-installation-recommended)** - 
No ChromeDriver setup needed +**๐Ÿ“ฆ [Docker Installation](#docker-installation-recommended)** - No ChromeDriver setup needed **๐Ÿ”ง [Local Installation](#local-installation-with-chromedriver)** - Install ChromeDriver manually --- From 7a4e8a20c13d3d1eb3118a692ba32b796693475f Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 30 Jun 2025 19:06:41 -0400 Subject: [PATCH 048/565] feat(docker): Improved Docker setup with a single container --- .claude/settings.local.json | 3 +- .env.example | 6 - Dockerfile | 8 +- README.md | 357 ++++++++++---------------- docker-compose.yml | 36 --- linkedin_mcp_server/drivers/chrome.py | 50 ++-- 6 files changed, 159 insertions(+), 301 deletions(-) delete mode 100644 .env.example delete mode 100644 docker-compose.yml diff --git a/.claude/settings.local.json b/.claude/settings.local.json index dea752b9..eddd25c8 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -3,7 +3,8 @@ "allow": [ "WebFetch(domain:github.com)", "WebFetch(domain:docs.astral.sh)", - "Bash(ty check:*)" + "Bash(ty check:*)", + "Bash(./build.sh)" ], "deny": [] } diff --git a/.env.example b/.env.example deleted file mode 100644 index 0ee40f97..00000000 --- a/.env.example +++ /dev/null @@ -1,6 +0,0 @@ -# LinkedIn credentials -LINKEDIN_EMAIL=your.email@example.com -LINKEDIN_PASSWORD=your_password - -# Selenium configuration (for Docker) -SELENIUM_REMOTE_URL=http://selenium:4444/wd/hub diff --git a/Dockerfile b/Dockerfile index 3c05bce1..7c7b2e4c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,9 +1,11 @@ FROM python:3.12-alpine -# Install system dependencies +# Install system dependencies including Chromium and ChromeDriver RUN apk add --no-cache \ git \ - curl + curl \ + chromium \ + chromium-chromedriver # Install uv from official image COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ @@ -23,4 +25,4 @@ RUN adduser -D -u 1000 mcpuser && chown -R mcpuser:mcpuser /app USER mcpuser # Default command -CMD ["uv", "run", "python", 
"main.py", "--no-setup"] +CMD ["uv", "run", "python", "main.py", "--no-setup", "--no-lazy-init"] diff --git a/README.md b/README.md index 3b6485a2..7bfc1e6b 100644 --- a/README.md +++ b/README.md @@ -19,274 +19,181 @@ https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 - **Recommended Jobs** (`get_recommended_jobs`): Has Selenium method compatibility issues due to outdated scraping methods - **Company Profiles**: Some companies may have restricted access or may return empty results (need further investigation) -## Installation +## ๐ŸŽฏ Usage Examples -Choose your installation method: - -**๐Ÿ“ฆ [Docker Installation](#docker-installation-recommended)** - No ChromeDriver setup needed -**๐Ÿ”ง [Local Installation](#local-installation-with-chromedriver)** - Install ChromeDriver manually - ---- - -### Docker Installation (Recommended) - -No ChromeDriver setup required - uses Selenium Grid in containers. - -```bash -# 1. Clone and setup -git clone https://github.com/stickerdaniel/linkedin-mcp-server -cd linkedin-mcp-server -cp .env.example .env - -# 2. Add your LinkedIn credentials to .env - -# 3. Start services -docker-compose up --build ``` - ---- - -### Local Installation (with ChromeDriver) - -**Prerequisites:** -- [Chrome browser](https://www.google.com/chrome/) installed -- A LinkedIn account - -**Setup:** - -```bash -# 1. Clone the repository -git clone https://github.com/stickerdaniel/linkedin-mcp-server -cd linkedin-mcp-server - -# 2.1 Install UV if you don't have it -curl -LsSf https://astral.sh/uv/install.sh | sh -# 2.2 Install python if you don't have it -uv python install - -# 3. 
Install the project and all dependencies -uv sync +Get Daniel's profile https://www.linkedin.com/in/stickerdaniel/ ``` - -#### For Development -If you plan to modify the code and contribute (feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues?q=sort%3Aupdated-desc+is%3Aissue+is%3Aopen) / [PR](https://github.com/stickerdaniel/linkedin-mcp-server/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen)!): - -```bash -# Install with development dependencies -uv sync --group dev - -# Install pre-commit hooks -uv run pre-commit install +``` +Analyze this company https://www.linkedin.com/company/docker/ +``` +``` +Get details about this job posting https://www.linkedin.com/jobs/view/123456789 ``` -### ChromeDriver Setup - -ChromeDriver is required for Selenium to interact with Chrome. You need to install the version that matches your Chrome browser. - -1. **Check your Chrome version**: - - Open Chrome and go to the menu (three dots) > Help > About Google Chrome - - Note the version number (e.g., 123.0.6312.87) - -2. **Download matching ChromeDriver**: - - Go to [ChromeDriver Downloads](https://chromedriver.chromium.org/downloads) / [Chrome for Testing](https://googlechromelabs.github.io/chrome-for-testing/) (Chrome-Version 115+) - - Download the version that matches your Chrome version - - Extract the downloaded file +The server automatically handles login, navigation, and data extraction. -3. 
**Make ChromeDriver accessible**: - - **Option 1**: Place it in a directory that's in your PATH (e.g., `/usr/local/bin` on macOS/Linux) - - **Option 2**: Set the CHROMEDRIVER environment variable to the path where you placed it: - ```bash - export CHROMEDRIVER=/path/to/chromedriver # macOS/Linux - # OR - set CHROMEDRIVER=C:\path\to\chromedriver.exe # Windows - ``` - - **Option 3**: The server will attempt to auto-detect or prompt you for the path when run +## Installation Methods -## Running the Server +Choose your preferred installation method: -### Quick Start +[![Install with Claude Desktop](https://img.shields.io/badge/Claude_Desktop-One_Click_Install-blue?style=for-the-badge&logo=anthropic)](https://claude.ai/install-mcp?name=linkedin&config=eyJjb21tYW5kIjoiZG9ja2VyIiwiYXJncyI6WyJydW4iLCItaSIsIi0tcm0iLCItZSIsIkxJTktFRElOX0VNQUlMIiwiLWUiLCJMSU5LRURJTl9QQVNTV09SRCIsIm1jcC9saW5rZWRpbiJdfQ%3D%3D) +[![Docker Hub](https://img.shields.io/badge/Docker_Hub-mcp/linkedin-2496ED?style=for-the-badge&logo=docker)](https://hub.docker.com/r/mcp/linkedin) +[![Contributors](https://img.shields.io/badge/Contributors-Local_Setup-green?style=for-the-badge&logo=github)](https://github.com/stickerdaniel/linkedin-mcp-server#%EF%B8%8F-local-setup-contributors-only) -After installation, run: +--- -```bash -# Start the server (first time setup) -uv run main.py --no-lazy-init --no-headless +## ๐Ÿณ Docker Setup (Recommended) + +**Zero setup required** - no Chrome installation, no ChromeDriver management, no dependencies. + +### Installation + +**Claude Desktop:** +```json +{ + "mcpServers": { + "linkedin": { + "command": "docker", + "args": [ + "run", "-i", "--rm", + "-e", "LINKEDIN_EMAIL", + "-e", "LINKEDIN_PASSWORD", + "mcp/linkedin" + ], + "env": { + "LINKEDIN_EMAIL": "your.email@example.com", + "LINKEDIN_PASSWORD": "your_password" + } + } + } +} ``` -### Running Options +
+๐Ÿณ Manual Docker Usage ```bash -# Normal operation (lazy initialization) -uv run main.py - -# Debug mode with visible browser and direct startup -uv run main.py --no-headless --debug --no-lazy-init - -# Skip setup prompts (for your mcp client to start the server after you've configured it once) -uv run main.py --no-setup +docker run -i --rm \ + -e LINKEDIN_EMAIL="your.email@example.com" \ + -e LINKEDIN_PASSWORD="your_password" \ + mcp/linkedin ``` -### Configuration for Claude Desktop - -1. **The server will automatically**: - - Display the configuration needed for Claude Desktop - - Copy it to your clipboard for easy pasting - -2. **Add to Claude Desktop**: - - Open Claude Desktop and go to Settings > Developer > Edit Config - - Paste the configuration provided by the server - - Example Claude Desktop configuration: - ```json - { - "mcpServers": { - "linkedin-scraper": { - "command": "uv", - "args": ["--directory", "/path/to/linkedin-mcp-server", "run", "main.py", "--no-setup"], - "env": { - "LINKEDIN_EMAIL": "your.email@example.com", - "LINKEDIN_PASSWORD": "your_password" - } - } - } - } - ``` - -### Credential Management - -- **Lazy initialization (default behavior)**: - - The server uses lazy initialization, meaning it will only create the Chrome driver and log in when a tool is actually used - - You can set environment variables for non-interactive use: - ```bash - export LINKEDIN_EMAIL=your.email@example.com - export LINKEDIN_PASSWORD=your_password - ``` - - Alternatively, you can run the server once manually. You'll be prompted for credentials, which will then be stored securely in your system's keychain (macOS Keychain, Windows Credential Locker, etc.). - -## Configuration System - -### Configuration Hierarchy - -Configuration values are loaded with the following precedence (highest to lowest): - -1. **Command-line arguments**: - ```bash - uv run main.py --no-headless --debug - ``` - -2. 
**Environment variables**: - ```bash - export LINKEDIN_EMAIL=your.email@example.com - export LINKEDIN_PASSWORD=your_password - export CHROMEDRIVER=/path/to/chromedriver - ``` - *Note: Environment variables always override credentials stored in the system keychain* - -3. **System keychain**: Securely stored credentials from previous sessions - -### Command-line Options - -| Option | Description | -|--------|-------------| -| `--no-headless` | Run Chrome with a visible browser window | -| `--debug` | Enable debug mode with additional logging | -| `--no-setup` | Skip configuration setup prompts | -| `--no-lazy-init` | Initialize Chrome driver immediately (instead of on first use) | +
-### Credential Storage +
+๐Ÿšจ Troubleshooting -Your LinkedIn credentials are stored securely using your system's native keychain/credential manager: +**Container won't start:** +```bash +# Check Docker is running +docker ps -- **macOS**: macOS Keychain -- **Windows**: Windows Credential Locker -- **Linux**: Native keyring (varies by distribution) +# Pull latest image +docker pull mcp/linkedin +``` -Credentials are managed as follows: +**Login issues:** +- Verify credentials are correct +- Check for typos in email/password +- Check if you need to confirm the login in the mobile app -1. First, the application checks for credentials in environment variables -2. Next, it checks the system keychain for stored credentials -3. If no credentials are found, you'll be prompted to enter them (in interactive mode) -4. Entered credentials are securely stored in your system keychain for future use +
-### Clearing Stored Credentials +--- -If you need to change your stored credentials, run the application with the `--no-lazy-init` flag and when prompted about login failure, select "Yes" to try with different credentials. +## ๐Ÿ› ๏ธ Local Setup (Develop & Contribute) -### ChromeDriver Configuration +**For contributors** who want to modify and debug the code. -The ChromeDriver path is found in this order: -1. From the `CHROMEDRIVER` environment variable -2. Auto-detected from common locations -3. Manually specified when prompted (if auto-detection fails) +**Prerequisites:** +- Python 3.12 or higher +- Chrome browser installed +- ChromeDriver (see setup below) -Once specified, the ChromeDriver path is used for the current session but not stored persistently. +**ChromeDriver Setup:** +1. **Check Chrome version**: Chrome โ†’ menu (โ‹ฎ) โ†’ Help โ†’ About Google Chrome +2. **Download matching ChromeDriver**: [Chrome for Testing](https://googlechromelabs.github.io/chrome-for-testing/) +3. **Make accessible**: + - Place in PATH (`/usr/local/bin` on macOS/Linux) + - Or set: `export CHROMEDRIVER_PATH=/path/to/chromedriver` + - if no CHROMEDRIVER_PATH is set, the server will try to find it automatically by checking common locations -## Using with Claude Desktop +### Installation -1. **After adding the configuration** to Claude Desktop, restart Claude Desktop. The tools should be listed in the settings icon menu. -2. **Start a conversation** with Claude -3. **You'll see tools available** in the tools menu (settings icon) -4. **You can now ask Claude** to retrieve LinkedIn profiles, companies, and job details +```bash +# 1. Clone repository +git clone https://github.com/stickerdaniel/linkedin-mcp-server +cd linkedin-mcp-server -### Recommended Usage Examples -- "Can you tell me about Daniel's work experience? 
His LinkedIn profile is https://www.linkedin.com/in/stickerdaniel/" -- "Get details about this job posting: https://www.linkedin.com/jobs/view/1234567890" -- "Tell me about the company Google based on their LinkedIn page." +# 2. Install UV package manager +curl -LsSf https://astral.sh/uv/install.sh | sh +uv python # install python if you don't have it -## Security and Privacy +# 3. Install dependencies and dev dependencies +uv sync +uv sync --group dev -- Your LinkedIn credentials are securely stored in your system's native keychain/credential manager with user-only permissions -- Credentials are never exposed to Claude or any other AI and are only used for the LinkedIn login to scrape data -- The server runs on your local machine, not in the cloud -- All LinkedIn scraping happens through your account - be aware that profile visits are visible to other users +# 4. Install pre-commit hooks +uv run pre-commit install -## Troubleshooting +# 5. Start the server once manually +# (you will be prompted to enter your LinkedIn credentials, and they are securely stored in your OS keychain) +uv run main.py --no-headless --no-lazy-init +``` -### Tool-Specific Issues +
+๐Ÿ”ง Configuration + +**CLI Options:** +- `--no-headless` - Show browser window (debugging) +- `--debug` - Enable detailed logging +- `--no-setup` - Skip credential prompts (make sure to set `LINKEDIN_EMAIL` and `LINKEDIN_PASSWORD` in env) +- `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call + +**Claude Desktop:** +```json +{ + "mcpServers": { + "linkedin": { + "command": "uv", + "args": ["--directory", "/path/to/linkedin-mcp-server", "run", "main.py", "--no-setup"] + } + } +} +``` -**Job Search (`search_jobs`) Not Working:** -- This tool currently has ChromeDriver compatibility issues -- Use direct job URLs with `get_job_details` instead -- LinkedIn's search interface has anti-automation measures +
-**Recommended Jobs (`get_recommended_jobs`) Errors:** -- Contains outdated Selenium methods (`find_elements_by_class_name`) -- LinkedIn has updated their DOM structure +
+๐Ÿšจ Troubleshooting -### ChromeDriver Issues +**Scraping issues:** +- Use `--no-headless` to see browser actions +- Add `--debug` to see more detailed logging -If you encounter ChromeDriver errors: -1. Ensure your Chrome browser is updated -2. Download the matching ChromeDriver version -3. Set the CHROMEDRIVER path correctly -4. Try running with administrator/sudo privileges if permission issues occur +**ChromeDriver issues:** +- Ensure Chrome and ChromeDriver versions match +- Check ChromeDriver is in PATH or set `CHROMEDRIVER_PATH` -### Authentication Issues +**Python issues:** +```bash +# Check Python version +python --version # Should be 3.12+ -If login fails: -1. Verify your LinkedIn credentials -2. Check if your account has two-factor authentication enabled -3. Try logging in manually to LinkedIn first, then run the server -4. Check your LinkedIn mobile app for a login request after running the server -5. Try to run the server with `--no-headless` to see where the login fails -6. Try to run the server with `--debug` to see more detailed logs +# Reinstall dependencies +uv sync --reinstall +``` -### Connection Issues +
-If Claude cannot connect to the server: -1. Ensure the server is running when you start it manually -2. Verify the configuration in Claude Desktop is correct -3. Restart Claude Desktop +Feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) or [PR](https://github.com/stickerdaniel/linkedin-mcp-server/pulls)! ## License -This project is licensed under the MIT License - -## Acknowledgements +MIT License -- Based on the [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by joeyism -- Uses the Model Context Protocol (MCP) for integration with AI assistants - ---- +โš ๏ธ **Important:** Use responsibly and in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. -**Note**: This tool is for personal use only. Use responsibly and in accordance with LinkedIn's terms of service. Web scraping may violate LinkedIn's terms of service. +**Acknowledgements:** Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) and [Model Context Protocol](https://modelcontextprotocol.io/). diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index 471a5ca4..00000000 --- a/docker-compose.yml +++ /dev/null @@ -1,36 +0,0 @@ -# Docker Compose configuration for LinkedIn MCP Server with Selenium Grid - -services: - selenium: - image: selenium/standalone-chromium:latest - container_name: linkedin-selenium - ports: - - "4444:4444" # Selenium Grid - - "7900:7900" # VNC (optional for debugging) - shm_size: 2g - environment: - - SE_NODE_MAX_SESSIONS=1 - - SE_NODE_SESSION_TIMEOUT=300 - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:4444/wd/hub/status"] - interval: 30s - timeout: 10s - retries: 3 - start_period: 40s - - linkedin-mcp: - build: . 
- container_name: linkedin-mcp-server - depends_on: - selenium: - condition: service_healthy - environment: - - SELENIUM_REMOTE_URL=http://selenium:4444/wd/hub - - LINKEDIN_EMAIL=${LINKEDIN_EMAIL} - - LINKEDIN_PASSWORD=${LINKEDIN_PASSWORD} - volumes: - - .:/app - working_dir: /app - command: ["uv", "run", "python", "main.py", "--no-setup"] - stdin_open: true - tty: true diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index a1f62e78..84e39045 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -6,7 +6,7 @@ """ import sys -from typing import Dict, Optional, Union +from typing import Dict, Optional import os from selenium import webdriver from selenium.webdriver.chrome.options import Options @@ -18,16 +18,16 @@ from linkedin_mcp_server.config.providers import clear_credentials_from_keyring # Global driver storage to reuse sessions -active_drivers: Dict[str, Union[webdriver.Chrome, webdriver.Remote]] = {} +active_drivers: Dict[str, webdriver.Chrome] = {} -def get_or_create_driver() -> Optional[Union[webdriver.Chrome, webdriver.Remote]]: +def get_or_create_driver() -> Optional[webdriver.Chrome]: """ Get existing driver or create a new one using the configured settings. 
Returns: - Optional[Union[webdriver.Chrome, webdriver.Remote]]: WebDriver instance or None if initialization fails - in non-interactive mode + Optional[webdriver.Chrome]: Chrome WebDriver instance or None if initialization fails + in non-interactive mode Raises: WebDriverException: If the driver cannot be created and not in non-interactive mode @@ -64,32 +64,22 @@ def get_or_create_driver() -> Optional[Union[webdriver.Chrome, webdriver.Remote] # Initialize Chrome driver try: - # Check for remote Selenium URL (Docker environment) - selenium_url = os.environ.get( - "SELENIUM_REMOTE_URL", "http://localhost:4444/wd/hub" + print("๐ŸŒ Initializing Chrome WebDriver...") + + # Use ChromeDriver path from environment or config + chromedriver_path = ( + os.environ.get("CHROMEDRIVER_PATH") or config.chrome.chromedriver_path ) - # First, try to connect to Selenium Grid (Docker or remote) - try: - print(f"๐ŸŒ Attempting to connect to Selenium Grid at {selenium_url}...") - driver = webdriver.Remote( - command_executor=selenium_url, options=chrome_options - ) - print("โœ… Connected to Selenium Grid successfully") - except Exception as grid_error: - print(f"โš ๏ธ Selenium Grid not available at {selenium_url}: {grid_error}") - print("๐ŸŒ Falling back to local ChromeDriver...") + if chromedriver_path: + print(f"๐ŸŒ Using ChromeDriver at path: {chromedriver_path}") + service = Service(executable_path=chromedriver_path) + driver = webdriver.Chrome(service=service, options=chrome_options) + else: + print("๐ŸŒ Using auto-detected ChromeDriver") + driver = webdriver.Chrome(options=chrome_options) - # Fallback to local ChromeDriver - if config.chrome.chromedriver_path: - print( - f"๐ŸŒ Using ChromeDriver at path: {config.chrome.chromedriver_path}" - ) - service = Service(executable_path=config.chrome.chromedriver_path) - driver = webdriver.Chrome(service=service, options=chrome_options) - else: - print("๐ŸŒ Using auto-detected ChromeDriver") - driver = 
webdriver.Chrome(options=chrome_options) + print("โœ… Chrome WebDriver initialized successfully") # Add a page load timeout for safety driver.set_page_load_timeout(60) @@ -115,12 +105,12 @@ def get_or_create_driver() -> Optional[Union[webdriver.Chrome, webdriver.Remote] raise WebDriverException(error_msg) -def login_to_linkedin(driver: Union[webdriver.Chrome, webdriver.Remote]) -> bool: +def login_to_linkedin(driver: webdriver.Chrome) -> bool: """ Log in to LinkedIn using stored or provided credentials. Args: - driver: WebDriver instance (Chrome or Remote) + driver: Chrome WebDriver instance Returns: bool: True if login was successful, False otherwise From 18f24e63470264739867c92a4ec2ca1b2291f7bd Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 30 Jun 2025 20:16:04 -0400 Subject: [PATCH 049/565] fix(docker): Update Docker Hub image references in README.md --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 7bfc1e6b..324b00ed 100644 --- a/README.md +++ b/README.md @@ -38,7 +38,7 @@ The server automatically handles login, navigation, and data extraction. 
Choose your preferred installation method: [![Install with Claude Desktop](https://img.shields.io/badge/Claude_Desktop-One_Click_Install-blue?style=for-the-badge&logo=anthropic)](https://claude.ai/install-mcp?name=linkedin&config=eyJjb21tYW5kIjoiZG9ja2VyIiwiYXJncyI6WyJydW4iLCItaSIsIi0tcm0iLCItZSIsIkxJTktFRElOX0VNQUlMIiwiLWUiLCJMSU5LRURJTl9QQVNTV09SRCIsIm1jcC9saW5rZWRpbiJdfQ%3D%3D) -[![Docker Hub](https://img.shields.io/badge/Docker_Hub-mcp/linkedin-2496ED?style=for-the-badge&logo=docker)](https://hub.docker.com/r/mcp/linkedin) +[![Docker Hub](https://img.shields.io/badge/Docker_Hub-stickerdaniel/linkedin--mcp--server-2496ED?style=for-the-badge&logo=docker)](https://hub.docker.com/r/stickerdaniel/linkedin-mcp-server) [![Contributors](https://img.shields.io/badge/Contributors-Local_Setup-green?style=for-the-badge&logo=github)](https://github.com/stickerdaniel/linkedin-mcp-server#%EF%B8%8F-local-setup-contributors-only) --- @@ -59,7 +59,7 @@ Choose your preferred installation method: "run", "-i", "--rm", "-e", "LINKEDIN_EMAIL", "-e", "LINKEDIN_PASSWORD", - "mcp/linkedin" + "stickerdaniel/linkedin-mcp-server" ], "env": { "LINKEDIN_EMAIL": "your.email@example.com", @@ -77,7 +77,7 @@ Choose your preferred installation method: docker run -i --rm \ -e LINKEDIN_EMAIL="your.email@example.com" \ -e LINKEDIN_PASSWORD="your_password" \ - mcp/linkedin + stickerdaniel/linkedin-mcp-server ``` @@ -91,7 +91,7 @@ docker run -i --rm \ docker ps # Pull latest image -docker pull mcp/linkedin +docker pull stickerdaniel/linkedin-mcp-server ``` **Login issues:** From 26998b85eb7efab9c29a2bf9a8ad9174559f8d02 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 30 Jun 2025 20:27:59 -0400 Subject: [PATCH 050/565] chore(readme): Update .gitignore for Docker deployment tracking and modify README.md for clarity --- .gitignore | 3 +++ README.md | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 907cba9c..ebb668cd 100644 --- a/.gitignore +++ 
b/.gitignore @@ -187,3 +187,6 @@ cython_debug/ .cursorignore .cursorindexingignore .cursor + +# Docker deployment tracking +.docker/ diff --git a/README.md b/README.md index 324b00ed..709a52b7 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,7 @@ Choose your preferred installation method: [![Install with Claude Desktop](https://img.shields.io/badge/Claude_Desktop-One_Click_Install-blue?style=for-the-badge&logo=anthropic)](https://claude.ai/install-mcp?name=linkedin&config=eyJjb21tYW5kIjoiZG9ja2VyIiwiYXJncyI6WyJydW4iLCItaSIsIi0tcm0iLCItZSIsIkxJTktFRElOX0VNQUlMIiwiLWUiLCJMSU5LRURJTl9QQVNTV09SRCIsIm1jcC9saW5rZWRpbiJdfQ%3D%3D) [![Docker Hub](https://img.shields.io/badge/Docker_Hub-stickerdaniel/linkedin--mcp--server-2496ED?style=for-the-badge&logo=docker)](https://hub.docker.com/r/stickerdaniel/linkedin-mcp-server) -[![Contributors](https://img.shields.io/badge/Contributors-Local_Setup-green?style=for-the-badge&logo=github)](https://github.com/stickerdaniel/linkedin-mcp-server#%EF%B8%8F-local-setup-contributors-only) +[![Development](https://img.shields.io/badge/Contributors-Local_Setup-green?style=for-the-badge&logo=github)](#%EF%B8%8F-local-setup-develop--contribute) --- From 93f58335bedc02ea7e229a165cf807dfffbd1603 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 30 Jun 2025 21:23:47 -0400 Subject: [PATCH 051/565] feat(dxt): Add new Claude Desktop DXT extension and update README for installation instructions --- .gitignore | 3 + README.md | 42 +++++++---- assets/icons/linkedin.svg | 1 + assets/screenshots/screenshot.png | Bin 0 -> 428200 bytes manifest.json | 121 ++++++++++++++++++++++++++++++ 5 files changed, 152 insertions(+), 15 deletions(-) create mode 100644 assets/icons/linkedin.svg create mode 100644 assets/screenshots/screenshot.png create mode 100644 manifest.json diff --git a/.gitignore b/.gitignore index ebb668cd..ed938ce6 100644 --- a/.gitignore +++ b/.gitignore @@ -190,3 +190,6 @@ cython_debug/ # Docker deployment tracking .docker/ + +# DXT 
extension packages (too large for git) +*.dxt diff --git a/README.md b/README.md index 709a52b7..789adec3 100644 --- a/README.md +++ b/README.md @@ -31,21 +31,21 @@ Analyze this company https://www.linkedin.com/company/docker/ Get details about this job posting https://www.linkedin.com/jobs/view/123456789 ``` -The server automatically handles login, navigation, and data extraction. - ## Installation Methods Choose your preferred installation method: -[![Install with Claude Desktop](https://img.shields.io/badge/Claude_Desktop-One_Click_Install-blue?style=for-the-badge&logo=anthropic)](https://claude.ai/install-mcp?name=linkedin&config=eyJjb21tYW5kIjoiZG9ja2VyIiwiYXJncyI6WyJydW4iLCItaSIsIi0tcm0iLCItZSIsIkxJTktFRElOX0VNQUlMIiwiLWUiLCJMSU5LRURJTl9QQVNTV09SRCIsIm1jcC9saW5rZWRpbiJdfQ%3D%3D) -[![Docker Hub](https://img.shields.io/badge/Docker_Hub-stickerdaniel/linkedin--mcp--server-2496ED?style=for-the-badge&logo=docker)](https://hub.docker.com/r/stickerdaniel/linkedin-mcp-server) -[![Development](https://img.shields.io/badge/Contributors-Local_Setup-green?style=for-the-badge&logo=github)](#%EF%B8%8F-local-setup-develop--contribute) +[![Docker Hub](https://img.shields.io/badge/Docker_Hub-Universal_MCP_Server-2496ED?style=for-the-badge&logo=docker)](https://hub.docker.com/r/stickerdaniel/linkedin-mcp-server) +[![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_Extension-purple?style=for-the-badge&logo=anthropic)](./linkedin-mcp-server-1.0.0.dxt) +[![Development](https://img.shields.io/badge/Development-Local_Setup-green?style=for-the-badge&logo=github)](#%EF%B8%8F-local-setup-develop--contribute) --- -## ๐Ÿณ Docker Setup (Recommended) +## ๐Ÿณ Docker Setup (Recommended - Universal) + +**Prerequisites:** Make sure you have [Docker](https://www.docker.com/get-started/) installed and running. -**Zero setup required** - no Chrome installation, no ChromeDriver management, no dependencies. 
+**Zero setup required** - just add the mcp server to your client config and replace email and password with your linkedin credentials. ### Installation @@ -103,20 +103,31 @@ docker pull stickerdaniel/linkedin-mcp-server --- +## ๐Ÿ“ฆ Claude Desktop (DXT Extension) + +**Prerequisites:** [Claude Desktop](https://claude.ai/desktop) installed + +**One-click installation** for Claude Desktop users: +1. Download the [DXT extension](./linkedin-mcp-server-1.0.0.dxt) +2. Double-click to install into Claude Desktop +3. Configure your LinkedIn credentials when prompted +4. Start using LinkedIn tools immediately + +The extension automatically handles Docker setup and credential management. + +--- + ## ๐Ÿ› ๏ธ Local Setup (Develop & Contribute) **For contributors** who want to modify and debug the code. -**Prerequisites:** -- Python 3.12 or higher -- Chrome browser installed -- ChromeDriver (see setup below) +**Prerequisites:** [Chrome browser](https://www.google.com/chrome/) and [Git](https://git-scm.com/downloads) installed **ChromeDriver Setup:** 1. **Check Chrome version**: Chrome โ†’ menu (โ‹ฎ) โ†’ Help โ†’ About Google Chrome 2. **Download matching ChromeDriver**: [Chrome for Testing](https://googlechromelabs.github.io/chrome-for-testing/) -3. **Make accessible**: - - Place in PATH (`/usr/local/bin` on macOS/Linux) +3. **Make it accessible**: + - Place ChromeDriver in PATH (`/usr/local/bin` on macOS/Linux) - Or set: `export CHROMEDRIVER_PATH=/path/to/chromedriver` - if no CHROMEDRIVER_PATH is set, the server will try to find it automatically by checking common locations @@ -194,6 +205,7 @@ Feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-serve MIT License -โš ๏ธ **Important:** Use responsibly and in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. 
+## Acknowledgements +Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@joeyism](https://github.com/joeyism) and [Model Context Protocol](https://modelcontextprotocol.io/). -**Acknowledgements:** Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) and [Model Context Protocol](https://modelcontextprotocol.io/). +โš ๏ธ Use responsibly and in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. diff --git a/assets/icons/linkedin.svg b/assets/icons/linkedin.svg new file mode 100644 index 00000000..4d5b353c --- /dev/null +++ b/assets/icons/linkedin.svg @@ -0,0 +1 @@ + diff --git a/assets/screenshots/screenshot.png b/assets/screenshots/screenshot.png new file mode 100644 index 0000000000000000000000000000000000000000..935ac47e8b3450048c6fbf1a77eb20e032deed91 GIT binary patch literal 428200 zcmY(qV{|1^(>5IE#C9gOJ#i+sZQGdGwsB%>V%yfl6Wg}+<$m7v;9K>hSD&+1@7>i^ zwX3eHDnda{903*=76b$YK~h3Q2?PYH7X$=C4*>Q3i>1_q$@c}+SxH<7r1~%Z2?z)g zh@^<1io4#q545++;^RXGf0B%+kkFOHl~NI@tS2<}TokO53WUn9jQh>Z$hjf_i706K z_eJ6g*-AnpTc*T!YvpJ&ohK=n%>OgNjj`uxa&q12FZV@G*W;>o{>+h!5Wk)mL{_A^ zx+3vY;8HizC`K|$Jc>I%1WSfM?p1ZovuY$fW{i60v(t?U#QzmNdx${$w}|H$A*I2(Z3xl<1lan`aw2HGUV#dT(%H^9{LO=Ue)! 
zBIRG^{rr?y!+MLD$rI$HM5NcJB-{R|9LtHjAR3^{x}vJ15X%*7HjE7>>6!-mhj;s4 zV}Q|b>0|(yu=#Z}JlvOs$@q7rPSL1KiT(N?+5RoKO-R%`%9P9hF_hqi33|vsVB2{l zl66KCfRF@JyHreQ)=j{exji2KYr=c%bA#0s?0joT+czWHKf3n|6^sS+v1eh`@*g`X zcy%SnTkaJJ3kVAs3)l%!{OtRS78oU(L5*p{cuL~8rv6*pKTzE5Ac1)@9?cFi@9ZB9 zMcm9n+Ma)UgFVhh^uE**tf|lpI*~>pfV`OhPmQ0{5GG*fxk8Tu&Rfy&t>260T1vI$ z{b!*s7BHNt+%G4`&+L&gh~=K3hIe!v(ElR=X@MZ-sZ{sGZ#Bw*AYh1BM|qXYD1BNb zZQ*icL3dPknk&)IbdU+5fli9jaR_XCiF6PzS?)$1gztw0s3U`Z-$LRYY|$%;LFuG} zyt%Cj|G&5QgARy3oc@xN)ypelA$|ng9!nelFP;BK@Bfi^Rsa=-f6y}(WHKy97UGOw zh={hP7nP$bPJ;HkCaQAxb<6b+xcuYB0T{gXnmAaRv_K4w7}^FkiI?*)42Vt`RyBL( zbK-79D z>+~f5gq9*hGx!22yQMe%Kdw_B2*PUT+=Qm?=|KAYMU&D$0LRjlMmhw#oM;G7jp*0o zaQs`$|3<;UFJ)x83iB%Ln;X^j$L&OWHJ7d1L8NlTU-p3S2PSw;c!-8H=K9}e6d zhvlyEJ}p^d6;AF=vXKlI4^)h|MyW9g8m#qM+$ zV^aE)J9p7;?oGun*uyW8-}-x&hx(RD>gU8PpcJ&ddUXucSmOWdi+%vm6Mk_}4X*Tp znKJVIw9@)xRS?Rqjg<*%Jk}%U;uO~O1Qp*hjV9uLp*x}C`yi=uw0(WbN1C_pawW?;=~ zKPInbgXs55G0uoyY6`I}t(e(mj_ZpE8Ebv4=^@PYd>TvrxO-vFic^W(M$`f+DI`$) zg8Y$?`Rd1+s%wDJ!N=5zAk-HtIvZ;%Vz$ivy!Eszw2?@y~w2Mb6Pw0 zej;u5N3N~lUz^GWCW=ZI>%Q!XDbJg5~f4DGT7DGa5=%X+G= znU{mmMZ410>1pX;(*+Dx76lq=A+RTZk}I;f{Iq{c2Ognb*O6OroD#6Zyt&KbrN^nm zpP0UWiXp02UuE~J#8n5eHPzLKAJP5pU-wSqjQpA`kl_{WGl-f)l&U?(7)5`~eKp)= zZj5+5Hby!=PkdYvs2HF15q)u{;>*1#{i^ZcR*Cq~2sZ@>v4l4Xj7(W_NaiZcUmvip zUc4_-P)Kk-ay+|{_3L@JL|CnQ7J8yMJyZR)sEn~$X`0{P{E!2p|COl!kt!IMM$Hks zZN4@VedsQ&r1E0sJ*fC!hx(*MHt>RcB$#?+aV=x#rpgOs-ysIU%YzNnDh-)G$@6hL zbYuLYbDty-GDY3;JMKCAJ982en`UdRbu#1bcg-c?}BUS(ciSE5pG zK_GR8eAI0oTf;WVy8Am?xj&-vquEK_@csqFA9~; zb9zta$D|Wuy+>{g8hw^4o=Pn;*RMN)H|c!hcK_iH5RfUgX{s6xfq(YPU6SdxEy{#S zWe^M5ms`(S+YVKD@QfMKx()K}RaEh=oep!V$*)X1bUL)mj3uTh@)i8M50`gCPY63X z)uPD)F@7#MvDEkt6}bRsSbI;ynBUXY`|pM=4G#(B%OR9fPDbs2NUET0i+U1VUsRKf6q@g}h_0UrBK4`Z22eVdrJIEBXZF{XGP7{p4bhu53?!Fy?|J z-P_gW76}?i9q~-(3`l3-3ZQ)5pCx|wPMtFrl%feoKSi>^Flmhz&4;@C<=c>eQqVEv z81JndOn(P=u!ovDaJ{{y!mlXS2qBG1vWARS-=aU=oS(%X>XF8$DDMT%zs$UXyf-Vt zdVBs&=@*^u4u?^&7^0DNZNb@Sy|A3IhwI(@GAgORX#LO8{hxe<05pNZ;LVPOBLg*$ 
z`88iE#cC8Y10li9es4MdVtu9xK6KS!n`{uy9cYe#Za;z7vnq-io)7xMkZ=xuazH=6 z>b$a{vD}BwnRq^$+J&-NIRi^$3Xvs8+B?cvI%i7naCHJfGIoLte=|umJ)Rs2si;KM zB!y5h1&hJT$+O&Kq%q_I59>lts$io~PPjLB5Yy^nNXj;Bu>1qFZznNDzR}YC6lIx* zl$FAIL5pmJIm{|`DY{1-N*rBia==>8q3)J*8!wAcvaZgb6m!NTNU6ey+P$FzuThI^ zXtUrbJWOAKfCYqNxSqu`m7a)ApzC#5e5+{dT?m$FHhB`EM8)}$G3evL`Ejq&i=2`D zBrkZxip@&SpZnYa5USR(f!HJk*)b&-s{V*)P0i9tv)mTlx|7d*fhhLOl{d=*mVuVS zadn(8JDrfcg4awa9XGKfJe@^0WL zSyB``OpJ@~a@k8*1f(Twn+CZsTZ6CO=WtEIHKx#y0hmN zkY3FmegVu@2S zrg@aY@P9HSI-aw~@p(OXQ$qkdv`ZxU0Zvj~Vvx%3KGC?WIE22>puV3Tw6FK45Fs28 zu*DQ9_nfprd=Cnv@3({12XGX*OdCdQC@S`l32Rh2sV%8^;4Pp!oju#U#O13EnUv1h zt%W9~zMAMqoO)){#J!mM<$>}xBMT6l0@itHE>CO(`@6!Ospgj(sjm)uUo6S&b2;QE zNMIR*>9DfE)%c4C?rq0FJY#uL)k7`bJM-cu^l)dw5#3YgL&-qNw-R*d6BD{_KwB5o zAoI=#OY&w^Cr@L@%vUhn*9t!+$?=C3#llKczl2`?JaJ^#TWMqZ3S36%7i<@MQCsng z+iB)Y4r%Vo714DrQSb4FI5utlckOk8eXIO`Ys)#2U#DlX9Kz11<0{p`=PRVh-{Ii% z^Ru=Hc(eWKBO)rkGaQ2#9@z zZsFHU=Twb9f>a31BN^)^)j#N=O$$I6DTG`pQV3IWyLe=WUrtz52ytV-U_Na2zGIa_ z+4?E<62a64m}_Q?ue#-;OSM!V0O`BfkvlEk89QacwPKTZc-q8#_pqU68%AFKajqv* zH_*At6*`xl{2YN|@Ne)<<=#<$TGYaV=<#wRhESbxWJbPQ16&uA& zi|z@?20h{qW;u?%o3pEHSqlj^nwGF1fELW5EOgdeQv zCvW7#Z7LK?xn3VHLRgBWiF>yI0s^F!YP7q%yC^??1Z{XdZcii#d%OtWlC0REEHIkT zqyHgp0yl&7X#opss!mOzjhzC8aY-T>qRqd?6E$oG@oKgOi(8S>gfC`I?|2T18>`+>iqFlEG77<<9n@0 z9$yy?VgiQ$m>w?Fl109Ev)5b{nX3E5I@g|&wn*(jnDddj_FLCXVd(&$(eb?X1%h9w zm_vW5b9YgC9Z)4L4!RGAi5($2@UT@ z44DV*;KCISjqQy-K`YPFm(PezAmtB`!UCgO5b?iZvc{jBRMxPZRhTQ0|6*77Zkj7w z8SK@&&)QESI~h)t+x9%$GcPI!I!*`=on-Fh&00t(O_+N}=~j7%SdOHYqUt*|)TX(FFIbq|O%Ni8+ z#AUSMj@POmIe4J!KzSWcbLTiHP5i@tw^>HV*WG6W0$7w|2Mw$p=9Ssa9+SH|2!Kmg$}jfWMpJUPc<_^6%5%} zIG0&W#bl(fk|aWbcY?)C{X57tNx*JkT91XX0~oF?dO<3AL{w}+34f-p-it3ivN5G zVMt1CcqysGs8>7)*<%k1T_l`;i}>>RM~RmS{${f=G$Q_b9N5d9V5w@M>fP&TN(a&3 zEUqxoD6E}36yZ9P6{$wcc}!wK{`;8+Jm36!QzRsRKPu&b;#OrucVLEcn&Nu7S@Hd* z8mK&ND7aP%>S3_g_tGdO^0}ekJ2JX2lUJy9z@gJ=#Wpltxh6YhZg;zi@JhhS`xiD~ 
z<{OMcIzlS`_Z<7#kP)3$5pm1!3w$^x*FP`Mz?Bh092Tr>UC}@g3db$=5TFdyAMYzW4(eRPZie2qgKdo7K2Okk$I?7J2PZ6_&^7fC=5R``{`h>U~h?6hMuE#y&b zsS%Y3O9o3RREyni5GLWGrl5F~-{wA5v!b}{>_~lIKO%=y2hVAX0n{#45h#D6F+GR# z5y5Thf(wU%X#YeAK&S3zgm=)SoatEaqlg*wL!&|y%k{p`lc-y@LC@4)WFXvFUBdmr zIJ2%oRXDikn=r{9-5-Ul_Ts^^bUcU4It1=q!1AbBZhGngYqDUCu+lw3yPtxDph&Q} zGzPy?vD!dZV&Iv&+>8fzk4{l{m5We$2mag)OedMtS(;e+3A;qwcC(ZVri9oxx zW-mPyhmz~eN-T%pJShHP%{DfuR$!7+ZAWDJF4;n(>LheK!HZe&#ahtCQo>Rq9U}DU z4>z~{--(|yvVl;vV(~mEVHaiuTU$q*bXxUZ%um?uxPcLcE-j4DpnfcFeLoG`X#8C$ zcefz(cP)~OoSd5IXj#)7H7Ve(sAgRUoo|3gS;OFPYnM*z{t9A8wtVxYMslE{{`h*- zjr$l^j)$0onPxfauIF0S^%E-z|EiDQS6-)tT%99HH1o&godh7_7*{AUZNOZvvOjJU zJnc6cfnGooJ5(1`L|w~#TZ$UsN3SnOt7*+2CW{AtGD-97EF;u;Kh5u87vdwifUq-J zd;9dzJKoV7KpYYuKWNg1udg2j1tZjT-_2zpx0_>d4j5Adxq*?SQ(%6emON#T7?C)f zNn!9L$W;8O30U<)5ThwTeH)njmEV#vWg$=t(@i{nXFsf84A za>TZ>5gNcDBO?!=VnBx^c45Oz--bgn(70zSP-|eTV4wzva8S_+80{qDbHR(dx(_E!(a}`TQV^dn1COH0YT^iPn0e_Sy>{l`%t?`$v(@0x z411Bf^bs{G1Ko;R4HCn^I>NqHU=iiN?Hw*HbC}eO49?%BaWwb%*UEPP?#m1CP6?=u zHb@c1Gz*S@@AzgN{#$ybh5R#hT2UtKnow30t7Tx&>9rul1O!C*Vuv$QXhF#oTX{6- z6>#CJso=e_9(d9F8)&zcssJxTV?@*a{4hI+EBte*?;yn!i@l6JHbWOYZiAIy?@!e6 zJ%9X!h;+qdA(AT42&dr1_-Pt{^BE^BH<^3Ky70$F!PfSfQvP)MTtvSR)((rgszlmy>y-!+lrO$hm& ziJrIJ%{D5k-g|r*cdpg(o&Nh1J*I3&IRr{0*Y>;p3E8@|l7 zUBiqEH}jB3sfj|4^WK`sqSFXMxiP(!Wt@XUZc>Fo-W?IMabiOw5e(MtjW}Fh-CeUn z<=L#TxK~)2Cex~8ipLR%=IUj)ZS)EJq~n0bpOhOD0yQ)GI>`2ustmvb$;phR&U{|q z3!XO&(k1G)TC@)B4~Zfn?u+~_z{C<;E^Nybx=%S57z{qiK~+ZG1ykFYlRL}O>xi~q zlzt&m6-XVsmK>E*9mw5eg=UF^jxE7eX=GG@g7EtT=d{(W7u* z=x{L3SP42j9zQBlt_Z1pJOV~PL81155L1y((7&5xvn})x!gKzR+6xWz*D21}-aNpd z7J4l=5cgVBgs!FBl1&O|OsZIHMjDO72P+5)Y7l9dEqA1MB=Xj#1(`V&%5JS@e7NBF z{do|Q(${ct1P^Q1w%N!@$O`C^rT7XizYr|m^A=b#+e7}z#i?Z2`7T|^>^SspiHAK? 
zPE$axa5k)Z*s3GBu3p z>A<*s&U~M;c$ls>E2tKB+Sb<3ekz6_LbF*xE=ZM1Gml=LViZavQ^C$}JQ))h(aztf2^R=)Pj4a#_5R**A9kK6JV) zaj<)M$yjErq+x^m#p&7xgCRN@oH3w=Gi`SgR*d0u_V5`StQ*#LGWhU zX6@ANL0N4qiyEF<)eN(t`R`r;kGFnqsK|f%Lt+}5Xoh(zC@4R@U34u%Y$Eu z_8>Ng&{T?lSWXu!qJ*K5cJ?EJX0ZEr&d!3CDSyr$XLC>~)FRvI^u*TmI%H&9`Kp-?a#eHTQj3f?FvSeB7pbc5)BC*k4%{+VNLO98X#QpISl^P~{w z6H(Q=+Wc54jKhFJH3YC(b0pcxoQL3c(MCM;>Z7-K~Q&<9Ue~ED>b^Y zd0a1h%NU`Ei5fvrQCEr%^>m8S*k9zRmrJftWL2coYJ`l0z(V0KOmXnf>~NXP>{w1@Gtg9FXDTv0d^Dh} ztn_(r24%g+2UANQ(>3OZ&@qWKy@A!FZFtgQFdCGuCJM|1Ny7V-L*eG_#o%KY!5tqX z2I7I)m9jqZn`- z1E_oPXJ8GHH>US_oQT(Pqn3@k<7oGmFfn8|uS3)-7RQTdxrsBIVa2|JvvVo>>j<MxtXgUMF+pDrE`@Sy5{(wxRw^m&dfK z>I!fkEUML)&n#Xw$sn})bx?HF6fS3*2>JQM$Fo(e)%tORqtiWu?M+(-*V`!H9!}Oj z@ByJ3kDy#KDKcv`gq*+!_S1e-B+PYk6H+Ff0}q3yGot0JGos~06GB=D)#t`qyOIX8z3K8n#U}V zztVd)8~XX|Vj)12=J$)Es0c_-d=EmajG!rd|F%=;M@+(t|6}taSRn2)WO%e6xqg@F zP1BQjU1=I!2`oA#8-C!XCBDT#vQqB@oP_kMKseLvM2`9QmR&Rq8pZZ-Oy===Ir~o4 zO@zNN04W`34fIUk=29AoN4-@t42?Smiuy)}`RD$;qQy!*;(HW@!|g1_RD4prLh2)5>K8g>kt-Yw^rk6^xy2QVqeT0 z`fo0Ro-z_Bn_30x%0s~C9Sn!Xu=}Q3aebHCbl*ix#i;(b&87`!jbTW6tb1Y*WYWyK zVLo`XND~eFUaJU*3ek~3R-Z5+`4@ZA^<|(1uFh`aD!kJRi4f^H(#*C!0U@CZW^(0M z;T`$N@p7yC`Ex`_r;3o}J(>{l?R9SY$(7~KhyNH)1!DKu%wLSWG%V)M8{VhyBpfUf95P8u%EDPA9YZr<00T1?~b3 zeJnm-z^4z-98cgXC9uXja4*7(+97G6c~GM9Vg^kZ3aNj0NV@iR?S)BUS;fr!kZZH( zkNBo`I#8$&LlaT#lGRBDxm^KXM2tmJxEoBWG%y&gg!7YOzTxSY%kd1TcpUz~u_B@H zQ7)DmPt;$tFh1%?iCYZXB~ty(K;Tb56poKp2wy?SQQv9ADt&$>mkgtqk9OCyxWCyv zJJV>CRPa^$1`L8s`GXlvnTnMfiZt^(0-uA>giK)|kPF>hAquf{k%SXoG~{i$#XDXR z5ZLBo7lW``vQNym!(jJUTbaq+=ixEZy6<5rJ5{!neqSl?T#8{!Vn?A&iP<((ZA4(%S^0@HMdL zi6}M2?8C`%FQX7(M!&f+?zZT@eWN(C%Qn2AG}?{qxR9we8Znz)+yf&&p=H?0u=n7+ z8=UysU2Wl-`jIxg9(w17oNGn`}KenEaXg!o4UpC z6^nXFViLd%E(ujLTjp~%b)Ek>ph=C^J2pt@45>*Af%Xohb{Kq>C@oCZ>};E)SL8P) zb-c#8QFDMhAe$J1nOX5ZP_Ix+wEZGe^>lkVVe4jT1&&HTv@86UlzJtia71n==t-cr zJW0IH_6_m7KEej=^DvZZ?#-Bdjl@xa5Ug67HyOv25VI%&)UB@gT=KaGj>HiZ>a6Sn zrZur)U?fE_;R-=LdjOuAb(xwvHtZ@;0=R^TtQ|e5xP$`L1#As&g|L`(etQBqP)1{bZT z+bX|wEK94KBC@hxP 
z_wW1tt4p9M1`-sSA}XzX4`MZ|{OSi&;vI25y2gKVJWtc;4in61-~- z!QW>VZLboV!7hd!5A<3>MP6Wx)0l?i_BOT6Rx@}Kg->jgfZfn{G@Na~D?w!dOuJyPxQ>joxc)HpUcaedoOGQU#(19qPNG2!OSgO{YDN_6a z5sWEfqy&pUw*Y3dL~Db-{HgW_#C-(G7K0&B zwL-1XI*kDxmf7p^7nXtXh;PgeX{d7A$NL>4)9+h|*NcieedE!1VovXUfK(cF%p1~W zq0x#Gl~w`1nr{ng3QcOizBvOi4L9!Ih+oY0Ialp>4z#K+T=iRY0);RcN67#7FIPGW z2de#|RUo@{H)ZLf+ZqRPrl5fW;vl~)Pqdzc1jC?+IEnse;&OvI7K2`Qbjj5M6q})f zvP7g8CC1+G8RlR- zWjOZ7TAkwn1qrAoaN6}^HD5UF5daU{yG^-wBj&k=c?i;f;ZTcbx2A@ z1^r*8W<(oYYN2U`n)_APB>)bNT$Dyxpzd~fgwpWh&q1Y$#gm>qZ%jnrrM?>Is*OUK z2e8vTA@$^e3&oG!nVV7@?HhOzO=i$dK9`-tH{LZDJ8!Pk1mw0%M-+e}ZST1Qnu3ms z8)zFHEQA~wi$S0!pW%2IDHOjNXmLDJx|L#%&>DL)&_n9bO7iV=Q{5aXU++((cPyb1 zvHiE)Y^57;@qXHe)yyKroG$)yaJL&IAMCZOCwA+`EYoWub-_KH?C+E=aJ;>j)8T9( zu+kS;P5`5nW;?8RyIqUoDnIse%|0R6BxJ(BV*3+UR0Q#D*8najlT!Bdhs6%JD@7`G z*IIzS%n1h8z+1kb0)M^mFdP}8AK00wSR`hOQQfb6qbm@(c;w zwIv{_RNikNP%ftnP(007Vo_!fDEO&_YIqP2DyL1+D}=wfNl|m^Jv~Bi(U(N!0wbzA^z;L=a@`q{SlMN7Mwc^&ExhSiFlDQfE*u|1-Qp0M>QtBc{#4R85xEft^a1LCuMVU1u=s&1U_G7%fIPKCoQfv_`$%q8p~rE<3n zrO#$D1K@G}41H>E^q2p|U$lI_pGc%jC_0jGUku48<6rj&z^oeK+B9I;9R^B>FtWa) z-@(lhwAq17R-q2!D2dr?JY#%H|2+E_31;es_RICnsoj`YkV>PEA-WK#J$5*e} z7%Yy$QfxZaPNsX99l#bbLF%IgGIOuE_jMR(etJfv}+(n3%yb0E&*_kk*vCn&fQ zCXFPL&%rOcHoCQ2PEvf1Q34>q^mV;G-8|nI17&AN(`u;xsMkb(_iR9+QOfm)qvY<~ z>mMm}>sT&*R4kRnp^{7OzCLI#cb!oRg|ioNg;b=lxD^Shy4xPskLLaQMs+*IibNw4 zhh{6x8T(T`WNVi*DCKi{hv5ABjVd-LWF3lsm0%=f?ot^?27U3HQW3NS%N(rws>=6ddIVunmJO1F@tijOW{7 znDYM#S;+kJk3|<-U3S<%w!D=Y#T*tA1W79W+g~dWm9XDqm)+ucKMLCUq{ry}w80{i z!Gc7}j~BijzaAdZ4VTT2Of4dFLn?lW+Ul1^`H28OCBB1_t@bm0vb)FWfns-WPq9u2 zXMa=`@I=~%-sX_N(&~8-?O^MxG+#G>4Md8lBY8Ow{`(VJ2}rj}K;LL<2c&lu{5+3k zyrNN|_(UGlIIJ^b7dO7ywV!FRcWbmNG3|(M^N19 zM;ZS?)qCCuY@!|PPW(^T{kz+DgDXo@1c0DjvGJ99%yOd@J1Qm5^f zgo1*=THROrjjy=x*lGJvu$H9-dXkPM`rU{uMyps%$_Q-RKrU0RQmX;OUhPk~e)C6Q zD!rD$H~Lj}f0!BW@rusANke&lvTmw~rwM^+$Cq%at(=*rIi|EkT!kF&N(@mTQqy}? 
zZNN4DBRObr#A3Lgyvgy}0G^|gCphqBoW+f=_vA0Qm^@ix=$G+RE7}XzdF%w0QW1bb zua;;C9|9666duC5|HT_uiEMC5J5(EPMNU;J*MOOQ?fV5fbJE!A^+eI`W*w$zOtBXy z>}N22nVSznNE=bUngoqCvIA5EvV<0+qn ze6U5?C06``fvsAq5r^*lkdyxA1;7c`qeOIccuQmnpb5WhfLzwx4Ek-+rH*y`WNWp) zXpOdtNYdS!G2MLJeG--c_fgu}?DU+o#KP-ZhuxckuBG|F!NNSI%;-7GFmf(jI7|jg zcIh~~$pma5%WsAzdmS6z@f6y)xnek>^6RhxoH)z*!vf1YGa!p8>~I`mnB7LZ;ZjYH z!D@8dXeK4nYLl(t)3Vz!Aod4e=-HBeo>vQjdxaF2h{<#qwQ^~J%*wY>UU08Adx!m1 zeceaC4enUm%HE^>KpA@_6*Y`(i%6;9$oGt~vY5I4q0`|W3BP6eS9)jMkaUmu3oOQL z{cr2_j^8L&Tlfx2670SuE{%8hKj>Z+t?K2tONyq6Dp5%l8%4+`f&7rFe^f|@HWz8t zYi-|LZ9Y8U7_!{fs8^ayrP9RrC}d~KW_e8XO`+cR{BnA04N0$`R!9|I4)X{YWDyck zK*b&mzswu@1!f3kGdhFVw|t^f1gb?ZPF3?vcl61-0wV;)V(}%L|3xHG>|7IyGg&{{ zRBCr1;#Ory$P<9dh>llY>S_^*f6kR@R$y}2zrq|oOIW-+B_+9;U3e>X4zc0d;IAcJ zlKOT33=c*K%LFx0(12_G%Bf!)(s62 zlTdREb`q~7$hAwwOoPStg}ja6=_1U~>HiU?(75=|6F@g2+>Gnt3$%l26{4t> z;>d-p@t>;sO9HIJzUPN zh$=NWGWpG;sG|A1yhzyS;D6e!_pPrON9ZiQ)XhXpA|Cq>xDb9rUP08#l*a4JHMKS{ zJ&$vX9j(9LIB*v$5K~r>uo!YiK0n@OT7FVm`|=n>3&W(E2^dK&d6{L~^f zI&DJ1De}MUNO~(JkrWz5AZ<45L(u&o;m~`(Nh~HWsFw5`&fh3IJ17>LmF#l4t5J;I zS<#^2TO+pz(lf1&q)HvlmP0VLg2JK3_s5Drc5BSxSRBc%sf@00l79%;OaMIJ_t1YZ z9M#3=mjW{W5kX0KS*o}gS+2DBhtQm!=kPS@PDy+x^vW^^%nmcibb6d=Mok&YHt4&t z($TC>N~gslMSY{f@k6_>FO4R7}?TU{@3QyFxF z!;lC_y<#1bX;cX&Qt5@$S&H-K%M&c>4X(OBEH;ZkH_v7X?~bO2KD-j#3mtshx30X9 z3d9~xfjssEp@=x)38dmXdvO9{3h@F20+Ibw+CLSFrA$v94kprytvPI#Ws({Nrx*Bo zW3xME3p{*YWsfq$$b!==_~bt)5NF@~KcTcOvH`HyLwjLMj_1pg=rk$-Vq&`jF`FbD zR*Zba(k%~QEp=?>2 ze!3BH3+y^?vp-+H_JM z8K>@fe@9g&gBLCr0WaA13TZG=lsEYZslr#}NbW-6po%*@PQG^-;q1FGMx9Kb8esYC zzLM|Mtm7@O{br>?+M6o!TsKNI>b$uQr*p$j=7;mmYmk`CCb1Li<*>gXH!tS!_C{oT z(+YIzxYB9 zSPC5+hYe-fNx^8`cmDrzE+k-+$D_9a!3X0ZVpnVh0Q||Q92{(6Az?!%>``jf!r8uQ z>|on9*X_-zBn6yh{`fG#Nh+ktJ9PD-1$_?8#XXq*dMuF+S6k#CAK{~I?~u29_b2mI z%FT4udR>8mkav6cW6I0Vq!X%8Y__iqU_n6+_s@zt9WIgGtWZY*W645)^XPg)lPhnE zOn9H_Y#0k2D0*|~4nd5Gk->wJ!?QMbI>>WY143SmAb@>K!Nn6>!`&ZoH+!pHR62RQ zQMRCA*|}5B8qOvf1iz{9TfL5OBhvulx!uKii=b=Gnzb%t91xI)K~p8WhUH2D={R7M 
z+LokROPc)XkniwTgM28?Sq(m23%%7M77`3(WP|f}Q*`4}xM6Boayz5gD-thaRO=bp zw^2+ZBB4K9Bqb65#G_f22KBp>H_UjXn)Ko#2)lD865dcT?xLi=A&6{rWgXsWGGL33 z8(Vw`8%CJRmBZF{9@3$!!8-4JeMQpHPApzD{yM=U8#_u&kx?lpytsg!MANDDHlwg` z!Ho>AcbIi!dOn{0=t9)!GGpbEbGuvs<8gf#e2hW-kQinr1m2c>k}Ueo;wE1kw58t@K@jE)`rH+$9KBeZJy?u~$qlG1qL znE9I-fr};B1W%E7wOdY(mm`3{wuXw!I__j-O^M`3Txu63J@yt#F!Br( zi_tdzo0ZN+LG4|NreMv+>9kL^^6H-BOdHv$2({L1KTy;2845~x57C&fVC53H5#x4h zj!G2>nPZ8GA(#3VbpX{9V9WEgcbTnyyw!r|?LA;2Q%ojaXs|P9qTlL$0w#(H=Hf4g zL>L5G@*KwLa1HUv>j+DTbRsQ9T()2$?VdBh7gt+>VZz!-zA3ihN&oG8E>fBe&TkYD zkVk{_KrfAkL-TjXW8}jr^FJ6RpLGUDm%{Gftd^&Xt?gsF0dj$6G1sMsWf#JN zk~6|Ya>>%4@02grkd(q`xl}kl9$EUaQJ(*ERCVJO0k_a0Unm%q6v6H4W$|IHXpHs7 zuGDSkiqL;7snlq0P*LHnzDzpMNscHrcf(gmk1`DGZ&B%}YAuu1KX16DYQ6q(d-dMw zval(ufbYStm-*>wiN}g>dSX~!z&5l;AA}#54WL$mP^H}>BqGAGKb?z2(-b-UG!Qmx zG1tfi1LAp)7r6CwDO%438F!@e{p>;oTyzJTi|^ssAFngkuO+;Rh@7@# z4n@2}fkU(kU8~wYR@d(W&C& z@NJzA#$&R1{DfPoZSxGTuw;g&g!VEB@{Eexsq{LreOF#tBcPW10JYzyUTE6jIhxdh zz3Ev&BbUn`GLgoaKY{M>jfe^t!$!x$1r8riH@V7*UBVP-V9uet_CGZw=8N8kjooV~Aby!0D-@O1lPkrBG1QI7^4JGl$ataQB!@s)z5qsjh8*(2^ z9^*^*c*^)W+z(jXjn-0JU5VrcC|0VJ;UL!Y-B_Y;+AXeJ-zi}*t$OvG_W>R-?a-k) z$M)0oCoDQ$_~nL|!QvBboyiUAWEOwU_(IK>0Yq4^!DE&O6FIL?#XsG=pB8myi?CVz z&WJ5`8-f*>GF5Qj11t#(fmg1l!5cjvLR$~VD_G8E(|rr&tW2)w61YpQV%<@JbdqT@ z_TI`gM6k1C=&y8t@JCXwRgay`dix{*OcW<7)B)%OIhT?y-%ZIrhAmuqMt0nKVVv6x zI-Sv|Ls4Kfgj7x4Y|OLT<Io4LyAaEK3)ahEcuO@QJ-GyhBhe$v zHQgc_8m=Oue80B32@53tKc>zqDy}Zt(uKRbyAvpaySqbz1}G$WaCdk2;0_5M+}%9{ zcL@^Q9s2y;eaExJ^bQBC^o-r(VpL>f{-_cMw9uv>@o z4VW)24p5EOi=~)$G%|60RjbXl=JN^zgACN*08bvMgSW1LB)TZ$%bl->o81`LILX_` z6|yID9#Krh#A7?|&K-v-UeWG%& z7P`2d-JST}9r4|$!-eVJLRIv?&zl1miM!y(V|BHoU%fxn=1dMgc&gM?b#xOT)!I@8 zwz!`&-<@oP>or;@a;5aif+L&fMN>wVXU zz540Pi(VL|EP+t`9DD8RzEWk;-I`>C{6C?Z=?BRDH{qcT*@Z4z;$*SilzmpgG<3+@ z%P3p>lQv83_wHPtW#k5KyjNJBH>pgYac_ya`t7^lVjOqNOmEWjAd6?F9?z!=4|l7L zwW}Wv*N=s5SKkCPc|1w>%@EMZ1tOkqZH!-^@@AU8DV}U}VgM9E?@w-$3%g;S@+&Q2 zQqf-s#G9m)ScLj!zeulC57|!y+VQ4RWG&UY`LT@VEGGJDjU^drhkIcVB4(muI^y*j 
ze{KVhA0jHMOdxctdY6Q2HlHKrTx+mi_7m|s8BV|y{?zQiJmFXs6%^={m`O~}PxT{uZ zW@UzgBKC}KP-}O39}%l6qZWW+y=WKE>FqywLv(#HIt;TdDNj*V-MECO#D#(icOH*V z-7_##a((b&C^#Hrv^_!jW;*yXS+gWH=}CGU--L@~9ea8Plys6aW(T<3ik zzu|lao8j#x8nL%e`f9h`zI)hvy=`kWS*KF35qU70psdND*^fY=$sW~I&5xFjVH{S+ zoFQI&uY=X4M!R@`y?_u1Silxbv8MtOH~#07fCsxo9(>BL%Z9AD_U+S;`507f_p`O3 zQf|c~Eop_%(y^J`#-;Aoa99x5%9VaW3j<@!)-i_q>Zb4PRGJ#KU?#U~;gpI89=}&t z!9)X~-7F@hAmRNcb-YmJ_;5kN=YMG*Lo9-8+LakYADflL9N-zx$uMKHSZ#8($HeDn z#tz+orAdrrenI~nk#Qh_D4&6kmqY}u)F4s6TGs%BUr*H4MbD~hi1BQCP7X1b9Nz{mOf_4F1Y`)DhJ{^25eBFs zrxvg`&iHLL{Dgd@!wOD;27;PgX_dZTu?6gdMOnw%J;engGjSt@k8kXZHMa+;D?+O? zp7?CxARx@)!@}=TNjhgI*pz$x_}AjFZO_;!pdi>oTHmU!og^QcXjI9*LRQQ4$AP&j zfSx>p6U%=`TL>D-g8b^~r_c8S=m=k_Y#EQ+LN)%|mUqGUG?Nkhu)X2?qqZ5|sGZTx zbjZ!PtY>F!?_udUO;X1a(U$;+ELJw2Li3@aX2?f;)0fAD3up^M11avP8lzWuS@EFg zO%4n`>5(vURU%^|)@H+@6sS~Lw>V!TcfFbuxx_Fw>5bU+k2vGA{nCyn=W{+2a@rdS zWKb=Xf9$hOWM%&Y{2Jl&Hd+NM`u4phb+dfd05e@}C=h+tDcU!?JFMOTH?jVR%nBP)tN>~J`g$tT=5`usuf*1m4i9rUN6y8U?z^+YLg4#Xst2E9G%$ z>1h-GX;sW6;{*#fwvyn+D>;se8T;zR_P_nXmOdAb)s%E_%|p|NXk+dKhLkR|fb^ZOS$4nSGmW?xFTZ zvoP#&r-B#0wIzLgoZcOtP5V^F`cKP_P0qyHE%F*+R|nOL4BN!x51U)|^Yh0<&(js2 zaDNA=f#Ef=nl4EWyBBRjUOypV>k`n9fML@6;+es3N*xsRYxigl!KF&$7^BDtv;$|7 z;CsKgd|eE(Sx-tYvAX8b>G0ravR@4o>1>~Muh6cMyx#v%3+bgsCE+2o-gH2MD!s`} zpCb`tmkNlS*Y2}wR+rNn&k#IPmn!syi>%o5x3Riv7DWYF&;bEV*Q0;bAHNA+a+E>QtK0FZx|v za5$M^$R0P5C5jq}L}~i$KDkos6UsL`Y->j^BCSGN}~QUzUXfQB;m1rx9MH>^5M-{`{D0a zn%sd5EPRu#9ErSO>@UA)cH6GKwvC#}pK_#!e-(*ru9|UQ9v$9Z;If6C(HpI`%xi4b4xwSs%NvAI ze%r8LdfNktL3L-jb`6h)#>tn@Z##nO2Ae z%25A^;)Cxd)~8A}@;5($2J1OMNUH4pb5zf9lH6Oqn2!|N4zRL9^l{Hg^tA@<=!M2G zchqnv>)wdm)}Mm)s(J!pWD|v{vqzHZE#68KLSj6aJ~4`_WD6kVsKjf~A=A)yVCD|h zx$UusvdaW4Zz*lFAKstK?q2;xd3cGQcD&f~>{Yv$_L zY$~gv&QSxp`{e+6XA`4Ym4Oq&kvv&dL-er6PW zx_L|x;(r(NBLrp^nR{|pDjJ8U8#v@>qC{ar+4x*BIS0hXqhDV)^R;Wv9xnO=b!r1D z;ND-ZNuBn_;y->MIbc7=A_SaM^7Oqtl*|3dQ{OjyX(XX1N~X4e=NxMz zlO9To+~FUDp-eH?GWT@y2Z*rtFGJuYd9KCp1HOO(B5y<+GGpOe zHZXX&A&=eWBAT}tTuxdyST1KH63JNGt~}ttqaU7o?bHa~7x+A0Wq^&T%_nn1424Di 
zV|0wUh=H-U=5R5OWMRp1yU*Z6u$F@2<{UaNUkJB4`n=iGQgJ7e@rGcvjDM?^fBP6G zLP~)NGc@Y;-)>cNMPI?*SYfnmjmde4>&t$DSor8GRstt1d^=X$G4jN#CG0lwn-*5X zzf~v^J(@GJu4vb54?oJZVBf=&x)#@a0Ul+2P$+vee&2-!`tG!ax3TmkeUFZZKw5KH zqHGsDjC4d~_56Mp2KUSAbjX52srf!0hmFjy2j85C#(ojm&c>C3} z`mP~P&y^VR@6Qj*`JZ@a>#k)!nA6-32O+V4AGOLM&Fwj-{7^&F*v~lHBP~eayY_x# zs3eNrPI3y!P{WUhr{l;Bk#Ep0UA=PhQR4B2ub1=MrXvw-r6~|M4w-<@)g#|z#0y$4 zZ$=GHL>*#Cb;r|~6vn`GG@&1r11^7Qd_U#kQ^HUyS`Z`@J!6R>7AAhaKO0E^mWujB z>$b{qed$FE1sWyHmJ1=$-!B7K2%px_53@n0Uw$Fq!QyPi^kN+I6q(2Vl#^HKJu_&( z^n0&X`w6{OJ8b;i^a`Lxyrt8G2%S_Zi0#yaW5(Nkp&6X+B2Bh0!QWfJt28Igt{t}5w5rU7eI5r1fK!bXYnjzV?a!{P8T)9iFtlDyTtvxS z*?Zqo-R{M3f$&Qlve9AeTkQ%FQ{nz&N{EHJyF&2D4Q)?KF~}5_kcP<4pgY7~n>AFW zhE5~>HfKDYefLxVI9IkP_O@(4ejiCR0!Z4D-+g8Z660x;V9Oa0*SCe3lU)G%vs@|W zuQq%n0r023f_~YV%k{Vxk8Nn4O5LG;g)3B3qCzGzjEU`laIubXBF=r2k%WeMtsP~9 z*4~L(olb|3^cY!DCk0}1c61ZSqI_ulmve6@h{%Yt63EeEEK(qvf}nfSfmCBI>Qgt% zKQVhB`;%}(fxMagvsKq0A`09z{HJRhbSv$6$ZK7`BsSVr`GrI;83tu-EekH|r0v2r~3cI222n_U_oLb!N&12upu=O09bvb~S`u(hCH6 z*xV!AU$@>50N>&_tStK>?k&)lpk}VW(6wxKCq)Y|B(fC~Qs* zk3k*-44LSX@ZIqN2|%l^5L6?sXyzRprd;Rq==9?IM!jJ*GSq^F>K5ANxyyS~>Eoz5@2c7?k^f7G+0}mM^Gb*J z8UKAKf!@|t>NuG2CObU|;O0Wm$gbdBaHY0P0qx$7IUY+<0v|dnh zz4JC4j9OgX3B#tk?Drbwq=)V%CgQjY$qeW;wEkAKB|1JIonHil2PLLR&^i+`YKp9` z7j!zU`THb-!Cp5y9rqi2d(%d^?&pTGlfXy4Q4J=wxH^ay6SB!H3l9hpYaW z@My##Npy;R#rZLQH?tBa8;vA33)KUstM2OMYEXW;r~#81w^Zy7MKY*^twz(kiQyDG z=LhUx07ceoYordRT)m2!&7dnxgD$F6f8uQ5yLyWQ2jB0reSVPJ#g_V~Dh;@L^XQOT z+@qwFlpqe%u-)tFbZ0I-Il263C>hWa03*$oDx<8mxN1ve2bxG1I_&J#lu}ck(f|;U=q)d|QG&h>2tmQ26}QngC5$X4VycTndzXC6cL%!W=T6kyQaC2bvr=IH21N~X@pj4V@x z|8@M6R6P0muWq+4%Ho5}6~pVl@)+P2phP_IzBQzvL?xgT&>FBm+dV^aY#<5aObVr=jk{!X^zX z3Xk6%d8^6CphjX%8g;%-{IyelYvU%(C?X%3xLfz0UjWF1SlHM*{Bp@+lw*oyfbf%2 zZ02&dMou%-5pTENZrXDUb1|RBO~~OVP|?>h)50U_w40U_5yuv0{h0;%$4&ml4JZS4 z%wv(G0z}w~3TFukpEr?F!<47QhB@I`s9cHtRf2?h0o(KN!YEH^4H?YS_Lq#+!zgf0 zzXHR8HvKV_avQL>jY|r|Yqd9u*I*#TV+>~X;1O}#1dgKc?eY2JTHECVLM}_nOrcf8 z$t=-$6av3t_f5a>^720L_lAe9;O~SSS08+?Ck>@!LQIbrz8$VMvmeB_yS-BL*sqM< 
z)fiUbv%UHeKjll*a-xMyrNRctONM`^lDesv#DKX$$C*frW?Xqv@Ay~?6GtH&vyO0= zuQADN`KY9C>*3Ghx6uSM$>1tpB7j$C@~8kS5IkG|%|2Vmfs}<|LLw_M2TQk!QH7j8 zwrO6^WbBYBqT36K9o+{eHO~(>nZYk$FS%P1Cxb$z3YT>Xl;MBSQgc}W+q1-O!eCJ~ zl9TG9%E$P(LTy_E=p4TChy@K;BBHP$b&ALTNij>4kLgrY;vNe(fN!=#R%4<51wI>; zXXTUjoVWh7pTF-(F)0%o6~7JS6cDvg<~2+~f_Nwc=DpUi9nO&GVIWS9K~%eN0m~Bs zbnxumD=?fEO2d`9oE(qAb$~BTO3#|q>&}a8mmx+{kNA<)>O>HekT66som~+X{Gsn! zmgep3^U3QrqEaCJC;uoPd3wv>>#rmrQ&;A)9H?4`3gKWNas#r`V71ir6)i|EZQ6J| z0jd-OT{eF+>W`;`41;Te`zN1)*|F+sx&pwtv_p##V;5B=`S8OIC68d1w_j<6~1g0e_ z)=Z);opK?~l^yonr(5|E9{)inX@c$YFSJ!3BLsD}DINp2jH~r%yaW*Z8 z3?%~sg9~7wm1d5VGK4%o0vRZQp(qD+Fq?MG^$k^^9u-dDl%Fwn@i?OU55V8_>%{j0 zOj|JrE%$HIXKv7#NT7PU_eiwk=0evjeA052n2H!S0-TuImE*F+Y|QZc(rIZRhAiLg zaair(yMT6LR7-KBRM6_@exE`ASOKw}6W)imF!m}KE|&y2mEz?%8~ zICTaU@Zka1^`8*1Kenc(dt}S?_{F2;+lme7LVe*BQ+sXxPmc;(0bZqs$}bLEgw)s4 zJL%2`5Y+r0Z|ivUtY+KcK2J_eLfs(Wr9I8PyUq5%MTCyny7RbRz?XE{{oZ&WJWq%_ zWuy4A&f>8*`%|aekL+x4U2>8*D3lmiJ#1vJmR5~22nNcSjvq7c^8~3@nvReC4l1xh zLU2DHqU7L+3^XQow^^)`%ri8&=+)2z zH=T0<25X;^bCe+F+Xs`jvG2U24b+4Kxe~`-A60VS&Wn%dm7*MmJSC@T$` z;SboeWnEmbgx=Sj`MmZRwfr9G7HSOQ*w(#hjNYIDz7ctWDfnqPNSoh-Qo>0K$<>gYhzbL(u2ZfPO9rW%7*?t<2D@W101Sn?N$c+Wn zi}J&ffZMrnki|dbryQTr;4H>#;+zl@Yc4+5J0bYaOLRBQUmB*yezO;D%ufhCYpu1G zef$%+90?)O{$C`IlB7M=e;8EIYyT|>C-0rI?-GwpnkD45gRVAs#)-!N-~t8l|I!+l zG#CvzzO%m=Wyss=yxdi*+mX!Rwa4OwXjSP=;Uw`;DYPhjS{x?#g$3dejQ=>Doe;J7 zGT?_6!hUc+aBsh`?SJC)x#6?c^2hErYK!_<619EJu4S_P>no!{8<#>VbF?7GWd^q? z`c{LODYcvKf}wSXbAIGDGa&@-Dtvdp@f!FMVS_(DqDU3%*Qb5Z4|F?SrDB|kIN}tQ z0OA%f&3;2hlcr}%6d7(PzyZC8Lq3i0Yb`{<_D$cQxB_R0VVqF|95s~CkVKX!6)82) z!3vec`1mQ+xv+9K|CWO>xGko#_s*N`-^rCTxdRMsB(Cwl)mrT;5P;nFC)hh$Kh>zn z%LE2sDomw|K$ZCmdJja(eT>+t1^s@J5whEe8$Rzg+6sOBU82|_{#zQg_QPtE$;9Do zcJ1}rvZhYE9p}AZpb@F(u7kF~yvvH-V^PdXw&8>p$Lv+}3=uG%qjLx__$lqm#jBq7Il^EP;uE zmNWTOfc-XXIIlc17y$zrbna~vEPa18n~hUIp3Gu=^#%cKGO`gA8>FBS9sYc`0L8R1!ZwTKu9{0NHN$#m`n~^8zg%>88(hV};%6N>{{ug?5pB zHXRj|ClKjnooVsM``^JkEXoT56xhDMBBs{APwmt3|8HC7p?|`? 
z-QrVK`8BZndr^yO*S1WggBhZbTIQ}6Y%eQAlv(btAvfks3{3k=QP z&~jla^wm^Ns)5;iP&`gu)8bG#Q@UD97GMK z0ti0d;p%hKTDM!f6vj73Maqx6asmeG6{qMVg6~`^9{_)TEW0+F^ts44EK=f#uj9={ zma_^G%xdH+gyOVN(9t2upQp%TIm*@lhXqL1htGhSO35O87{W}Oq~OXIT|5g-JFWGc z)4MqoM<%x-rpS`j}I7b7{<6K|1*w* z-#DJ_y8}mQ7YorS9L44!XPxzz>;jn;GDH(*UclRpS!h;N}3QW)b5 zI}IgEWPk2N|1|=SGqbYd(#XX00A!0=iyV~-6xeT#uo}4p+j8TDUj-q*BdAZY&z8=u zAVJWznf=w4&Z1k0ZTV7}k+=><)4rgOMisyexgYM@?Kv)PbJ^f^n$LU*%OOZSWtw2u z82apPhhDgVNFi-=Qo%RQ@x-pR0=)eAfCa)7VNgr}!j{D@AG zGU?S46&5Zl0oxlYk9{*hpVuC8vHFThu1?C61v4tYICB-Mz1;2kG{JF*R#2;ZUZ^(- z4{U~J!s7p z$iEwYKu8ns?ph}_n}H9jZzDuF#;F(#x8ddl#pw3aGG@?n_WaxW_ZLN{&5Q~f6<&*r zlMuDV>y{axRcFMhd5@w(y;DG|@=GriApj&wY>L$E45)THjFDumkG+XT)1ggfHA6r# zX1nNQ0zfL!xaoCUDr?qHPPoV0y0M3vaZPzN7wavhf;Ac3wJwxclEr(7-sqi1kl~Gc zrl20$q+nZrzy}g7)8Xd!%WiGKqNzFV#BMoH0NiMYp5&zPCWh~^W%76OEBR|=CsjW* zedRa8M~i^Tr}HlHLau7)0YCwUQL+kP-#Rtxj1;TgYf=gPkpWsAK}Lv$M11FP3l(=NH80 z6B%(IvUrR9MU$Z6_Wc?{bWOhLHJig#156U+qy_2WW;ed&jMkL-)8PQc{Y$=(85jGl zLop`i_lV@O>D?Fm?zTOga8$DRbfdGLQ-;czjHgFx8lq2|Eygs#rT}3aTizEs`_t9? zKW5`;Yp0fu;mYInazHl{iry)J+w0Er#y{uN3;uS+HZ{bX=&`+Q_2IgA#Qv{}O-U^Cu5R=nfXs1i?U3GS2r@8TgDA z>L~mrzSxoy3$F$gmF@fRQRMkP5%~$Z$qAkRnU09lO( z&f)qG_dlbXawgAk{IF_)qa^eZBHD+PzXX{vP+-9;*|6<}*z1#snhP~JDJghogP2lq z2z)r@{+TPQeiQb$)kbNDghV2UaNE6vTrK*ym5z|xV;~pDn-U6xlJT&kSaXO0(5-N}0Xa>vqOJr7`I)&04` zqWJ*i%Sy-VJ49+uq+v3<|6zp_#N#v|r<5arRi^qHV=|~%FnC?%2zlq7LBRh?c78(P z=2MdoyY+XPBCvgN+GKDH?O}7=ME!X69gQ`EH|rgUjbO1|Zc9wOy&sMrY9wj)#IgQ! 
zsQcn|#s!F;VNQR^C6eit9d?K4NJagSRfW`vHvO9sVsoqF(MW*lr2@aj;Zik)T|av` zWGcf*t>qdOM6(>d%J}&wHNekBCSbzQD`&=S_it|VoNUx%O#ksFM!+DGJk^8#4yf5ch*uaI4#^I{G=n@L;DYHI4Xu6y)5b@d#=38zD9sc>j zG8Y4bO6+p9L6P?%Cp=)Qul6cew(VjkAobM`e?o*GVJQesai-lJFpJIQ4wI4TeMrM~ zIaz|sV||4^@8E>3H&>PR@%dP7*cv6vf}*u|zj?Zwz79Z8^{^&^&~R$iUzk|G)Dm`j z-S+@c{pAFw>r9mmJF89uCWFdr7_q8I%ta7}BJ2eFhe(N$;>C3FP_CW~uFnaxC2&8@)Z(rAEq3_*JaJ_{cAsKQhM%@cqa<-=(VM z2RLjG4B?=hp!3)*g-2YN%>26{9aMFtALTEAP4Nb?vW|p25cUdrS&TT~vf;*78zBB^ zi(tt6SqXuMy5G@QmgPL06w&FjCHuKp9(TN0O~d??l39y6I79Ffp!G0TnrN7$9Ydwl z#H=&X(8r(#T=DO)J>#S*)$Kt3U=4p+Go;4axrZMwwu}$&2IZ?gEEMMx|p>)Xqiao)Q%Q zt*inH6tUPVLb%ibtpnh`m#9O&VErdi!SNuQ8@cmybdtZY-H}rU2qtUoreMo0(4X?l z|3>XdfLfts;>TxiVhGJFX(1qpPb!i2d(Q8XkdyBWwbDE&0Ak)O=bDJ7a$5uHEv6mV zPQG&#(Lm55{kPD_;%x_f(#*IuI#?wMmWpIwtadhF&s)3p zVHc0_OF5>YM;BIWFBUFoNXnkR$H}E0;0wQ;DEBy4hIR?Y!o-ehD9&QMclgX&)?ti7 zV0^T*JOhUpC{r=bCHu*IR~){_>=*v>xVZ|*R*Di++fv6d?W%~>-iUQIq(3|!au$9L z{M)2W9=X?-C^qGL;kCJbe>Ai&QtPxqg`iMM5`;_Nbmk59B}0SymdXqb_{DVp4d+ zrBc*{7xae}Iwprjj2)<0@|aAp`hblYB~Vf%KUPJu-4(Eg9~PxnX8#QtvJ0f)hL09~ zgLXasBiZ<`F@ca>gO-fXgLGCp8jPq_?c>R1vq36KE;z*$VCeG$r&K8?2zV2ntZAXA z$I@#e0)xXKlj+eA)DP(4{m3>SIYf?42BX;3^2J1KH{0;`KXF0_2RS@m1tOFYwJ|im~jX8*7XBhA{ zL56OaGK9WLmNzhWwLCwxuYu;?zSIup!k^4 z)e#uIP;_lJB9N`{26CkAISpcu=eH`hCHnbMK!Hnp=ggAD*xKizEmp{?Zz#0w3e6k~ z<56DqOQMy`1803S^SLvea$sV3>V2>ZT0CF10WKFFL-+<96g*g1fPpCK zVaa&5(HZu4Tzj`9-Ec2`dD)pD-~WBXqovQZ#o>)hI)y;;=M9O||_)^8gT(`2YM|2Zf`b?N1{vk^MFPht;=jG8A5}|QS-EB z*3UmllXmRIY?x`K&8&2Z>B>-<=|Fdv!+8bz%_^3gXJ0GzyOHTN5hwO(0jY;9&#w)( zKRkA*c=X(OWNW*U-L77<8eCiaH|EIs@=g$jXl8=oJ4T5DLH{s?f7Dctz_y(&E5H+B zVdhKQ+I}?jtU__~X-wmt;HogNd@)bE<>ED{e&d<`kPi)E2BjSYD|}G<-)+x@6;vX^ z1d{C4hfg)DsM1kU4bZrgq~7;zapb~dam6yzBGEt`+}2Q|zD~C_R(^Cy+V#Vj-EB79 zcu7SHpqSc8B9JmkGyzKn$x6!PF;7LgTPcJhCfH3scTi2o8gciFA6|c0cW^8=XyJ!9 z#;2d3h&U}K1}8;BY24RK$$({-c$xMpRHAT5kwS*%`)D?jqxnEylAlQ(e8i$m! 
z!8opHIC1eGI!{)TC(C5vhQcz1XLGobWLxk(#rdWt$h36p67~Cyz-Yt|>xtB?c0|Gh zsyDBe>>AOSu?#s$7d%ir1Azq`j&jhK_y~|34rh>3m|iiFaS{8@&M^v%clE06e)ohd zW$5@^9Wi(u=e$L*eF^)T7e&Ax$h%1jgrA4)ZiiDy6v?D07{egr#`LwJk<4}*xld8p zH6_ex#qb+*FaP)}6+n+Ew?H%Li8qoSJu3=aEFQJ<9v%+=HubmAeu6&=W|xx*|u*MxbbXJt23P(%?ZwCIodWuAfH1->FJ#U%+L$1@nJV zL8;CGvnG8S!LT#m8{}G^5%zr^I1~-hNfcU(5FdDIr*{{P{05(5^PHo{gg@aKRXe2Az{d=g)9gIoyz`QENyTksB8+Y-#w+$$m? zv&V5M65HpcX-pJ&?T$@Km+-n-~ucXW>gt-&leN zAFuERmh;%RO77Uf{f*M&+F-Mb)rq|?%}<&HFiLx)nq8W5{uZUF5`Rozs&^K0G*2@f;%DR-H4wuq}(xLU7t8akrd z%|YOOaH?i!W(UYrpkNgI(^TDkz2B6)+!-vGCv>}*1sK&EIXPhivLo^^ZRx_C>f-SAbaPLRAV&s=>Br5@ z?su{O?#?Mw$&>iG98{M36W}K>AggJ&FRuy4MSQK%xN-nVqN zt6gxi80Wj*-Zv-FP8h;LuE)H)hrKy7#PaVsKUu3f3{5}yu5&{5cwja6e>aCeaU|q1 zA!SfAsIx^<5?2S84FKUK!-Pt&ckMZRE&dDyMm2soQ*hFeIQ|dD6bn_lfeE#JatbYx z@M!w^hU;EBPooS#{tm$*S|TC8H6MffMu#I^P=7SdeD*$;Zz7quH#_nb3RqfESEG8{ zzvf12_Rvx(PXM3T05SWHR_N&=83;zD17P^Lyf}(|YGgd7cyi(Po$yZYhw1V}n#=w! z-=$6)1pm!CO$-3d0&1qpjz0n07i<{#J6SmgFpsTHJWwtAEV+z(U-+w;V~MUs3X$>p zQpdG9zK-$LRa?SryA+!|M7Zl>JKp~M`odp#g2-p|3xXd+qfo0Hu@3Bce56LH0;FE$ ze;3^Rdi)aI;*ML_v8EuLm{}0&tzsi?4KIHMCIIH}^h(REf3pGaZ`svdb?Jix^e`kJuWjS<(TI%pbv3vJKU!hFQ4X9q@?D@TFSOW#k zrBY05(9cqH(=!l>1K~O{*ovPejKLAjk(?W9tbIS!J=yF=klZ-zhwJ3@fo0bABQ+iF zidXE;*_l?!?F<3hc9XFIEW}Zxu$-#BmE>#Zi0L;?8-*hB>=4c0N;%rSZ zClCl|B9P0Ocp~jMgSQ(kiU1*w6dP*dDsg!-5c+pzPn+v1hvSYjquR_pVZ*C`X#iHg zKd?i6188sYv{M{gzP1sV{9vjrbdy~-$bGdHBI4-q?4K*=z5=3`dxhekuzBYb+OuLn83&--e{SYs zx%6WnY+bI8+}CF1z#AP=LddlYJL3w^E^h&vOBx^rDI9?JO9;`zo39aIQe`KsOvsI`5Jra!Z0vAre9^ENFaLIL&Xb*zPuMIt@53OZ`5>%fvtx)D{n# z{6=uTcnWws=^zvKq6IF<8Tr~F>WB>7hcngM!1Bq15HS6TH(GyI`=Nnnz9;KF;Q-kT+&!_knK{&5>v|LH zqd}%(`cAq6n}M_dGNApefGX{+haDD681KH}hXAA~^^{}T_u^_`Y9YuU9ua|d#~DCd zO`zg4&Sudp3)j0IYiPr*+cB{x}xmH2n>-V?!e#{-N zwW1Xwr;0}{#}s=&WzNi3x%;!~Tu|Y&>&Cwc8%cY7@}w=H<)~MZ%lC>bWlr5G2Zh7- zEYD?ZC3&Pf*#^pEd*uX10)e#D@dfvI_`2D7i5{vCBdOYQQb>14D5GB}#Z(`s*N%VO zzluMO_1f0;BxcVF8%u@4*{0Z!_x2KId#e^n=Z{7V@TuFk^-(KQ)6GwaIWQksHC!Lm 
zxSR}HI|Cufy;onOi`%VIjxp0}aIsCS-f@c<89QKq+q%Km4Zd9CQPKw4S;lf(o`M+? zxacr5Yxp!Yznw&X4a-k17H^c>&HiTPqw3(n#j1t8g!#<4ez4W}X?%CG+?kqNe2JUY zh-;U92tAlHapHJ&hp)tQ7*Eg-O9eFAqVSP3EpG)orPf2nu#!*HY?8m;q{X zN>b`SYAiz1kUuH3J^NOI#66^FbUV5}O`L)h8lwDUJk<=+u4XRMdn}RObq)FBR~To4 zjZlx>Dl{V7p2fgVZ(c&2= zTy~e*&l*i`GnD~$f~Zd-mOPbKZO*n%;hO20y{^@&?rF7q5gG&6gFg66*HTw2Tu;aN z#5YfW`{tjj+W*7Y{#UZkg5hAFQ5-TR88v><%C2cjxwgBVE+yVC2gOU_HuK;*&~BAX z`2t2eaodd!BL6o((zc$y-;eTAd_etw{vxicL-au1(F(MXzv%$h9Pwt&|m3=FU_s%Y*Hpz$c&Lk3Y4k9(e>C zN}+xJIBHzbiRzSJn4t&gih*>#Ys5n6KoeF@FvzHfa(^9k^@>XkWlP3-0o6~n=#v$c zAAj-9GTT8Ne*P!xdW&y;EHw>I02G?QH=4Q(%7V+dpXCP}cWCsQB2xv_2ouQff#gpC zf)VY_FzsD==$an1zisjPa_ITC(^{P{Xnr&w^8fe&+@xEv=ocFVM?D+xonuF##iD;h z*!#kV1uMhyclfT|i2!6Q$#8t|m}NbLiEi;FmqOgufFod?!Gu*53Uq6}*pcnT(isVuZdb;V-9y}J#MqSD z_73H!dRIf;6hTR6e+|TYwWuRDdR~UXB>q2yeFaop&9d$wfj|fl2m}kR!3n|Lo!}NU zxJv@TA$V|i3m!DM4+M92*TDxLV3^^}f9^Z-?mPF~wR-m2YpeUZb z0cBKVlt3veFpv3~`QzPH?49GhZ9T;*x{Sf|l-P_=3`NoY#;=(!;omD?77|K{sGg_z#b;s=o)IRO2m&~y6&vJNA``gM?yrJfq3e8 zwpyhZ!Rtu2F|104oNvfLaL9hsGghl|fPeFys6K{9iBk6uix(!( zStx%81>?k+YYC^55bF(HxzPdW1F&Ajsd#QCn6dBd>3ExM1|Zew%{ZSNm|EVbi9PIS z{o0KG5sRRyltdg)X&HITg+^`1oc^f$q&@I?|2FaX#HcJ}G#$HL$;kql31Lg>NC890YG0FQ~0Hr%2fQNz5K$*->%AtS`^s%`AOQLXRi_h zlbBtWO58YXPiR-8TcYnNRHREBQQ8H9cOj@Q0mBFy4pOer>nj(%*n1e3vsRwI93K;19KyTHPWU)oyxxt9+i`2eh96WvpHjHKJ78YSb- z^D?y@6K02FZnBKnUa~d;?4lov(Ujz zR)=3Jm9n`An1Pf4mhWUu zSoW^1d^+RYYEB-cTqGY{R}Q?<$=u6{oYZran!fzB7F<*2e>_MSNl{GsgZyddQ^nf0 z5w=p?X=y$L1Korww-t><<8`d~83TfIadcntVSbkwKtF|DRK@V4AiAFK?Zv^2KgSP3 zwqP*u3A@cBqBM1jroRU_Z{Y4X&D{nnHf;%BopdgSor>%m5>Kd(-8+w(_CYjsU}cVD?{snoM%OoAQG zq9byYr?I7ak{YJJIww@}Q+N0CltheCa?XVo#FRRE&A&`BF6*;Ge#PJ%g0G$+TrY5l zOD(M|#}L1YWb*yJ@)sLV$C{XT8uEur$XUBmzqwQDy;5MirML>VqY*-VGG}&)@w}cS zR+=e7Z|;cIYb6}not{gSq!+-=U)l`&$j4Jp3%$CM&heg(j6U9DZ`zF4?HMGTH98XH z^y>}%ZRB4(Jj z1)j6JpM(#FKSyvbW1<5$y1-`*B<%t3VnxUAzsmn&X1&1v5*W{R&R7lLD4I!vSAih3 zg6Ly({SjS%RMsBy&C9m4m_yI_eGWJp9gTvbGd(u*F>y@$E(vrIcpE=af3%4vzgHbz 
zH!?-C6g}r#1;4%S)q{7LQ4}-MiPaGa{#QvS?1NEAmg30_>*9xaFHJq95M|m0_mh;k zT=6HABviVZ-@vWP6s=(g5=-j?_U+BjY72(+`jW;^%**B*97^Vi(|3XaFwq-j|x+ zPm~@2B2b?j-zLY4;_mnYD^t!Alf@d-tk}>?vAIfvR5yKe^sqd~4l6#ZiL@yvlnj%u z^r$7b6iNXBlGBwB1AqE*^AV7;tTvy^Sv?lkr3O14p@)M4A`u@x?aD_yL}+|()u6-c zYyWWaag8^`u@EL&>3p(?@$E2~jj#U6&N}Fa0?ov!=y?;I1E8$(xh=T}Hz-J1E~f4K z{HdVn$)X3p>}F%7L5)wJE7>w6Th`^R;EhZeHv7-pnGe4`z&O@33WI!jr4n!*WNjg0 zE4VGq{gXL;$gU1m`g=Q9Jd@ag0P%wJVdm-!CHU2ZD5AC%0a0`yqd}Dr^`-RoKI?Eb z4}s{(^7puKYqWQ(m*%L0dxQWt6k@VC$0n*tsg_|W-1D{_`uyRYWJDPiPFi}ptL+1do;(3n_z%iNtQ&_q?cJQh(JrR6cNHidm} zgj)2At}dS+^pp$X%?#Up`POMB_W{37@r3ufqJDS98fND4nu#R(zHWd<=ofNALIgo} z$H+^7;(gQss2+Pb6^D-LevPKC9poAiQ_JI;JLw$>ZIY@agiCkpCl-+w8P~?@1{hZ5 zQS@!Rw`09OZ+SZ9K}9Ed_0|xT*J(}Wu|w(ri{A8B2$TC(C2)7m??FWHCUxX9lWJM~ z_WM_PJzboNhOINid*(TlA`C=7>iuw;l{YbkPCpbMu*5LX-O>3eMVuicUypLTzDwg* zIF-E@&%ggcm5%qpMN|4AwfW?w4P?zRY4&tdJ7w>uNL}5^n&y4q^5=6rfY@L)H<3k6 zq&WSddP+WOmwA>_fbn)wti!H^VKV#gZ3`ZDHcp$QYH2*%W7lOjR@(8rtVNZC_l36# zD)MYWO9Owl*w-6MU+R&Or}KoSDQ%-6|5z2H^QCq=+P2U+2%V{WHc5-daVK8*go^j$E)ZmSx=Q6Ua-sm}u-mK?)d(j9+Y?CgpBoSAPe% zoggo?J^d#Ju}icI`6M_Y>)mVG(it05)%K79Tb|xTRqegJ_U2|?k-9f5L?L2{7-2YX z|KxWVDOb|Z>)AgLbm86RT)%zD z_H;5t{V_3LGe)PSDUQ+X#_n%*{FV$3+E75s_>z~nsO<{(9Oo|zS(k@b>2{IC4z$iou04Exg=-zKX4)6g z!EoNwe)^NiEAOm(9fo$4Dg_}ya3$@1%S9vPK(q*{;Q{RmP(TgWZD#{hI^fST=$?ftt- z8yf_aNa*$?ei>kL$Y3{Fc^p}>Ktv>EE8fIu-zz2cCb~ahL9p&=r~GZ(#ZK%1pVMri ze4_JVtRk5l{A%evf^zi3W6cv|9ao7;P5=g2RXP%svr~6-CJa5l&GgOVBn%wK1!TTn z&DtxHMGSnp_z7x|Q5YZyd&osct@RIn+I6?otZyriB5*A_Vyk_Zn_f~v;$xvRl_hGG zeg`HvhPXc#D*_j$dZw-_8udiNnb+Gi6j%2=p!h=8P42j+Cqs!V8iBSPztKiX(jOgeoZ5(V!25qJ`WfyI_T$b<;QZQa(UJ~8L*%!1j`3AwEik@LYC4(R- zx8R45vRG7?ps9AZhQvql@8Y!8IQnnb%0uRCKj$Vl0aYqYkq>`%L>2P37}KR-Me~q< zXaRH`K#+Cgk%`R3Lgq(jiG(zO7(7n;u{9iKzax$hF5Z{I%`H$v2}9@T7eBl%WKDb? 
zjH6r8?V8Dj(zUCVb0p>(P}`opkuxbpMaxa5ZADU@e@QMmfyw=h`r?UB>X0k+jn%o< z0BXdEr?fDDgMz?JUXkf3GyB1S`uiZ5e4RFJw7&yyHlAY7uakS)JT6So=yE|320n(s z?d5J}Y~-TZF`N@5^oXvhw#>4obWK2P#LDXfu;c@%_nT(WAFS!v@RM2NMrLRfVm8x_ zyZ^kVMG1U62{l#$$*%)UFfGHTg&x0~*L9mra)zY58zW_X`7WEVk^9B942^6x$7vj) z3I-}cqN1ahN~61lGS|C-j@p_a!=F=3has*^-z#az)}_3!i&rBq-sKdS$F+1}4>=8@ zNOR5yzc&^#a6f9p@7=fOXgy*q#!?}rL_D>9wQPB7 z6kw%Nc|xxBr-sd>R|`kYszdt)X*977)>I6=m^Oxal9)hC6_=d?8pB%TuX@V;`K7F? z?jLe5MRYOSrqIUU(By3Ig?fMIu`?@>Z=Jb(o;M-S6F-2StGsHG97(tL!yR|SZCavC zNm|?L!n>AOEt=qlz^&K8lQ>}9W}H3Omz{terL#hiRn~OSp7%kCp-acHlB|@eyg2V6xMPucwO7Vu>E1=L+L2ywnfLPNqfV8EYmPkSyACX$`On2+ zr4*pBDNH?{+mfZzNtaF{R>g^-d)CW0?|SZ2f)HfPwSu1Ai1G{UA%%snEky=@ao4sdliewJZl2m`6CpR$6t3n4HAqch=l!67L2D6XeWsU4$Tb7HZCdz`#1R(aIHk40;#+7#WF248L&N;=m_UPy|GW4khJ zyhUn+m|<9RQ~z?7U+t7f%B2O#5u|1t?G_U5%~x)aAtF$c12JzeR``d%f`*DYLkRJ? z+zT;wk@f4E!iSJHxt|6pUIF@#lTjyM1;zL@+Yr7#u^E~~3qIFpel)zc4?>NT8|71f zG86qN8bB-Pd-%dE9#&aD{-aj6EbL^&Poi5T;qCsCr+z==2QkgOi);Ug_$2bACYS3_ z$rxM{=X}I12GfFy;Y)oZqxtE#iMW*{6u{_Mvsz80+=U|YYeq)WeZg;y@`~JkjYt*R zk$|l)Xu79e+jO~`%M?Iy3bMXjjkkJB3az#K1@7Hp2!6-n;6Hdp`}r_EL~Z+EaS@xL zlKny!#;M}($cn6tDf`PrX;^xZ3 z3%4*C*LW~3rmv?*g{z-sBRu{GhAB$+`QemGY86a>>fD>8c5*U1RU zH=TV&NUv96*VG$6NbrFQP2KB!s*Wo+C1d;}dCwyKXQ2spRGo8 zH$;{I4EgG1CLY^rQTUsTknW=x#@)8JC+;XNJ!SahLb&4}vzt@Nu6iW`lYiDHH(-P? 
zac8|9mcWZdwv;|(K`LyP3ho3+BPa*TYKp8)Fn#cDau0*^x~6ON5$vHTi-D}vr34L@ zC5RDW3R~H#{Na$d6bRa%^<@#EKRL`>?jU!S8d!Fuw!sf&VW48uH9>XjNRYts|NQeU) ztu8OQi#kj@EeVN@WJBA9(F0ZNnERZULY2I zBFzll6G*-AVo%it1=&^Xo4$Sq?S(Xtyi|j}GJ@uesRHIo6-6b@&$phTa6g?riHa(r z2xDNvE+Snp0=?Y8`=kJjwJIhid9N)RrSw^jDn%$sD>Ugy6HCf-^tz(5OGuCwh}}1e zcN~>xvV^{ABkuuQ^Amg(FuD`{B5ppIWzd+bYpZX3`{5fBE0^k6@-Imha_TnUPn{A> zYvI&=;da7%Ty&0=-@6myuU3FJK9w~&<+<&l7&dYc5*Bt~=Q8-I#Zi?}@PNR^c@>xp zGr9bf>Ke$|dU|X816sE~b2Z1#Uk=QY9CACQd+`kIAfAXpSM!j~L4+tsl5ZikU7=NT zqA;_rbNV?noSQLN>p5w2&=MW#)bap5UOWmlUEE@%d6}Lk!Ttq%wC98uXVxyUVlGBN zX}z1wXDSA)_KGOx1?%&jnDMo%HN5Y`H^XZl)A|W>m`{5UCQwHc?} zTBl+L2;;65YODWqnLc{6P+kg0l8s2nP!J-&8J%ux$5_sL#QRL;`{xcy7kB_&1-bL7 z5ju`%v+&n5{Y*~-XlKeXvdd>4MXL8^O|08%#<kt%&b1KBd1&pC!=?@8q+| zHgn|6QX#qWLi5wh9xAfmB{$?cr}bLw$h~hYT6L0OOAYM+bnV{g=}anU>Ef4=G;BUkl78R zD_ZY{R#iaS@t!hCgd;%dP|96!He#ERZ7)e(-~q-8KFmB_J`s|h{WQ_y6cFyA#r3SK zH?(5)q<180G;RoHl5Cu{imzwHm^9hE&5sAzHwqAT0H+=%cDyQ`q<-_&8FAYAUHW&p zw~DbTjSZ7BdX5*{$P$I1p51ra8&K<#hcO0y>gq%e9kc%yT6bZa@UQGntRelSsQckU z3o@JhiW27AAi+LEoxPDzF8ZEw2R*Jf_RR?o*3}}AqoJG=QVYfV1JU0sEk-$OzD%(q zhfgB@Y~1e2E<~3CsV=HQ-$=FQ_anVh*MQL8AzA3B-dz#MYPOmCY^x=C-YsY-t$*{lzYn`vrq{Rd+5sinEH@Q=;AJM z6d^Bi16ITjED0Ic#Em@jcC(gnr!`dC@KqE^ek5CmQim}2ShIu!BunK)w_fQoVhhzi z^`1t?IWT{JE!<7$LjUFI)hHpaHRXb`S?sIh5leB2>D*>ge{BE9Mii1DB(Dw>l}hLK z1nzb)P2r_aVST%~3kix|{OAZO0N*nz*oWl(8$O(vRgsCrQthKR7(8a1N{iwE6ZL7H z3lI6KL@}D|Q1glFn&0M`gf%O{4B=VhD&J)>J|zUwBGF@nmCaf*y-8nrkx+!vG%%^H zAFB#iFGd;HV?z=aA_-rU*^$3|>9S@WrQrALTGrU6{oPMNCd!->ITk@heUgfOO;b{{ z1No~I*ZZLY)kT9{FV_{LdQJoe-r}&Z|Jyfw!~6aapt9^J{}6cakd&_ppI18|T?pHT zY*K(ad#i}j7`eOBYOIbYNJP^6JLEsn>Y*8`(a{ zoS=|p{Q;!a1PKDfmu)`Tg;dY=)>*x`O`aw_jB|XbMlm_m361g|*?0~s2kN-i?^(5N zDA9_3A$;K9@d7;q?B62LnlzyEZ6VPrAET-wDbDypnLE_3q`C2>{WCQAq{t$n^rv^S zZu2=b8nIx+Icy`xl;7_F&zCQ4688jOql^?VaNP-&BubXWpIm(UmHR?y@Y^)zu@~De zf2HEBU#c`f$yV0ub7V;oY5HRzi}Yd6ZH@;sl+(Lm{LWF8QjS(K*{&3|OK2e0PCOyh z^A<22mFAQ)@~Qai_}42KLNwY69rV32+7@;%K%|3%y6Ut3ndDi~?-mTtDcHoGb-{bG 
zt9cCnX^Kh-2Y)}~%WW8=Pb2dZ7OTJ(-pfeHqh>#s7&oO}U+V}cAe*Hw;Qa~#8L6o8 z7u6zfzw%6#{~)^??c(|J6TgW0Z3c39!Vw~6<$y)H*4xrLONLJ!BM&1U{*n3}T^n=3 zRk3JE)ynT9_vWF!ca=1PKbSVtkE9Yjc=KOk2KS`&CnRGdV*jr4(Ax(_7}Kzq#Z~(* zQh@wHEN#)y{cbqg7FQUSd67piW*MKW|47AqWR+4O+uT1kEPH-G$@qFn3aExXiv~HB z>|aJ zf~Y!)OavBBc*ea`Q!E4593=y$2KlFx-xw&HrM1iyG@Ir`0XQs-$fak0F)q)2rOwhf zalk~QS2VK~qa+hv*?0cK1`v|)n6+*J z9Qx;sZxc!*4H`9lC`~sUaT>z7o%ZR!v@rQbDBZ`4nPZ6tZ71JOm@!x$^R%o(AouG< zR&KeY+L4#Hme0txwj$lDzva9=TdR#aIWiivUTFN2q7=brF=VFJD)afr_#K4bw#4J6 zP=5Cvb3x3o!-rbO6tWM@6aIxh-6JD^3>7_%2p`_0>={1!%XznNfUicWi5tLRTL{dn z3V467dhFqUb3+bT>JXhki_@P(u?_SWVk}l57s;bhtZe)j^mdb87sR_!)I-&=<3%mO zdx4_nN+16TB;qE?LBqgHxmR|B#Lr8DE@h`ECWOX`W5cubrxw6j3AKcCiNi>%uG#CUC|{ zwUw{1{B^R&TVe2ZvWnQ*LdER*8vYVQ@EgeIDyc1r*!<8cYK3q;ulebBTry7)a3J++ zaKma?-L=W<-Xj_c?5R_AYeci!aEFwOKF;-a@C;o|ZLnW%L$gMwvV<4fd@nJNTi9Bk z=-??XuwAEG(XhJD#>VCDmdq{onHA$XP>UxZ^q!xT#(%Av%OHPkhX$syH3h_!*Yp@G zlVx)5L3)c~0`WmIk*-@7_0N``WhnaxcVVD^%QN@|r7ry~!~<{xUpLnAqA;n0di(E{ zZTYzM9yP}Z%{`^H5uGlrn>ahV1jB1e{jDO$j98&Lm_!T-n_ zC;Vcyx~AY{j#|;f3et7>()!*z9C!(X8U4ST){m^`8+8FJHgwW=8b&lBe~jJlv^`A) z6TWF0CE1NY)w5MSO%I_up}?J?jU|8h_={`bs$&pqzUPxtWlF{0LvzIXlr*@b9wyO@ zG6(_`IYoKYESn!femVK#zqYjgmF4lD9sC#mr2pnM{6!-mWJ35n&NZR6;5+x2e!Z!# zGL5vrV8rM9_tnpHplkXig80Qxx+Q^)M(-~VJ2Xo>`eO?m^btU`cL2>b|Q z{3Ox@OEiGO)+Qz#U~?N!O)Pma3#&Ixw!qb;iH&O55^vK6)EmA5^L1PSfg%1LP@j87 zt}F4~Ti@s$5HKo=PcSKwxrATe)y~FWh+56h5ysn6xfcX~1h&Cj8h{Vp|9q*<6=aEh zJx`ktB#ZX$*4+Bm^+`}ol+fCG{RwR!K@R>RPT#UQle|lT@Ub%pd>+Mr13IF1=W+7%WC^us@_(|0Ld$c37rS7H zr!O2{5^dZ7ORwHo0{g2nQBGW>u6yn}`M~EkprpBwHW8eMtEZy}E2|C*ehE?6@9_B` z=-Sg$!=;w@Q-g4hfw$-ZaH6t-;`;V!;W)Pq4q-*8oF6LHRyB(D?D_{^&@PArO|ivf zP10s!b{Jxc(Yu~wGiwzKNdyI%aG`ZsbN?~n5ObP*+5-^fe;Zb${WA7&qYCw%x2sw& z)8DLa3V=Lb=7_>=3*qZ-z7OT6plW~eYw*+Q&hf^&i2n|I(|EojCFKpO`qi@80m*0{dL(#OC_9!D?54 z2zDyi+{DJWO9<2-dZ)FLdw)-L3Kss74z|9;# zVAv34&iMb)9i1$3E6*noGHqM^>u)-RvKsN^hNb%GA}^(284SP85{2&HLZJrG)zqAo z+k2?*nOzzb?{SXvy-Yc<1Zp;VhxVK6v)_>Fy&$p4@B=LS=G@BV=3k^2A$w1YZ8&!d 
zb*=CnhM}bF#%xwiQ$1AFqKGd=E4a^oe?0xk$jEpLeER}Hj~rM0Gay`74Oct~JMy}f zr7fV1#h$oPSruLlNxUBuabpTZ$CF zGApWIk{0{amyFy|6E!)kO(6(dW^YfHR@j<1uTK9=_*OWqc*Qe2w3=twbo^_686HGzQsK)<#-q!(6q;0$6u~8)t1J5zGFuo}SJ@JML z|6Y>&!(mAiZ^xms?0LEz+Tw6DvDt5)%1VEzr>*TT=<=k)Do@*^xfN@P-Uu0DL=@>v za*`(@=PPXm5IRFw*A(BJ4OHZAzvp(I8d8~5W__uJNSu_K|Odsa;~+X zD{S~;ZK1*-{bajsW~L5-e{H(PL+?CPc3NJ)ZESiwh4jjVnU9_s*Erkr4+cYhG$}7R zp7D)WL^Yl%vsO_7>!Nn6Z%;F86HK-Z-m~n{4qP|JA$$QlwVTKhUdyN%`Xan8E4NJX zKMzmMG^&D+!+QIDjZ|KOS@_!eP7$P{sf1}2D}Us z9AALy86&`Z$GbXjN0K=e%Rp0ve6j^&zA7UQPg7M->JN|KI%JHG*>hPqa4>4t5Uf2o zqkfkA7sCvv*#Aa||1oSXc%`V%^o$b%d#$0&n?w7*KIhN1qV3$FV=FavctgHMU@t(E zVPTb>O$ZL#+lyHL$MAhx6#Fb;p^x(w@mcmOO-|t2PD?LEGWV65F*XB!Z?aF>4zA-C z%T*iom4Yso+mrW^mGG$)zLDgQkq+R^`~T{&>xw^%akZ18V=~})gza)favJj2ME{4j zLqjRTj3LJr*Be~azimgyZh=sOjzwR8uNflVc7$@AEs(FRDCueqf}eZW`zksDKx+In zu7`K8m2Mn{Q4=wrHd0omclALzWvQgp&CuOh@D3J7nRzJC@?rjq`$SDAp~04A;TW-xG&eMuK)U%jVeGoP;uKW{yDZ$Qkz+hO=o zDlC1%XFuFPI)F77-cBcXwNh)Ln8{!I%d*(&U{J+GIngjt7#CyGBssnp47Lp~`TrN<~cQlSXkO)zCX@9>%w#fOm9Z^^2x2u&|riI|q5G@dVr7>&#ASsdBT0(b-HKecz0|JK7q09V4~IW>pIM41U|#G4^e_zu>3pHJIO( zH;_~*yF&TelD9omE2=-Epg;Cjht!S*`!@f0?xrpmX3ro~q2{$iy3w5Nb2+r;n|#<- z`=H%IbbYJ_Ios&*%{DkaTF^-Et21Ip>-)t6J$52H$Vn?>@Aqi#dAqznmWjhjw)@Fp z0llw$xbk7vx7=`Ct0ZGaUD5Z@3Us~TVzKH{E|2B!!XFGes47!2VPkimvT)y`+ zhFrJ0l#fRWKipLP&WrAk;dC6uidpMhrvZii2NTz&&+zf$gvMtu?nmS+9W1Z&*^m^~ z6G3{Hqi)~+ezvZo(02{K&|67j)j8*GU-}s5^lEWmCXNarBu}KLkYQ3(v|?{LV0lzU zE5o5F6L<%h0F%`hVq*>3o@;yh#nE2#m1Xy)#9O}>M82whX6H7(!^&)f!>>~t{P*bC zAG~?)M3_q+p{>VN4nSxdaOE#s&?NeFCF+!F?C0sd*h4;#@!O;GD%$~3={0+n?`rh@ z3Jv*1xQbr~bPeoS?X$<+=D%_3=wY{UW4JK~-&KVd`reNiJKlbniNPb=9YpvgVD}-q z)OrZwZY{@(PU+ltP?CI_t^NFu`?@FSYxSHOvc*Tnn`aRZ%;n~`Lumpf%evEj9-pg% zwJ@^qYPC|12gUGv!JQ9t)$Tizf=+IkEA^f>68lS4xxcg5)j|0nA#}XjPBH5~Thfs81G*NnzSs5{gUxrFYkL{)89!vhsulq0W?!RA6Sn+W^2=8 z&jre1L^m%bx`}& z`UZRx2c9QKadm>fP+^Gq^70As!`4oX96wqc1!tzz1Lv1EntH0qucU)Z zufUJv;4cey=){(P5{TKqb-%#Ih>m3MY$jz6j^We_Yh}uOR5Sj{8`H4EY<#bvLtfP% 
zGRoyvbXM)RA9^`ECH}ERyG?=GGkf#TU3{#MIkdqT+Mr?rpJs=tLmQo-jkhOOqHXw6 z%gU}so_ooL*-2$%sM7>GpQhYT7N_Q@`sm!VU_j+O>(u9qk7)9Il4^ut5{gWRAx>Ca4`qR*5uCo!=JJagw4sLB#R&}t|5FEEJ$!;$=UG(tGY`=Wu@$DN} zxh*TVdf&DG%H)mV7gH!=7(nju`&^q?zw3e(r^kM$`T^lH7ek&!#5MLd3nWw`yhA); zt?Ioz)4Je)qvPK&^7s%5yo+?7@LsPIhF`=wIr&{>3aZ`ap$nIe@YLF8b<39$nKyNu zI;>E@rYOE5gPvf(rv>;W`%~>mM#9C-@1UF6=gKK&aS&`m7Vu;RF$g zkk@C7Eu?yBZnSnk?s3*X?f--b;-IV-QVEbhZlZgDhUjT#pW|BKepse{ue`s^>gW<# zMV1hTMYHvs=yC$~Nw&2NSj*nL`TcekB+}$N!|SU7J>7txs(3LSYgKz`>7-ITT~L5i zn%=8)-e&?^5+$oV5!Q4uSvuoYXzxT;a>oe#A!^_OcM7!2yN>1X0+ zCYtH$khTq&K!mhUJag@g-^qYq#phqK=Bux)|Jv<&#JF;LccG9^S}43yf0&T$ndl{B z3of~U8$aC`XLIztF-Z48okkbE>;9WJ&oN{*ifdBa<+t_5FaMav;k-s24I>0fx_%y7 zrTQ>JD1h?@bTgy*#*63{RP6Up(VA zC_z62_yk*-czT>@LVTW1f~)<06;VK`?U^o}l0eGi|HH2OznsN6`pJn+yzbe{(~jn7;Bs{OtnYZOn;0m2LzSmoG9nucbn8wNu}AA3aYMbA-0Ct*ryLuiRNA*4uO5xTN<#EbKe zXkK6DoV0U`bea9>lHJ3X@-t`)$Nw7x2TPq?^PRBGj+KOLG09Z^%iv`DVg}(u`L$-M z!*CwgFnbsj_ylEp5pi%|x$#iR7F2#);#yhJ2oeT+X46v3CNVP~g5hW2R)IM6GfDMy zb9S$(NC8giX;Yb55?-#oK_27bHiR#+K4dueUZNwP4-UsUEuk zgx?r0@d;lTcODYXgW~b8hJ=MK=5Bx#_gS;>rjp4K@umwn#z-0LHgrhNvsrG_N z)l*?%A6?`;Fxy0n_lAv$cfrTF3Q(^3X(y+>BB+!$b;$CjRMH^2+ABpWfOXF%!|uPE z`G4^M=DCnubU3%}8@qqCMp{G~NAgE<8hllf2m|TUR7{L1$Wr1fpgAd zafllxi1lzgFX9=02Yz3yHP}by)bSiYwu-{y(dBs&q2{fAW2}?2*+#b!I#hwF#H}#M;bt z>{c4<-PP^J#^u4yg$)CuTP+S(>&MF&-kq)-C?^>_AL^==hXI21z1t^vUa5wDo(w5D z*S!qx-JW;~9pTSM=3oeGZPfH7|AZ|e_z{XQw{SN6NK_gh_>J*XLn0RPBke1-eJkkq zLsFD^xGe3&Upj!TPSG7Z{_2NL`%ir4hffrGt+CEDqUIyIKjKfQHqAwc+7r6>ge1Za z)Off1fX0y4g|CT0z0_-qCR}GCkQP;)@xSzuhglSNZ5CB#MRW?sEfEmp-C1&s0!dJl#{30z{{`~!hj@4Z z87f(9wirXK_aNVSc22CW((YUY%kz)=d?W>9u_2iqP)BZ3!3^!scKZf$?#d0o? 
zK8E8$Hy5_miz^X_+K)#VQ~t{Q&Wq%6U);a%iktO(Tp5)?6ErW*HaL-NUF#2 z)wYDo*Pha`d&)N@8ArTJI5gHSsPv0;>;3;YJN}C9mGYDDft=*7FZ4aIYSCsjsF8zfiZ<`*>_@p^Pk|-gl95r zLwp(@OK$Y<#*Jt|;rKsebz2dgSQ=ts%I=TkiTC2F&H`C2nI098OC?!gM6mganQdtx zE9ptSDMncQjdcru{z17V+g{KF{A9xYbKov@zj=DRYl;u;3DFgf|6R}NU%o(_-E-xz zd*K+Y8axxXq7t_2$*NHqw>8O027vQCZ-W$_O?l1^p$W`FBUyQVJF4kIG9YD@T8NW- z{W>?QV_DWIE!f!3Nxw|>KY4{w5+<5R#VjtC7%XP1W|a+ISLdt*DE?NzdD=@6ctJuD@&?Dr&I_zb!w3DWIc`ui0dtK6BY z_Q#P^{s~|0wuS7*dQkiBhC$oy$Z2Zs_2O+S#At#1{(!>K0b+4_Uz1?$V#LyWvFFS5*IMCRyN?b*@VsPoIMS7%X2CBD4UHF0OXD~@ zTnBqjcnc6pjc($F4_9^^h4Uc~1=FsZ>&AQZCt1*)-dJofKt7izM@FB#lTZyM6PFH1 zU!-d`S06H?c{W$gl{E``3;rhCv_SIvr{NJz-QZ>HJ_&qr_&fFH>cTx&%b>)FAYQzA zKAdWAc4RuzJjz~R$#&7i!c(5uxc1piyw_4&)MjFGv5=W81*-=}FP_OV*Dvmc>MEM? zo2PPP*ijWlxNJh=A{|(W0coyr>b%;B*WW^kh#EZA(^j+Kvd*&I}b+ix3 z$L##YrYTwmjC{fs?eE&e;svf)<8>$JoRUsnfN%+653Kc%+Y;FvseohTdqm7DP$eLH zu_}YVAlq-A|Fo9fcr^#|-KxnOL1%hWDl z=Jh=-hyPx(j{COQJRuT2iV~Uz>xOJ-97PIGeNt}GOLQeL1 z9mj^?quOEfjCi&}fV^aH9776clDv%Wpvh&?C)d7&NK_&!%Q6XxvD(nTLJLi+l=&9& z{7a&>S>gHm8F8Q{ODof zEX4NBBijgh7xSgXAFU13t=XxXbKE-?yi^rt{Vn%ts|*&5;EMLE1)8%b1A6-HXj4ad z6fp$McjE(&%v%&4W^*4tk|wD^Ow^cZ8^3T(>YlARexj;++y)mrx-0!aln$7hk=Cr4mUUW4cP@!1ticSRE(EPL% ziC5|kixP7g0dqHgl^I$mP&HmZ<@Cx8E5^lNdsk$R=Y2Tsa6nORLA&F^72!Bx?XHWNwgS zr_p+$a&0dL!<<8Ya<=}5AJI;ZZOV?54eFPPKgc{PE#k@-gDOtzTWR>haaqol8nVv^UL!1S2l*x$fp{f~ z#;EG;)xTxo3t|o*8JrDBoq8*3ihiCCf;XRW+E;GkBo&Azo6%D`_q~!0nf?_>&7oZw zd-L=4=BIy>YXvY_-v?bL68|A_4qS^=#V-Tw=sBbCrS3A2_^m}Sji#d)z6)7@MS~}n zYRvn3bc?wvaIE|6GfK&sirDpr%8!wkBvs3acze2U+=emrkdvC9&#DKlaH|o&e`h|L zpBuF)%J6r%nJ56*j(IP+&ifzN=8E2I9@{4RR@$g~-+I>T`c;FAL@vE64%@2RZwtn3 za69}#9Nts1^L1HwBOX_ShAj+ujmn3%nmoR^M<~+<$cGeX^YnwHUQKQSk*E9Jo@i}$-?d~)utBiHO3H(KR} z6hzzrt4KKKT6mOmy9vDhtrTt07qIJ81DP z0~9WhYw*}B6q^#6h2sENt0$>0eaty{*u(^>I2E&K^AS zdC-)(Ja)*^JesDYfR)&sKcE6mqs`k{^IR@ADa(nj(2D1f)W?qN-|wqmT~)hR^2MbR zn|PVcyr0wFDmy!+yItr+n=%6R#z+NuXKBxMtjAKk`E%si**YAP_)# z@*jt}^e%HWJ#lu{WW!PGuYBb#k2FhcGpJ!kVWy4Lyr-P6+qQcY}ni15g@rDN?f>TA3QanTHn*h!NNM%H2T^m}7@y 
z>#DNjwAjfVtiIuR?5L-iGIv)6Rkc@0vuxjJEJ`GP8o^z!X5vk<5kVi^6ox7zK z>Ei%K0!yKIU2gjY)%F3cvnZuRgr&TceL}9x&fNY`d(EGdF!2TO7!6r@K5@i%ijIY` zl;^NL>Ku%>?p6Cha>4M|6EdXSPmOh1L7lAn?l`m~pdqHHdZh0MEjJob#3`W?EY1v& zl@@YhNM%lpFYg;Y{+h>usr>X>y84ZwAy==KH(aLuHp@nn-+V+gNuE{+0reg02QH(0@x3psIw(}Ftmj*RS$olzR_OuJ2km!V9u zkM!b{^U?O_WDIw;HsCbJhEzw*E4vA5dZhF(4QZDqh?g4)@xTw+n109eijCOj=? z)0c0zRM>HFYOkR|;BuPbi^0##-u0;gMmRlYGX#KSov+$54YN-V9;UT3+2G#QYVI@m z54649cbo|#y{QF6#EIQt9gao`*DJmhzEKpyi-+vG?o@-3+v0@yw3NLFW&)4pmF z&P}~QpDn-Lvt5?2Mi8dYyW9KDx1WA(u)ayNhixQ#n3~rr^8*O&hf?g=hxt@g>K|W` zeY3G&58j7fA%y2Ac>%WbR+YBtRvwLmq-P+XZm#n62rN{mujT2?*enD})5%ISXVu&z zM4F=t%AD>rSAI!RaWXoJrxl-sDz)Zy%ND*_!#k4S*1uTw5rMFEP1A)=3jWjgG(=hA zHZ@SMyiwmyR3KQP?UkD_3wzg=fmpR8N(ycI6SgG{biJZ%Mk^VuB>my0DUArll4U;% zw_$2)G*NVCwHlcW5mCeiXTJwktz z_cv6jabZ!cqa?DO(`C`c9OGjbD@r!WN!LQVuP2(`6PbVf@(-3^AVXGPY`(5l zFu^Q0>l;g&Vu%5CR*CvvxupTHDDX{tFk*Mz_l<^7%QmN8_vkv4CvC)<0VBwHV=Gcr6o&>e>zdyv$GM zunyJJS~R+|;hER6XtS@s5_zkjj}F@zz~kn~56UZNQGW9Rj&y(_6x`;a!!#!MKWIVe zzmZlf9E}-kgbCB-pQslJjHob)=EN`UIP+v9*N8(~R?ntXI#fHZdLm~BQe_IcAZ@0f zNwtmP=B~ELJNS&CSowv0#*QXhyeSKYu*^SP2l)&dQR}lAbA$rgY--rd3Owlned6;> z{MI--O6oeLVT3#$9x6kdU&0EqSmj|YACDuC-G4Uv|F_5V8jX3{08_fhyehPcLnjr< z2z8Fdyq08&Gcjzi6Qu9V(AxE3aTib}(2g}RZli5F&{FR(RV*!16B^j~gp;ic*Z z#q-wE5P>EkYDMn)(t(ExWKDDDpwxDwl;_F?QY!*<$nsN1Tf^~!{u%B8-^l)c>QwE3 zaRI~9+(k#%3u*7Uc}e$u+!N570>twlB**Mo#ddv&rvKh-r{4>;{FP8B`2Q~P zf2imGy)%X~xVzxRjTi*9ogH|GpOoITc<8`ihb)^lkd6+*775{3^%fs`2@ZZ^D8bOOlL?E-|kt2FQtO?tK-(gf^M{eTl9d*bdM3a*>5;vCu-@rzA|Mvg=>%jeD!JCk(SpRpR zqEGmypdyZGX=MdUWhJG0#0{)5EiLV+>@IV#ow4Oj(myhBczRFKdZB%b{`Rxgxz}^Q z09j*WYbTdbA)D!za&zvkX<#E;=Y;<$*zgBBzbp8$@hT%A<}X|G|J?ZtL(8bW zH~CmoQ`3GpnJp9hT$}69FNG2PVH!M?26CVM3#h{5uZ!-B?lTmO9q?hyZyESz`}Ih~ z?tdQTKex3R)Yi(VbgKd*+q0d1VA`K={T#eCC@_h5WXLcM+T~+hI9hCNE$1Mxt9&~w zso9>qm)-LJd5~QOjB1#Sm+cKy3Ft^dx1j;aPqK?;TWn%T2UU>I^J=;P{ia|KP#&Ra z0N-xDDSRo#?9O-_hOQqt!6;?-)6JQg;2)oOH4a92Uu1itX>GTTX2|m+sRkS;wz4ny zro%rP#n}0ZD_9iVjQuaCjJIY?r*-X{wil2jtH(CH@!SYRAY{GV;lG#=0ruFnRT;gM 
z6b57e)8rX>%jht%8I$Z5Tfm8?F$!|}&m9mIaWR`DfoQX~QH8uNvBs?)?!Ez+Pa8|M ziBco1o=<$U>%gtlR|vqjL3TC9+C1&=@d}Y&?EidCmZ(Wl8tBNi1@aP23DkAgc9tD; zv8%g%Dx{5dT1POdrx*iX99gTS-0m%RJh?!wppIkTHk18`0Rn%bXO$|&NP=(mrtkkl zk|NImi;zS~0N=NfX%2~_eQUZ>wxod{O8)j_wdN+YT8M@$#45dX;+%EQ9vPt4zKh3dSrvjbc^!H8n zG#?2LbY8tLn;|mX`XvxpiI|=KL1{0Whl3TJX8>sA%P=Zw<`>Amx!I?PKCRI?()ZUq zZ)m#cmk(5_O!DCY4~=UUwUln0ToF8NFMoUzh?(4A*mrL zk`#MA2pJjw>3+15L#iZd^u`1&iQ{oIl~<+mm2FN6_6}}+NB(|e2}c%u6W8MHYw5l1 zPa#VTBT6EKZk%3X2U$HP*&yy%#p>s+uqMFq)45ZF5-wib_8UaI0k>Ul*twDKvbG__ zxU=ICspnkY86a}|a?>=hZS+#CmS|sGArt-`JbK(Xbqo)*iPG587$be*sK~}N^Vd>Q z*Qt^OYkz}8QI?Nw<2Ps@F<`-;O7~V9$xYj$H*D5cl5`3g_EjY79`tX)YwiO?hC3`j zE-0`K&N>=;rw=KWMb*2Y4sQw*p-fkCsxk9c#uzxrmMH?c*&53?jjAYlO-s+Q`6%|N zRpQT$5<2Lc1&gVb3Ewz@Hz)EfF49bJkP}{^YogkYyz+G_5Nnp=3ZHqLyC#d2i;>ZygGb#_;#&l3Z&Im+=&}P+QSwl7{kvX{HM7fF{ATWI z7)gd5LijtJZ_~Wf46$CeF;u-a#CJ*TWPT;*q~+ z)ZOtF&v?I(=?VRsV_78BESrkJEj{NPGcvF-yfnxMrpy$=IR~Ft#M3Foh zh4-Mvxq}_!!wP{2Q>L2+@ol|ZPB5d|5qN%Ki!al!`Ji?f2mNQidRX^C6@kf~nG=~@ z2hTK82VI^$Lb|$Oma4W|#Nsl-SXZ^MF?(ok%(mpNVug3NS`AtRn%ds)yxSLEy^Qf} z%S*IdzWuqpAYc68&Mi_?i7+QdRNQ^5y8gxX}s^&xO1PLXL!KdjRyXnve;_Yg> zu>@#<>KX{kG;l6E-94(UZlR;nVu&6TPZ%T6aUao-N6Fy8$ngg z4p~*ag3XhBqn?XMtC>prg!QTrG&0T8>#10?bmY;vpCuz5-`q@DkSpXn>Ql#u6U8#d zp0G&H$tcRQ--Ek;6c*sU6zhoOGbzo#IREXS+ecW8`>JzKyUKPg26=Njmqx|!Z#|IW zD|<(^U}dqu>>yKiTlpFDQdLKSj;#zh!0xEhvmJDk?0B8b?r_vN=Q7>6hOObXu&C#doO8m1|UX22%8)`+V;T15UMkeZ$+=(PRb-XiW2ipqr%-*pzT(~jU+?Ox zPv3+5on4N_IKp ziisl7-G|1WmB!_^Ha*u8Ov=Iwsz2&}TvVnqnhh^69D%M$BPaoi``G+jZ z)dMAixX!-6ZW*#O9`!Y?{2RGo6K|(qv+sAWRj&aOJO-5z~bVb;qTqso5$hDT+O3S#J;(a_{^P`0L$0imHeA=Rky zwH74$CvHB0cpO$IQ7#T(!853?rliIj%Go5n!lNvlI7`JFvKh^!-Z1bCCn zu4R&jcqXkIM2YKXc{cUy*n-fY3!v79Gn|;V(1f3}_r|c7+=ZmIvMxWn=g%eQ1X1Tq z)4d%Mz z3!{BOar7zel<9E%awc7nGvfK=G)o$fWPj*zvk1H`witJbGn)QRK5JI3bhq6 z)jJz78h>(0ZF8B%AM;?${0m!ZeJU{b-o=;F2KNbX<%x@ZL{EYd10|^3Yq9;LB-C`K zLjic@`>sU1qtwnqGaICsWQJq@{a{FkhcP->NJJ_LK%kI4N#faltxuqk$u;plY(}9v z0bv;h>Ubci$R0!Ltn0)QgXP4~aFp)ztugYRQh%FF?L4(AkZk;xUu 
zW6O>++mg{Wr7uaGGkm~xUH9?=1I4VprxsJW?GQ_fRDy2L*7?Pj$=g@=VJZXS|E(?j zuZ=Bw>Z`Rfb8hzpT>D6rh~02r{~1sJ8M7JW4~_`X)YH=&OxnB^BI0uGiDcUD-tgYB z-mTujg--fZuV1dnfT=?g_)DG+0V)b35nq##4%;U^=SFpCb*k%L&n5I=3Gwm_vg(w~ z*5&pN7!XV|%ZhifoU~h=`XqLJ0X44<-Oe9V?%CcI!>qs&uP$lQr=UnXTJf0#e=wT% zVLxro8=HuvoGQ~h7xYvi?dg!gon=HhLug6zJGZKPWN)!so0-96C{q~1tK<)>E5KI{ zH{gyB$J5jPyw5^7FoxQIV2u*XCNQ0lmM=X16LY*1GJGU?JBSevw#Z4un=NT9*tkRX zL9Hx%$kD-vSRe((YVh6DeTR3TupmLn_thmct;i6?kw`*N2E$v*>j7DhG(5{p3a{jT9 z6x%4Kfg2`tler(IZSgM9XVEhoyIX;|h8vRrVNsqKg`>#Pfuw17htxbU4XZ9wP*`_q zW%TFZ6M78O9mp5D0hDrDT(K18e0o3=d_ge{*AeEr5?=4|(x9Zt#~C^w>D3Y+jOqm# zU(iGpb^b&-gI>bt$KWHgebX7b9b!(7{+ECH#XHg?Az=QuGdFj{fv3ca*N{k`U7q#d zbrU3A(}-w!ca5iM9`Jrqc|Ta!GAK)GE6h4L(9m2vJJl1am;E~a?64hGwp`)%TdTmH zD+{D)*ewV_R+}4wk`I?^R@0WKT!+6A_urh4Vwfsa0 zZ&q*%8REg_=m3>#kEI#eT$I_-2cioxPByP@ihRL;>;@wBY(?W^G-FOWN6> z=B@L01LyD+0$C!%(wFIzizy%RYLL2!tRPX<>Nr5=Lk2=^v$Rf6&pYu;Glys7S0EOr z1UfJJTXZ3GS*k9SVj&yD-$mkZVhTR zH$v?X*+&$NDbzMoO(&sGQ{^yph2x=^OS+&O5O$~2;`y*s74uSt{$>7ZoZ+q>f) zy+g9K9mCV7IWz)tc-+zXa3u2in_XFxD5BtnV!*Svz9jfYF-4+>AB;Jhl4E-o{vh#b z#LU&hzcOY_GOa5hMPD6!p%F~}ivwE83WMd4D4D$sy6-{aCVQe*=)(tIEQ>Z>W+O{KEWsQ)FAY1VqJKXC<3}O;5YSKJ;Yt0L ziPOhOZk;0bRU;${?#<(M8VOjy3|*@j6XE|2$ZqkYw(> z363fgDkc=vB>fhejd$u7r(Wa-b8olC8ik}agDRVft{t&=b+c8v4GQSQJI0+WFg7pBwv9G!bkJRWgW0&#!&S-NgP0@H4(|nQ5;q3-w7}nB1D_ zxB@S_Z$%f-alsa7K1JbLm?S4UD+vtloWJ+)yjiRwbIq#RYVyVF+3J)MPuYPP3yW;G z#V2+`!cWJut-Z{0*l^Z===IKCWZs@GJ-kPX6hk22?rI_h)&%&AzI7!idipGJtkTt9B zD;mvAxV1L9^_x?2=WPX>HxRIYB-$6>#eEaw5ruTyWwtc4wK;^n&d{-zpMCQ%{^-nd zl1(1_8i1iM`G?~Pr6PeTxbe#)TOfYWdKtGlvteZ{sQQzX#I z9E>&B`j9%BBq_;_r-9mjC3Ch&-zA4Y_nC@TD{0UAYFP4hlt%*O@_agw9pyb=`|T#j zv1R|im|XpLmf=cIz6UMTxRJ{v6Kxk##wW`JuA?-xXzOfTjMZ#;2eJ2mVp|*HaJh2^ z$h*ycNbrSGw~b)T#O3zluWsaS1aLEk(++PBq7fDoWmrMvxlx?XVscLFa$F8kHaj*_ zSK58zM4|L_$`V=hG!hr0@#aPtI>TtSOnBV-YMpmY8*suQI)}PJp}Q7tZD{PqHFuwBxu&C<}lS zlHnptH}b<1$S196KheKET1=0l1+#j~ImGd96x?w#M(Nu3E(!J~3E`1^jJVe5tlAU@ zElu|CLLogumWuvFXf^VaYO_s|pvBl5dv29>Rg&|qnj70XpFy1oYe<5# 
zf(_aED2>Wb*{b1sT9S4L_E*D@`t47+BzV#Jshcmk+}p2@1E9Ifa)SO=qa~52lD0l~ zc%Lo5!qC=-|InvI`JRV|bH}vxzv-`=*oRh8IlZ+Kp}TzCY}K~!xSn0#GexDE^<291 z)DNlodD_!6O{tSzq`M9P0;B#oJ!1TratB**atySax=q|v@F*!dwOYWBAMFX5?ctIA z8?&kF2b!%RF&Ax(7?)&?E!By~EP<3=+0`fFI{b73s z98+x>!KX=?5rvT@&l;5)Ip;SuX3moKgtCPDS9Jm%H$u7xz6|Jk40d|IFr`pyJHO1j z^zSF2rKa090g*fvbB}^r{Z%k&dFnDDF6wzmv!e1U8EYebBpk#1m0xIRX>Gh)i|@V! zMdfKF*|vK60zO);wI$;~!y6jC^wrB00hTgDU!mwcu{;b;SV?;sb^t-%l?W|1TB@Sf zx7m;;sAOpzI%D9Wj3%Pm2n7;{MLFZ*x%b-Ukh&es|4{LLKE@pu{8_!QA5&#GNt)Wn zk2zw&^j_?E9A3&+af~>UAJ)PyXg&^*B$OcC_~|%o97(5btQ^8ewL)KLO5WjMyq}*b zkAVjE>k&?Ra0DL+jbPQvw4z-odkl{WiipUI%wXOMTjs6tu#qAZ_c3&fPNaRMGUCP&Up-JaKNr|JmcQgZpoqEq8**mW7Wrx6Am!Z zo=|5Ed_e8%8E&JFbD~AfkgfNc#pmozJ$SiE_zr3*C2!s(Iq*hNhhXk|#UF2{nE?BZ zM1HD?#hwf``cERg(ayuh31yHk%Y>%AmcVzVVaxF^a5Pj3GvIMp+~XI*g?sW4Gl};| zgT3>2Hq_`W#D-4hlrt>KejyxpOwOMDgCjfH*eJi)MZx|)Cr;a&SFKCsa*p;qd?T`F zy#l-*@}{+evzJx})IUc&l`g+q(~+a~-SeJrC77&rtKEU1&S$50<+BP?jwUyDYF4H#;om z+MZi&A2=RlTmQb4haq-$(~VsdO^jF5Me;j0nyUm_wzBbv2QW17-Z4_BU< z=dGAjKH9eqnhJpBrwT<73idWm8b;}`%9HgW77Y(7la*gQHkLJjBy>0DUAb%DQ7X1_Xb=g+8%76 z@YoiT1qP<6BsF>_v_POJiMQ-uFHZ{L&kf?yH^WpQRiuyhjhETmhM~fs@2lgU)Y^Yl)*^kHY}EI}5$IFL zwO!t7sBa0iClwR-6(gKlNu>#(NT0WYm#>YQMl_eH*NT&ELkrv-=r9z#>Ks5|EX#& z*XdPB+7Ah{&Mlv~>(GKrEjOoZQ**-LRnRHm{;w1arFF{$pSKq?u z?SPo2ja+$DBEr?wh$ocEzNI|B*2A!tKP@G>7jD+P>P|Hk&=S_v>f@N7{59C?p4+fd#!KfK}6B@}} zZqG5n83HPRps71cbGNm0@b_#z8+)L1%=JmjEaO-Y>?>#4MkcftxPS;xtnI7n-QrC7 z#8-*y!XY0tHQ~+7wDLx6z;DPZE=?2F+6nWW6GZz|Z0C+U99f{_&^ior1S=m4!@%Lo zqut0HXHRnWR8u;qvUS_zio14f?jN!z>95Nqbu*HgySE5U>2rgnEvGt^+&Rp1lLcAv z83l|LF`w0AQQznXWgOtE0ged5QOCH0(Uvetai5`BB}Q@5bWN&%{?yfC+9pxGxyW3H z2_WSjjbCLLF1qUEret%M#lVcg%F=E4XxgFlg}@YbyOt47YRZi)3;$GJ!U>}z#q`Wq z5~QylGKxl{t}`chRKf-cjd}^AW>8@N#xvW(gcGuk3PXiTzw(xatQ@W~t%suFlQ3b) zal0tpB+(SB29@MV@kQ0h&hC^!=fjQSPzuGt=y?VcHCObBC|hFEDc(|+T2`1S+wT4{ z&jub5W-wtzQQ@-Vjt)JdQ!7;kwhzpH)!*^c2<4=q4X3dG)qhlwEl;23g`>|=3A3UT zRzY?HmbY;dUVC)7ZrLN$Sbnrp%kgg@^$Z+?dA3ZaA6v0GzhegaO%AFnYMbu`WHHb` 
zJC&1O(Xg;&M`7&wB5>Ko@ID-GP0IN4dG1CTw^V!gR2Zg6|3zc71oO96czZ9-+}gF7 zIQMy26hrf#yTzzR-k)KI$@egwx2sY|*>V=cq8bmn~CB9U<+fR;>FxRcZ4AzFRwVXW=L3 zR?)HI+)={o3!UxjRu5F-uB>te#nXM^Nf*+ByG-xwfo@LrA~9F zaFdIQ%R6SPaTT3hu@U&@5@i<&?(8$}g5T|X4V(LD{-yz*^{{DO_#iUAU(1+?& zmZYPXH`j*Ur(^0Y{!T)(YKWT*=4(8J_19>FMMRhBL`>87KZSZ#P4RjK0&xPBB=LWe zmlNN=93xyOB&&EiNz@}tZWIMJ7gWSITgx(!9y&AjpZXOx)r`VbZ{V*`Ub6S*ijl~n zmHd?bl+(G!**fTwG-dcrf6nN-n;i0k2Z5t08HaX@CWTG_G?hCvRM=*<;zTOy?m4Ed zqvl+os@kbysyFWp299d&jKzzM_07dLIfVNxU+LOrOPW(-9SIGIuq;zoN#^5lJEN(i zH?^CDjtYhhF-8OlH?NF$ehe+=3_fS>9QvjTouV{F!`t0fB*OV1{T5FYwGAyisI}!L zL4S?zf-&B#_0PHoqJ~Bh zDpdkEZv{0-F3^Fsl10jQ% z&&bH8C}-j{dY{MV$K>M?L8K2mfl*{sqvt+{&#j|g;X+8VAm|@9MJA@|`bhYnwr#0- zI$lT@oEAGkqD~c*48H$Pg+K+m(RH_9!77=E*e>-gP<w%GY9pl|7 z>84}ue?|zg@KZs;X?R${>uC~wBE5Zmn|3ePLT`}5>eJSyD7`pbu^Xq(ji;xtojFdo zCAX{fp3<;B+E!8IpK5isXgJ&##NiG<_Q8#yim53$X zf}Ohka7iUigKgghJu1fLBFu>5q69)FYa^~06KXcVK0%nY!=Ldb7<14X7xy<3RgdVC za(oWw7-l&VW${c_^gUj6GqSv=_^OPyYls5!jLwXg)Gx7#FE6-ME&+k}bpGM3tZWuZ3(l4NEcOh~Y$K%BL%#mP z9qRr~CWXmq|BGozUECtfZrnp7e1p0Xxu6~YXW*X!FBi(1RsEc*q}-oJ+QTtEICz5e z+1AN<_Xlr_E+V+haJ&jh9#`@l4-_Tz4>5^cWb^GlE+3The_v_q3k(vZVsWoWqrJ+< z^j=fbefMpL>uQ9-zo7kN3fG6M1Y3yD3K_!n-z>nP`(|_AXl>UB9pb2K#_QaQWjG?P zzxYbdHD>!uix0cE6+G{qucczV10=tBm9irWGoilrB%Ibxc@xASUDY7@V5t256sU;NoctWZ;!m|H5qk|o|=3D2R)tgMRF)rK7Y_($7m z$*W`akLdwdVM(euB~olJcdsptR;(-k9HrSgcukwG!}hj*%kAy;Jlo^b{Ra0N+q=72 zcz3+o9ALGRjmC1yujUL;m~#bg>C$~hJase?Z|i(|9X1AELiVRrnq2cm0&i?ZsNg5tLRNPkP=>sPdM`9KtPJ}pxRqn8w5$xzfN(?_bN@iw%m zlMzw7$Fh93*O8)QqG(@$K#cdW&^T^!v>5kNc=3(E@0+i{AVOS*8EhxQfvyS$R`4J~ zKD=8>=L;x;3dL|{AU{5r&grn2FYON&BKW%)RN*tm&d@h?Vo9@g8NB7rg2bU)r#M&3 zl$#+3ns-rLRq4GUzeC!GXt*#4xfQI^d=ja+b42_wc+aG^672YZz5@Eps$FT5t`!oE z5_Rwde#QtTPQ><7?bmSf;V8^2LY<+Sm{LT6<>o-DIu0h|>>(_P=8dL|6I3cJ18-7S zSJG*?u{pM!M+Dq+r;#7&$UE%K!65U4U9`3s?Gi;WJ`B7OCTa0>=Lef z_*L*zJoktu;^60S6S7>{@)GzbI5hzTjcWI4Y7F3ea>=!uPYIP+N@EJR!lq8qt3rbKM)=Rxps^2jX7yP9g|1oj(!Bs98wfj-o19hHKzd0`06x|iu@Qh(qIbP 
zd?(L7MoOCA^orP|e<@w~DC=bwKU$+JhTY*?oN)81;hi&~m@G52T)rJ**8#nga)y3T zw)pyt2&Y5ob7*YdsjQvQjBD>LwLMre?QZ3)jaR&W?hns2Sl@r)Da)A7nDu4<9G5Qq zPxXBcxu1A3>>U0=tCEVn3y!^g<7>l3-cS0K^onZ5+ut7DF6`3Z1d!oGas|7sJ`HRG zh)1cKjGbHL!aDOr#bRLB=~o;|rn7&rJ5SHZt~^hwz19pm13K=rN|$lV?yFB5pjEj0 zu~U_*N5rTs@;Ue2nl;8{#u5Hs_9Ijy)P;K!dQEz@`zHKedy}Io`FS?q*hf5$n2KPE z0GHOyW~5p$Kt}rGe&dr>iT>=ZNKTAyPy?8;1-xc_b5!YIZerfDZt%u&4z(N=1b&~F zjeZ+sfE5;0$dPG0{6)K6r*vJxJjs-$jG0=0824EzmI_uz>9WF9pTFM}nYMq`TLt&G z<7n}j)}|$n;BM*DZ>CK_4LP-49ghMtBa4DM#HzY>jQo$67^`Z@htxX4!a`H2nJGdB za5lZi1w@o?=XraBYZn?TJ)>K&x?s48hQ5=!C&qZ~IKf-5%AnRRQ-~X2JXR&sTamX) ze!+iiBYoF5Xx=ej-pA3H+b;70ks?3RmlRwtZedXs_D{R$MU~I3bVq6%xH>M~!~uJZ zaqRn$P^tSN$X=s7=@9)R5j9LgTszp;%Z=begIXo;{76@DH+jNozCX0!Cp`B^{cu8@ zx9}1_exrCqb|#Jl$7NPzoIeD{ON9LXHr27n8Db)il96p^9Cu{D7M-hDg8w{@I()^1 zXwRAaEn150B9-p{0dN@^zz+b!5`>HuC1AFc9o_caeyel=e!#&Q2q2nBTN26l4j zFQ#DkC9s~e?#G(Vlo6}Ii)&$}MkCQ&?!VdH_p9Winp=EBbpSw?sy_t0mFnW1>1E<0 zHE<`%Y{jYWeaM#?xc{-cz84(A^r5=MgHWBFLL_$iY&iD^|Badnx^8LfGWb+J`4+_6<^$;&EMjJ<1D=Tb&hI?xnRDc5Ip zrJ3Qh#f_B-iuS2nN3;z!m8!1!o|9NZX8imjg6?uDpTtR&NoqYVK|Kz2FD-u-y0}5K zLo~uJxa!GG%dE)I-F6ytaYsc8>_h*-rp%N$k^rKV%ik*2kDjvTR$@JtWgjr988Md+ zw%`>2eRzkfp?HQ*5891wx?4y;o~zEDfwYyA0D;QQg?-WC*=%%jeE8XU5?_rr@M#BG*4T&A4voC_Q_fsV>_>jkQxw z(|6E!w#}Qh!;c7I2(6|Hz?{b&Qpw10;5r0eC0v{{Q&+{2EN&R$#vV> z;fa#=`e|3wfZiiv+oB|alks@tkk-Iqw}i{pRW)u`JqDIxIZ`ma6;NP`z>PsjwCQGj zO>f77)BtLnU8o1LH`wJ*GF!J`090AW9Yd09c9c`&n5Lt5Hz`?Y+K4oZ@_q$gntg2X zJiON#g9Y%dnnM#@|Dq74sTQj2dS{qyh2bbxa38@=fA>?1%1{eq)}b#-<_)fWq_Syb zm6|FQ(1uw!yITiI$5bY>v$-B?{aT?;2tk=)g`QzWJ};}3eT^M$7m=b}+BPgjmK=D( zvFQ0-7=0_5|4n}W99D)*FErbeo5AXO2IY&4?^WaY#toxNtxJ%ld2%}1(= zAN(p3VvKMQbQgeHT-sa!xGKa4_5_XgWl|)>U%4Bq0Jfn22{c^FP~R^$cfeV|nb{J7 zFZ0X{6}*h2;%&~33wjHUEYkX%4}E!OxfQVz+z=H4TcCF7$N)dRRvn?OPK}6nP!g1e zFFgKECskYNAzihSODS&{T4#+C%NWm%E}6U0Q)XCv!;zx3J2|M<@?}_mNMk9(%*D&# zb!-3el$2*AGC`PMa23Eq)FqpOqS&CSGVl*mqsy*J`XgJBE@H!DGctAdhBmOr-3pTp zQaKz{ha6X5k=y4se;d2}cFt=BKjyj|VK>XNKCg~XocYhEnyhFFzEAgRzWjMf!$}p4 
zlrMVmPBjv2PaZF<)tQtP+!CdCZ%__VXOXXG$RbFpv3Z?=$n@Y5;FQ;%)Z@-YR$YA8 zsu}px-5+gMLCrW6`0C(E*(Tmo86zMr$v9)W>E4Pon^_Jv%9Jy!cEb~{0)>8*-~zvA z>jGT~jGzCqAVgOw0f}`^pf*cZI)x`1oYZXH)^G5QJjTYPf|X=++30>v21fPeEJ(4u zPQFiV9@mZ&D{slkR;LqXB<@6ac zzWSw3@!edFg=|Vx?Zhy4CI8lXsUzBKlU!w@i>NK-pQUZSR(&xoMZzz9_Ya_7=n&OpFcWSkOsUD-Ur(xb>Y>yAvB z^=f?fe-qE2%RA%Wuk|S=8QBzsDK#a2r~YR{{1;gxi$>sUB%7I38j}=+=osLT1qNty z59v96{b7JM>nw(WT>0R)-xNWZla_+@(#M$d?wXM;s^{JAZ;zUll$u5UT0xFs4o-?e z!GVB2FFMZd%f8fcphS(NS6g3hT59ulO}PdpHISD{QKQ-d^%QLt5C=qhnFDF|_3i|SBbrZA>iC)x^+DHl1Z3C~R zZUM}{vCOz5tMK=G7=JZ#bn@Tj6~LU4k?F1;)`8rSMm&Gjv-?ZQBDMz6A)+>L#zGGOAJe0Bq^{U$y~k1r^&JN)CUzBhS7cCm5sJqUit7 z^_5Xke%t?mLrV-TJ%EI?bPEh!0)muucT0B+($Xc0ba#n#NjFG$cQ^lsyMF%{_ulVa z>v;n&X3ae3?0xpx`xC6=U$He@5c*?fwL5ZT@I|==( z)06w^K@%$dn!7Ij?~>$z_>KZ206+ElMMLa4QJ5~H^SZu?%;aw9mNogj>VJ4~Y|b_; z^xt{l85*L`4oYNuY%ahP2P$uu5;d5L*KuAsnpYl<_Nh4|!u! zmeaoCLNkm%d*C0J9Gi&Fe*-OhLze=cduUN!6Zoibh&>=PP3Cah`K zf8)`=k`%(ZKO3xDHc%`(d9@G{3sC;h1IS&6YyQd=sQtsC3`91m5JG<#MMLc>Z?lq; z(q|D?XQrzQgY1w5k^S3ebrAb~Y_}ePMF*%6?I9p|BIQRW;GLcr)CxgZBY=i8*i8WF zxDTsV9U&MtPmd2aj*gDmy3RKLlOQpQ`m*~I(i)>vm&tCr#~^Z2Se~*WZ#e#ka3SCG z#I5JoV3g7N405AJoXI%y+5+C&@2&A>Y@!$b$~lSJkshTYmiMF801NVv7-8T_xs+lf zz0LIM3O@WDNym3-@c-@Re`n0?eQEt(O)SBywwV-i3s+V4>`u{#q>a)_zj0Njw|}#T z=JN)i^Ok3U{*SII2b%d}sI&qp=Ldy&2bgIvCsM^14j4=iHTzuuG(B4MK6oY>0c2(? 
z0oQ%=G#)!!x^#Q7D>Yvz^M@^iE7GaOdr_{4_s^|`|7LyiPN59oW2Gb~TXS$!F&+%y z+nqpG0aAH2Af{etRoaky00{kma+y?HxH05MC+k>c?a-y@_HCHPViYL@*w)1q2uHq8 zchUL#P|(cnPU{3GNc~Hzqw$#6H2LG5COkJE-{ga+%MJd=YO=Y{)oJtRv^H~7B?}bq zr*%CQrkh?M?_!lQIn=8>WH|nhZH$MoK6B9ny@EF#>$ens=4(BR%UqoSOt{T?T(ZMUwgqr&jS+r6OkPK9igINuOLbe|0h2j@sG> zl2rGzwlsO~$R$-5l>$=76!+Q6Twd{>#l+e3^#2^CQQ`>G=(%B2v(U8kWNTnUf700s z;b}6c!Q*51+8p!=)Qm4Eod z+2;ZX7p|d{ISujU-Bpj=Y-KK$0{#HC4uDSr-%51;-wwL)TThn~{`miuF~q)8zn7!_ zs@MF6+8SVje(Ii90MP4-A>Nl&nFl_LfLcMoI+DqxY-;X0*hR`V|!6hPg6-i$|;ZJPpAt%)P34KzS5q5;wvwLzZ@5W~1c z68aYmye*IKcJu_6jy`G2k;@*=`s(}6)qPg_bpxyp%l&-t0l>kxA>p>>y%fz0{>MOq z*biVQ4SFC`xSj zL{Wf=Zr`aj@RKq{DVdpO>>l26xz!)orlY054`*Fx3-W6J6#Q`=4JLF+(fglF_$gff z21KHh8h{sUt5p8nsn+JpctM1$@RwMph%zv@Vny@QBi68hY$sVmWYO}4z6nb2UGd8mdwB6 z{h5&t#CiJI8(?87q<~UrW^cTj9wbRy{^4c=Ne*AXYFe@G)347p zRm1#6-hk{gZ?*gGij{Qqd_?;HsCo9jwhjo^2zx2Fv82AW-;6bjAbhuAm-Xvx{(?`% z=fX(x?ux?vnZ1~FWgd0!^)Tu!9o3p(}Z zt1LT#;1uw|7&~j*lu08_zyOH3~rDu(?vQ1lm8c%jT%%LY}U5 zWCFFH_eAvU+YoJlB8PEy2=9N$R9Qu~7R&OV^3pUnCYkc@aB__T(k$m~YRZbRM%RpbzcHl zyF%?3{Qv8XDuHeh_Im~2w4_+X*)-Z5d*8mBV;h}aZ^NZCHLa@Lr7R&3q}?31K`u*6 zG3Tf(o`ud$Yt8Ynk6!-j7=R|hbN49fA=~jhsYDpQOXJBL84|t=Ss);K+j?(Xx?g&; zEBu9&gxi#=H3ZzZ2Lw9~3{JrHj++_X{|5*thpSF@{+ea}&E=M_0i1}|Z1xgM#P?Eh zpUaj1ewH<;*owZDE?Mmz*D3%OF*~&mrh0RplSAAUpFgL~@GO|K7H9^}TEHGtDYWr9 z$1zR34>BxmOz)y<4er8#<6r#OVo7A}k%o!YAGo<%7j1#P;;Ol4x~Dio@f%e`%jEA` zr63aZ#|qBeu|!}4bf7~0OrSxn4)TD~ly#|4^X(o3mSHhh!5+WJ-1$U>=T+%*o_kIZ zDCKKz_c=TNkeKYfaPxmxnkmHZjPD{1~evm72 z`4H@4;ICkYwBOB0)ywi=P;!5$OktOCf;9`6d-S!Fi$l6?AiGPlwdbtyT%EhsuhUjz&*?nj>Blwk zOmpqwMuWMn^}tgrjj6xifsJFM{qb*KYay2GmBf@GjRqijaJc_zFNNN zxEiH!+Q429280q87RdEUxNUoJU_CcP@@M~0)GWKvju7f|CQBgI9W6|fdq)e@X&=l? 
za33`m+VGdxDJTx?(mt_%tGX6yx&3P@o;}}boU~D6XDL2cWnuNp*wo1Lz@5U5qG6rU zre7wHQ|`4QX>qFEVaj=r?U&;avrXE2|&eXv?kPyC>`bH4p7E zYd{;r=4kQv;1Rd&yj$<)(Og}bCyejtU7h5?Ebs0((r%5}@VC-OpS-B&I#TthVJi57 z)`<0|Ww@0WLta!cd9#bnLFfk@F?XN3sp%hGvO+a=!v+(&T`qv42bt6Kn(aL=65o&- zt>SH0&8-Dci7&Whqa}8KJ95!|C>6Ps^mZbxQ8>%Uv*lu$P+m~?#9YoMk7_&N%7i1i z7petn2ayk-%ZxN^7#7pGZBidPN7_WR3e!D)`%u8w&c`PFv#ZOLi2q(r{LAt|o7{tj z_3#6MClSvrcnVFmtD~c8W1czBz>?x~W=aWVybteMr>BRCG|HQuEnE>rmEV6sOKd^( z6n&T4oUbn-3eGvI{9RH0+l=Q={L>a+T-Kf3bvv^-F+rjfc2c=*){i|M!!n1G)#DKQGUq;9GJ(sm(7b(>vDy_M5hw71hxFS?L`%E+X>lA zYF-HpV@6~1iL%M2pwdGxKP37j9EzIk_g~MuBii=_`YTU65x1LrT;}7JzaJ8MEM%YF zTojb4H6~+O`5Y8Ps+RRnZvUO9TzHJ`sdgSj+OWJ0-L0;sf4`|n)kYPVxT!nG zK{5Bu`x4JAa~gE1j^OYuQ=e!v85xt5Xfq~7&h%TVJK(nGp4s5F`CY(i&IZ1_^R_)0 zoy(*TPa%z)`GeQaSdPd2Ne`Fpd_85T?#Y*r=4071jTckeX<5bmRbVn^#`Z0@-LO^< zfKHJIoH4^5r+?@=j71Q>s}Obm@r{zwC-d0;k;-lqa@c%#m}{PWIl+)R^i0!(V32N+ zy*2J7kE1y#;JBl7QZLncg+!xLMNP7{X~OE60@O>{6pzQDw}6!eQC9o2z(1=FTzVk0 zBukAeX7(5iOx;f8x&0&beBNo8ONQtglZ0Eohr+js+-=!^7$%786dp#Fx|ddJVo2Z3-nqj~BwCEgVf2Du$=7PiEaVy4RK}4Y?kELWw#G>QAJO<-rky3NBgGss23u?5|!5zzz zeE|(D0LOeK@)&U&O-!;YAEe=_N=ZxIx|%>DANv2X0A3$dq+i>4M3>d^AiU&1h}Ay$ zYc?Ensi~lU^t!bsQi^x`dmyLdy7a^2RSOmllj%zuw;;HP6b`eHt0%zQ(D_Zo0)f7l z+wZfCqX~hH zGeKGmLF9z*9E&1)J6=-j3E%x44mr65i0(Q+rJ^yt{hU1jtFKbvimLN0?h&XXKI3AB zCN|r5Lv(k3Rp$G!_i3|+(K_xHnDP603ywd+G+JFUaZI=3;?e!+dW+y^r@8gczPo!-O zDbm8WU=KBXV!UaTJzJJyK7y^t+8oE(iO(^Q+`a|2zSknV0;Vl7#1%2KH|hFTV0}+n zzqXj0Vq?>vhnu@+DXq=YehoWy05g~$@b(HoZY7MQ5D^BoEEA&;H>`h^Yp(&!nLfI{ z$y&hC9~OCPdRE|&dsaofhT^N5w*D)*C zo?rj%wKG62>EP2xa>^@qx`NX^Ez%@@vi5n|K_n7v#Cff+yG|&tL`l`;jL3BOgb)QPy2?|Hol8Jodi`O zZSj7S2f09QA)nQo+=K~-QS0-p9LV^k#DnkC(NnM$;b^~Ab&^A5waGxH_vPQ)^E?r< zeW3?T@u=vBj`|;w$H9a9f2+i~fE<;u>WMQN&E;J;48Ks2>BuqwlK(D;LL>51_Vv`` zTumCJFeTaU5eWJF=6X{6P^%UFZp=4-ES-J33cA?=wEjOrP{R0(-X?ipcTixST|WP(qshh0 zmJG;J^z86aD58Fp{;=CGBxTfE*x0ZraZEm%{kyM8O*HbCOTJGJw;Qa!uV_%?w&V+` zA;}|jTF87Io&1g{f1&FGADd4T;5L9$_@#QxG#LBc4WOt@VobV0U4GcO$MW!SGIVQ+1ZBona7>jpsB; 
zdEqwXfP%E%4#*q*eD>h_vBoPxG15y(_0^ohF=md?%f_!FU@& zfZk|aJ}(I0TjEzNLpnu;gti6Y!H*Vf%jZsH7b_GBZbuA52z*YGqVezPzNw1r4&THG z!iZQ>fhMkis9ziWVMNQ>u!sl>eKx_Cgr(1poF3XR>#zMvHrtk`ha==D6fv6lKrG?> zIlC62bgY{$K5(8{hKstslIP=TQcFZG4_QVJUeM2 zN5S~I@1HNUlx^EmQ9p+%e6CmQ58m6&U<>j3+&?y+4e13p4yOq8AQW?mOnyrIr;PuO zm2!lsb$6V>X2>`}w@#&uO!FrX7h1UvWNF1UzUxCF->OAkq2*Ryfs#p0O`{&Rev8}o zAIjEU0Zl*Uz&<8bn-b(HFsdp1)Cy$dg~Im^kl`{Wi@&?G4%lgq7mV=nn5iCbb3G;Q zaG<0E;pL5ggqR_vR`RkGJ%lNS(JPq}>+!O#1(Csiw;&J6-@hEDOvQN^C2)vR3HmE; zjH0Xq-PHlqwG0e1jFNfH5WdNHh%IB|W;1%N$4UjjQQe~Q1+l$*S#An;5z~i%`6DFz z4lpnyo1*%;m_1-L$eh)=4VV2g?(tdU%ee!CCn(GdgbaR{ZgzXVW1uH?lDyp`O*I6| zdL|%K0FGD`Xew|F65e2^pi_vyE)cJKKqC@EK88z*!rOT!=?Ys71w8dpEC_-Y#lM@6 zEgCo@PJ05orZn8&OtZs$5x@`D6#o@NDb2DDy2#OeNlOITig^j6?L-Oyg^9na#@+G% z{=M10VH1#o%cSa%|E4 zQc=Cw!Ohw-1QCtFE~#x{+*D$b4qf#(fVvTQcChO>b-IamBJD(y1UoB&14koS`SvQl>*AofPzef=qLnxoC zC{~24ERs6i8)KUFgf)y<)8PcidLJ zP3CR_bjvW(iS%?>BY4(HMy+5UyU|V~UG2#_($2sPJzAEe;t%=ANaRQ`c4LDGl{QU&b3A!?ZU6#inuWrP2( z$6r_CyKu%7EiiaU;CF$F{I=L^BmPkQQ3_E*Lz5{(S%zWKcA>D@yD)*}M~9n<8M`*K zfgCElItge-W#M)tZZ{5FU*N@WMzFpOYWepw6@gOT+9I3*gyN5%h)ef%#r)iTxYJP% z0iA#&gVMBCvH(Y*1{dK(6n8PL5@2dgHkXp8j7Xj_tt0~ngO&q>{`h1W$6(5T;V>8m z@jIe3h#6zYosa?p0USh`5J7Pt@JgWlvN!iw5d;L2q(TcvAqhg)k0FQtgu~>MkxKTU#sDJ#AQWOHm_m!+0$}9-s`6M6=5@n8FvQD_w;32~U zwk*4R4jbxO7>0ik<;p$JqMK2S15_KF(Cx1AgtpV#kk6qkP1?Wa2meA$QGN!Jt=xBXJn`dDI;!6E)dr%6Ub`Ld}`8amLUkdufd6$6(&E=I27Ep(s2-5M?dvbH1_gGm-NV z@gKs+X7eY+Z&3?8fje6010PlZ)9L1Xuc9%z(ThKLW%#pFOOOWr0k>f4VQ|AaBrCVE0&6kP09BDG&t=++cY4=_7{+#_f%S8(*@{a{?g>TVtwKu^s!(p;jDQ2J3s>s3(O=)HH-b0 zRGbE9)Ur4F1(qONwtt7>SA-MqBTv2L>nFfM8^80V2?PXIlonCB8TE&TRyYV#83DD_ zX`4Tu_XHt)JF<`%mU{Jz*i%5mj|N6Cqalmr?|(4LLJ&r!Cew?+CJ=5-+2igV}#$?OS<>5_o`7{#-yKw_Y_PT3BI2ky7Z8iHTn#7avK zi5lZQyvot>%5)uXfewNtBnqXjJA%CHeXxCCDo|$cA(s$&!+S>`?rzAIzsN&x1I!ls zg&h-HErNuKCN@Ne5JlXWVlaU zuE?^-X0_fF_`%8LePB9io2`Tsf1hxa@P^_eQ%<&ua*B%XUuW4AXUVRwAN1&tZAzM0 z_1>uJoZ+~jhH@v4-$zD!K*ruXz1|o{Oc>i(oB49nG*(cplH**7^aC^J({GC+4j-#( 
z4N1*~3M9OTW_yFJ_1)JVZ!>HkadaQQ&KeQ_Uf}mWN1VRwOJD>xB!NO|r^lC9_~F6F zM2Nc~7<)Ay7g~`NEqd1j`bBs$8`FIU>q1dmeA5SGIl~t7^`~aKGDzxWa{ouJ>rl_+4#;ib6RIpM!xtmb$ace%&X=pqGlQ_ z{>P;Q_;Xf@@-bLDNzuxQHt+ma&Eo*p9EreG3O{wZ)OM0Md#b5wDZO>xzR6E^wj0NF zGw|7Jw9Iz;$oEoQ;T4y>#iODL*;e7WTQR{0q z!d@>;vp=l6OAz?(Q0zFJpnUe7!BC_C#6NA>q@%B;k=`glQAcIvPjMgGk;z2p5Q*X* zwbOup4`q2{uryaI55$$@cHffJ%52=$pHUqSYtxX9JZ8+t-*%$>Z zp6pCP<#RSVnBH0jUeaBDy$VtmBO=YAa-eLN^^;d5LljD6=&wkPPac4@^A7q_42AEV zC0~|zjBK``G4&W)8$4s@OR>O*7(wY5orJ~u?R&k*AhOC(en{U8wiyiT@~4vYC#gn( zPK=GOjhH_Iw#E2k8l&lfsaO`w6_RGyN#uwn>Hzd$g4$qkKbvIXLy93>KPEKB2(C2$s)wHMSlh&n7JcYf2j96x_lri}3V&6?n(VyWw2uETbzVKai><_~b^x zltH8_5zW*H5#bIRN2$i$PE%?!dT7Om0a(srkP4*`v^~z z_)OxbPZ7hbxu5|FSL;ii51~bcjV-4)S;xjQ4&#+@atxp3i((ke+xig45)Z_tI1StF zF9y%m3g4^HrWf1?CwkVpx<;#;J$|+PRs8idxXH3^0=V?})JR4Xt8HpnMxjKFAsxQ9 zHt6c!!9*qG$pcp$^zOip-ZCnE3`q$DoMlXJ`Z<@sNB}u zY?0&~lHmj_XgqJ@gSBOgK9-FF4ygslv-lsqYGt}#cTElL;+e4AF=3`@#BZkqp>j&*$zPNW)$C`^}NbCfY8?Wa>k^ z*6*#X{)wZBr?J&J+etFLyGqOjyLiVp47ebTCe(Txc=e+2Lrj&GSx^2uwN~=AsmgB^KQL;I~OBk#{r5Kj2?x*~6B_C}UNk zKZIoOP-`)vu|dJ7elRu=%ARaRg$V8q^cEYwJKIvl&w+&o4CX{(?u4@u8-PT`2v+<5 zfr3;c#tg~mgWxKI@=GD&AwjNA!c>9D-=Rb}ZNa3XkH^uC5KhvR??v|V6{%+UT`)osf<(@{hC+ys2m!bTNMF5 z;R{F%aM=-!_$qb_amLoOo(M*Hw~nD>uC3w+X~yQuZVJLTO8DT{OW)0A9h(f3>dO&M zk9!QJ+v{^+`Wor{EuAh3*_fzjtV~+qHq(R3A}omNP0V9X6p?ilH8C3BpF8L{cmYx1 znyq$+(3k=zqVh9sYrXnva##T^!;2N!7Nbdq%wN_q7QNixsQb)uNsMBfgI*YsvqW$N z=~?q^*por1Zd%1?s9TAqbv{?e`m!TZ9R%Z(7zNyyX5*UZbt8D&53oxGhj|$5C@iH8 z-O9_wtLkp6tCDY!%KJsUY z_?#|%kB~P^_1{zqE1Jku8BE9cY!>R(hD&q`10VP7F~0GC@Rpj$H?aE6&8$)8{*?NY z*@@d?KD(kXaZEO;4{UmZSt3N;<|?=sfQargDnYo7U&bpuj{@h*i}^~B=KSJx>zmaV zYpVy_qRMSz4p-~?M3$$4gj7LyL&AvYA(pqVXP4f~1-s^UdMy<_;qn1TQ4Y?=gb2O%pGlG?8>B=vk@gJCp;aapd&fg_h-EjO9cPv;Dho1w3E(+C5uIrE0f=iAoVR1U79`68j~xFz$y}{T9?K7 zg{hXY$?UUrp!lmHs4B_hgw>(0RZ0=i)h3i7zzZ{T4wY>-avu8Cft@W(QOIyX{~3rB zvMqcY=ina$!Fg%%!frMNPhXmziVT~Z%|!2(00MK_6g+1vQ`MuY*zO?Ofoj9z#Yi2M z-_jcNJ?@9Ya`dphldWk3Vl(f>6Tu}RziAC$o^)OWoQoo;AO%G3Anm%{m%o(8DA 
zhr;bc)sdU55f;%;YY2%7EzVPo+4{$Z$2mKc?}&{zn`I#tP7fD)%1fzlY)VR|8@3+p z>SI>`EN!&qtr}O(Rnff!L6Bzi_r}Hmmd5RSHuYiNwSgGLaym&oo6JTwGUx=*k2S~E zzyGHF%ho2w*I&Kx{_)`Re6FgZFhlQw-qUR}WlsH}VW)9ZbA3o`D7525IBUm<;w~Rh zaAe(0wQBwz?Y)mH>Zg3z3YcHT&PVOdz%xQ33>R!(5ttirPTHU*0VF5-j zNu@My@w)>aZZA`szs^&H1W85+wEF+{c#`-J;pz`MXv8-#tCpCl+f4}!{nDUT^vu{? zxI|y@K6Bx>o|oQ`4emxMzckUKv;KnN zA;unnX;Fwz5aEWW2mbNsqs2k&fWgFI?5Ij4$<^ZRVCK#{7*RH{OCF{)1ccc$g2|7x z;x{1b2qB6Qu1<04ktXF&zNO;G0#R3@o=6GB5|YZ@ElW?p83tomKs{w5QE-O>ImM34 zk%(+e%du-ZC4ZYE+XNsU#d62;0}=Q#dx=P+oYn*52GN&>6akZ^SAqt6HNZRe{Y0R- zP*&Vb=+?wm8#2Yq_Tb07OpWQVw(#>+E~=+|T%4$0%Dy$X*}>*sQCf5N$PY2+%Igw0 z6KNG4>-+~R_tNifQ(SRMq>NSN;iYd*2 zRF`RMd{|cPqw1YR4@jPFVA{TGTAc7x`p`@EsaG87B;cNlZWk$It@tQu_P!v-4Hc+<6w$}8`=ayTWwUKjo|ZH=zR7;a zgm>z(yOSp%guujfohlpb(!4ueIywRWaQ>LoqJunrJ~?vhaZytCu~yonz4}y5Z8m1m z!r(`~-outiqwga^j#R`;N2cI?M*P5hF<6e;NOuh*r@OJ_w9h&-zqOe9WH7$$)^zBV z?7fKUPchqUb?aC8BrSKox0?&$oGzzs8{y9bkB?yVD=mE0*EaLDG8chp9)k^ej9lz_ zE)5v7>4j>#h&igS)Dj9g>p9;Aa$io0_}qLaSiCHz1%uUKdto(4=vHQrt252TZjVvz zP^15k1#pJ>-Of%oXPIpY7Ns>`x&U|gmP97a!ZTLhk3FQ`#Y2K;87o}1_1$Wf#mSr4 z?oNMlC@`nT1r{Gmb;0n7ie?>ypr_ku5iW-@=gDPS<4&FgR5z0gQ*HS>^-8`(bZs0vMLc0JwljgF1*fvek9@ z3NY5vb&Fd6BI0|hN->@%d{c1j?%qT8Iryq4itOh`f9CXj!!VNQ=YdgH)Uhwa34_|T z+re8Ot*3vp@*l%(ket53Lx*HgXM&5x6X#6_eI#96E>hWlYx*VJCD(NIflclgS3Zvc z2<{H2J_*;cEicnbJK7f=r?bY?k}uISy-%v@`4CN~BaM@+|1HS?IcGuP`%i?3wAbOq zpUJ05>~sVgb)G(8>nq{HySBY1cG$H@^_*jMo>rK)nDzM?&%7xP(g;cOC9!K-w-e?b zJ3n(erJX3n(f^T)zse}QW!L+@uDsjBsOZh5!ljy=rm5Dj;JDY~;h|mTFz8YZL8o6Q zZs3AVZ_o?NX|>a-9g<7zhi=B!?Kn4`hy#+pU{Qe-Ye61tn}74^Hro6g zNa|j3m;(K{$eES8kIV?l4D_S`7{PBf{9r6Owv6V85GcIkT|)ooareX+#rNz*DVZPlMrJkz_4E%y?P zMR)X>gTWJF>s^?8JUMS=@hxlIMjgGkzKmbwCSj;f)*|-d^Dv}Y(wog~Rzc#FKNUy@ z3$I;VuT~HJI>jtujd)*JsI->)iHRHCKcco!ahO>r7gyN55{Z;YY>8X`LuI9o%#tXo$&{#s072Bo4s$$6%h9S@&OIi zw6xMdAZL^*`(-d>E8cfajU|nW{UGX=*7O?$Kthj)zqR}znZnmUutHoK37B=W&xQr` z8VcVmJl);7tVQafPWJ&O5}TB%H)LVyo2H1u6f%rAtq)k+wR49fQo@d!yoYC*cTK{_ zZC5)Z;#i|?$YjO1ih{~5jcXrQJhnKF03^rg!c 
zxvT`6oYWitS;fxqI{Mv$T2^)PE4}7#X~Sjs50RVn<--iRZ*KBTSKVeB5pdy|X@Ugd z!S5d}@-nhWZ0p&xYkyy_%8{t@CN2$|Isu8Pc@dC$4vLS2W-U7QV|hp{@p#AFBLC;; zY(@fs3|}9z7*68aP(ipNffS9WY1Y^OdcvgeeBjH4$5Rf)`1ZJNo@UE+itX*$cy_XG9QI>ruX--qU~P{V31Sl`YWIh4o??syoXPqtfQs9DMm(VyiuooB0W;EL_KI*Mo+xtGnrczf*MYx$%nCP zZmhKe!je9uBy)->(xkiXGy-vT&`q#}C$8AA52%(;~7E;HpKUa`d|Zo56hv6?zqEKwFwIf9GPTgx@q z@VI5vyKTb}p4}fvMMw$vaLX8H|5(MoSVQ5C<#CbKEfS30IF_)P$dCO*`>%2TALDqf z-?DTV#Q9FTKIyv9AO)L}LUW!>)kuVhGB2jtM&`K5EFZ2$hfC50cB@*;5Dcufmzp8- z1#%aoxoS?%^YDlZ$A36Do0o{yxQah;cV(bOgjT{<84cMJ)F2B=+K$M(vzbhjge^}} z3h7?@8oJy+kxXP&N|VP=Z}AI-D9b)(-f5LkR7&hsrNt`|iWzIjv-bks#Sdz|=8hdc zS`TL?w{urd*@Oz|#6xP*8zfgp19})X36o9rwr5(#8(igLAdR1J)cDKo!Qt_E`aqAs zys@L`l&M#^o1#a@aVIzuVP7Z`hsNh-5x268nHP}lj$_g#SktDw|n*sZw z?Ff#=La$~o9*jGu-;R+!_iLFW%ynKO^PKOa-~?zn4|7*j|5;jAGKmYPlIKyI3Wp_z7^?geQVIn*o3xI@l6cl?^AS63)Yf{TgsVX_g1( zW?YT?mC?b?IZ;)nLPd2MCj<+pzqQ5C6sABK5Bt^%M0$#<`-A@mb^_)Omy*u`-plAX z$;Mb!5Hq$MYZQROSV+n~8B;Lkl(u`mWY(euYvTFi^~6YQ5}MKH@&;2d#2tq-REne3T4w zCCCGimmg>r2Px2dWi5lKwk1i-B$1PD_KIi;3;#ZS9R`5 zry+)KSxb{}Udw}U`@{BlyUrvGBpRdx=p)_8yU`6?a(`z6f74FWc@Jf~`p^!aE zcD!MKzaP!_;Q>@r;b-j_GB^^L2idm!fz>N+Szc}#C{$TJ<`iWe8Ed?s@jRtr^dg2C z=5i+V>~|8h@m(hsc6`FJx%BU-!Zo;=VjX2H@FWPL|KO?rVe6rn1sd3I@sJ#jr3eJ` zY?Qr5L+CYDXJBg(v_jtIuimXE>~EvlYc$+-m-S zm_6$sGIz^1&YpfMHCugUahsFJ7&|L`V{0DI*Xz{n=5A7E|K)?$Tp`xvFa>%3mG>=y z0{=1_mwl32fv%cFfzprYZyLAN-5FeJp-nE0>S`sbHm+9JXmV(Q-?y*3|0Z*@$0=H^ z);bWMC%sv4om6t}?F?-9!LR^nXr{X^O*z1a9K%{hRyAa%e!Zvn9XbGD=@<31is|2n zeJ*8z`g*G)Rp)Bqv!2^Q^|<1+-s3G{Y~(66MZ>2$xBcnJBN{-^rnvzA ze!<>8XK9`nv(w)QR8DbnrnCUbWsK?KPd{C(c($*up47{HCS@8A8(DY8 z{AX;KsK;Qk57aG^YiO0|B-L=?v&)_Qe>4Raq$n3xHJ^urxw1PKj2E-OSpRGwkgNV3 z63N@4YI|yPhBBSp@2?rvcS|T9lbt6V3<`pB`Jz6k_gy61VtpOcEbvLIcKI1km!`t! 
zQGBn#`fyX%_z=1@4uf=b$gVLRVH1Ax1DTa6nbp)3wvcO;%hw5?2i-=HfKMlk zo_0&0K4KEfplanWdMorJJ(1r)X|hP=d$1lqq{WU;C9;QJsPSi%jQbSX)?Sno4aV6b z(c}DAeHaC;GTN!2#R6jkkJ39H-ya1`Jmh)}c5*!`!e2?jV`>c8bvff&nS*c6+Rtvc zt2KC5X|nHxV?}{*;qFZgfzEygI=R_~1>XyT!-yY-MQ=xYT1o=x zNb#DDxwA=#|X6f_fx8wqq-y!z0UWfz>__NZzY!6r)>Zk6A1&_v4MHeZ!#- zEKSD&F$5=0AHCHx9|!P7>!T<{ASmJ>A|6Ntg@`i1bDsn-$SxadAGa&zSjUltkKrEx zl(Jf;@T7r|$P}Pw1YgUN{Z{aLIq*EG&MSJX(-O={w5~GJFlKi@qGo8v9A& z-z)Rc1-n7x*$Dk^om^FKRru-Dt`Z7wWwotxJf_lFxSw7PH%T|@)?D37Up?$26APQ? zkG-a9HRKyi(TMq^UDS`TPpE*vG)~{DwUv&i&y<0Ks9|MTbbsY^({xfHq^$poszs~l zV3C(~MxYdKdJNQ<=!d9*xb%&_wb2!&y5;=0vfpd%7M|KNF8D<2&u`l+b4EwB5sAS1 ze-jul+GKEDrm8K7t7_C=oNffcDWnw3*hvrg&d6%bm$fuEn3go3T!pG}c)4NgL;WDF zr^MMN=#M#ai=2u{87{?0c`!I_iYTk0h)*n`tYvt=OO7}nj#<53hSWqxg8qaeDJQ7? zE<{sNXqJ4^yv*+YZS)0`&nQ=Yx_EkXAQ3xKokpxg#xJLz(K<&ZRf9*J3oF}YnyY05 zf{SGv0XL_LQ0!Rk91JIr1wa87savrHI85cIt#qAKG<@yn>nlWky70mP(-J$E31wDH zE3C&2RxBi#Tq4s*lRLWeYFHWOjty9g@SAUx?fvdxtCx4VeQ5gDOXRL4x3zj?r(^B~E#vmB23Y{(h$h=*u8lJ*xe(ETHBo2SYs#^_p z+8hABoPea+{qU>X!_UoRRF-R6ekt-=7#KG|J#$g!D+Em!db)or^mvW?%mfgRCqAmM zomN#60_&WvgeuJyz)^n*mu8!tW@=MJiLI+L9h@3S+2}1RP|f2kL7aVDdcQdSpQVlm ztA8#^xeJg!W4S!~f%FMODHju8n_Zf#!+=jzF4?^%DPqYhnG^?(zn@~Mul2{?05_$l zdrR-__HdVLnY`V$IiFGrp9e;d(QAh=6Nv1t>P*&;iy|(nk1DyzD%=UGKYAnUQ?M}z z%=q}V{SK@b%A!!%b{l>@Lj|vNMM@h`_apoelY4rE6)4>a|11*OjF7_2 zAZoZ{lowZ8P46Ul7rH$5>*WTHF<=xf5j1a~lv$w5dF4#T$XMyR)HENTbKG96zTV|9 zx1V_(8u7+tB4)SfzB`BC{#bIstBZuwy;o9=P@OR+gmQ9~`NO^Z(1BQE^i*E%?x{Ax z9!bl6Xt$Cuy_dyc{B8nbk1zsXM-k)!HjQXpJdxY~jZ3dyRsbnf9v&jIRbjAKep}Ii0 zG=7)D)0>d;Zek0<=@RX{pHcjh04Fivuz8DR!DTCh|7_s%&ap-4n>2S|;V$i`W!5OChlbcJG%AeeD%h#|K@u1|AMb;IC|o_>l1CEr zy`2cO0eb&p{G*pXt_R~St@CdGeq;9k9nxBg#+Gm@n}@c+HJG4kBmCOL5#nv&U(;6U z&FR^Nl;QIK82iewD!Z*+)^dr2C`f}eBHf5|cPZT5{G`Dc#-O z-RYUU-?#TU?|$9ab-efi$n&f@$DAYP7~{U@{F6kNiOu6B7Vpdy?_+I}>fdkFVk-M9 zY%wlWUwW!wi_`)kAD(M3cOoCZ% zS_vCj;e}&$;#3TMI+{q{_IW-7Y)rmxaeEgvLXNMKu6}ir)J-T|*kh8=W*Agr#k!NZ zsKjrhJq>fZp2rLZ 
z*akG3N{m&=n(%Aon>+_VM&in|NTyvh2;NPkVoa|_6{)62Slfs)InC)uM@JmZ(-4i67ML40 z1iffxNT(nd)gfG1hcfTr`ivhf>A=t7as)|kN2BT)!SbHA)i$0&0(({G2-2b;&CWX` z22RfJucx&=x0@%<{!G;-@kV z@yyeXpx^k!qi766!|Ug!oyWm&#r}^R0ol9_qT>TJ*{ZbO^YR+GF9ej8SfUE9HVThU z&18w#U)KiFqjPmMX~d|YrmLpd^hK`5ZkTE$rTcOOSm;@&mA~E`dhzCkB%xYH4rluF z7gehn4L5rbtBX@@g~#lID4ZSZga^4(?6AI2gDMgG)vn=gZtps}#C3qY7%pzB7r&*a zke(d9h*d7uM1!tH%CQV#Nr^+A6~CuyxO_-Ox#3>nA!Sy#>Zx6I@FAT2?chqcTmR4< zhp|_Kq8lQV1^Sx%?^;7h^H-DIMLLsp^3D(OHgF4FOjRx#gITJDNA-T=PP;T6SFbI| zalYw8kVCyadj*KtO}^ip^v$a3)F?R5xMqPXJ#|EJ)c+@U;2r)#+&anYBKra$_UQ2* zdIAsw=v92}dFnnp{QG)EkV*5o(&ngDAHeDnlIz*Ew}3Q2!El~dsl%jIg?V!z7Vlz6 zMRt8XMLPVdO^CFgZ}$At+pEOk++%>{d2x&K1GhS-zf6OE;qeLPUyK8rJ4*4pR#$?Pum!fq*+&>9;rhC>bY!H*9Lp+E>w>WLH|TU3Da;N{neca z!-O9{UcEn=Es^+b-__3x%vPv;mTDADj-0j@?OH+^+_4v1X(OR*Gn$x3c2YBdVMbv1 ztc>MXzN&hQgMpQHnY7NR%|IM8@D-$G4U!L{FH|#B4Z@#UDy;WwF&yQm{MLHNPgnP* zLz*i(M~%@|3fmlDSVLGM6O<)~GX%Bu8WcN68ApE{e(L{D)k@Dr-IN_opBXd3Xcga} z9yWa#Xz{yP=!9Woejv@O7Zf%ChF@fJjH~c)VA?fTPzVOEQd< zB;7_#tPa3;A$s5fZHgpv%A)#uMY%ZU+hB>55~2V5;P?#=Gv)@$ ztzN-h?^!M0%dJu_*A5&tHMMnrVr$SLeQYjpwI{YQSfS04N%qxtvDNPl^GU-|N$~NG za~mw?q3g5m$|oR<_70!J=EcPG+}u?h%H-u>(`(@oM$v=ICZ0Ta#K+M*vP>3k_t% zCD?i~wD-%?*uk^7YF;=-(KN2%a+U8&%or2G1M3!oCxL+GF^PK3!cL&j%S0q}gZsn{ zk5`(|^q$~uZpLWWsJDd>e0?8qf27j)&X02I&o^~mb){>#X z;2`+%#D1{xS&l!Z3a_}`{!FaDm3F!1$BPD+B1sZX+4qtdFRwG*tiDu;$lb64n?)E( zA}C2nP2^fd{6xE#FxeF1G^k)OY#b&~0FY`m3Tk|pTMzH@*tHV=9^BbTcD@;R8wXHW z0M(fiNi3@g7x-c9>FF7WFydPXBz$F6@HE*&6j+b@g(PngCb($ltt{IasLpDKmcVq*oy7agfxhu*SHI z>!g)(Vb-mDesu+Iz}5v!80aqzEPoY>`UV!P94LAWGl-?n-gZw9tzmVT+JDVk;kqN! 
z*6#!xBi*1(8Tq^aL%`f{d{0n{TC@EQX5Y^LHpNo?bZ(&-@vHR0vg~#KRB9=6wfwab zk3#SCjokGC!|2A|zHxN>CN+$al2 zKGJx-Rzw~7-+xLC2dw#e<}tgd8J ztGsgTFOG#@%C0f8cQc5dNM0Ff-+a?nod z8}%fM+xboL7be-l$1g|h%U?rUN%Xuu_e9i|a{w`sx1}XN0hWnq zR66!B!4+E50XomU_p9JML+k6N=&n;B!)~<(SkHb~3?~)=K+iR&%ibJKyF~rT*aq^g+@V4Pdm@CF}f)(zLi@~;(e=g5%f4?HD%Yz*QsE>+GoXFC*D0^z!ghu9?l*0XO|2kBuvK}U=(M0 zM5x6G41|Jq>1(|=*vtE0KmvWYqSKgtq%`X^chl@YE7E!ZPhRaH`I&ymHQiQTOp{;L-t>}c%z z)O-u!g@!H|9se_MnG%thmP4Csm$6@hw3!uv$2iDB>Za`38mi(+pC(Ft_~?TXTY4=S z$LlR!7YH5Skyu`L&xnykVEu})P?c+GT=G0D8l!dD4v%OBbcdn3%j{4(C z{X({m&K*mDBC5yDr6dbR-_8BtM`6E~|A}{cQ zbC_eJ>0(r+Wy%slq+oc zlQk!JUs;W4ol)D0(DU*_Yk1*lkYrWle}(~u&~3^W_4{~`^6ZyBbxz`;GXu1U9CQr6 zylXwWENxzlPTnPz1Xg_ZH)ELjnm1pN)!7&j|?WzB&pO= zn5G<-ew~Dm2uG&aQb189e4HvKt?CgskCk_W%I0<77mL)|TE^cU%0c<&=VN+1zcfYZ z)RNHzhCpNFdXVk!wr$o5bAl=b3RgPE)GR z!+FILe}DhI4;)@8pTd?|qWSO*x0GiJe@rK8whz6Vo*+*FAo#!I-NBF5(MVW^>zO!# z;yxdM@4N_y7E>KCbUAV|Z(>?cVRQ;aNB5YxeAnc%Y-<>f>c)TvWpL!=Z)64F16?9> zll5%F6xGLmuU*9SoVvE>KS9038STP-kN06r_L*Eo#JDOaJ``G^zfZhLz2 zE0dz3WZaJaPQ7aBj_M)avIga~@fqGsZOmvZ{VXJ|_D*h{ag~&)ZN@)`D{uG|wxMEf z)ov1_-Jtw5Y%7cV32bb~N6f3ZfWsRecSa?U6s>1krmm0j?U>kgJeN6XICkU46moL< zRCVw@72?fcfut`F)Y4fpA&kp6SpjS;v(qrR8MHt9hhgf%Hd(i!nagn=X6 zu@c|cb<)_3TP=2`s(=fAV3%WuQAC^SB7Y;_F2fU}%fRS5BI`Rma@awnGXRGHnlP58-oVN9$ZBBjw6xj_u!!bAJk%9bzg!3B7S*Q257w zaEcM8h(Uu}408@y7+t&7T`x<)b=9;z5090zF~3zh*}(O?zkFwnNldFhE6#5i<2K7F zT4Au^Q^M*SH`@_de$o)F;U%JUuqnV~r|{2#$)_@)`n4bp5UQXYWLNDx+(C~B(^Z; zvh3aEjc0IC1$7%;bX_q$mpTt(Yn@_kt6sBo+U(}vof$F8e6fU!ErACeS?t;d z(*_m2+Ijbq6AEvdBw<={%w@R0pOn!czMHb6bOJg9SNR zom>#?H}`zY{WPvb`;{jeP4;cOne`{Ng2X%E=Ao?ma3(d%wmiKWNs!moD4@>*Cs~@M z_d5cgRmgX=4S4P@6zm_&{L^e*KJs0elvWY7=Sr8$(J9l-BIA^KO8Q);Nm)XT%TmQ( zH84Y26FZOJW^m?5Am3|a)JJ)?483-iDcdwL?X(8;bSe1*~udKd4!E>3^b3K)e1 zRaxK?IosqSzw`+uu1HDtIFJG7itVCS{A|o9_}iAM%p{rO=c}q!P8#g_Mt1kNH;4>k zE;V{Qw=G;R2V9(V6jF0hnz>-kC$9YiRb-w={Uj@eDmEUA#7v0loGje}R0k^G^k1^c zPG*}<9|{FBS__3j!-~Ho)g?~_q(oiP%z|7_H7~~FF(Hqi30&^#EZBzAW4?khfA78j 
zpH_T?z5=;)dU)gu3+b#ft|aw>=s11NJnRRF-{MGEU#8`9|}Ar?Akb7Lr=^J+{E+)5k)gtGy<^FWkof=gi_$n24zHkoLz|T*{*TiIU*z>KW1Mg4w5eGg7*+ zHb8W_-u{$q6ao!ANs0tzM!K-q(ceO2I+9+ zB&>9fYab8~C`pBq<)Yq!bJk|ki~D5stA?82ch>LJ(+MU%Z)tFJ#LzBx)d@FHV%Goa zMI}X>lCLfpgJ+I4spzFypoU{8=6`=Unoh-G&i9^w)i+({(P${vc}(}~CVQ_HwsWro zR~)mgUf?4kAG&y$jK`A0H?MaHc?5$|w}@f!Hamk^VfEMBHk+|J z*8P5{*oiBJQQ4|doX`C?zqA2PwOu6o<1ynfEuU@T#kTw{)4z0H!lUK2I$D8Aj;L3G zN6JCC#!DB8aoiNCueb(bmy)P>vr1^JC)3rAxqw3_;2qLu4(CsIHCc!Q3JU%tCgD}g zK)BufPub9H6k=t%VbjtMyaS>l$20&JgG^UFI@E-<{Qz|0$&`M5}nKz{HzeSI+N9;Ee31xJscU4}%3N)oG; zs<48lnLbPjt&q0Wg^5rm7B&>SXTe|ustxw7^>S>!Czx{_cOXuO-Q1g*pREU$+M_D? zLWI!^WL8qnEd#MKwwgDY(278IzUCvo@EVb;9ysMP#>PW*rP z7<`iz4KG#wrCG&Qk^^aR)%3a;J{PhSLSkPi1(wFQzRhB~&sWAjtn=v%ZLs;{?>JRU z`S+UAW}*TebPshDt13%nOkkS7z3vP(czct$Zx!<{g^M+Ij|S3xt922VY@sbO;ncKC zAt1p!%_@;WjSYc*Twq9e&2}@TE!$GAOzJc;VI<&5UF}8FhmG1y+XpYKQ|F#~D;7u- zO84NLpODw#JMu%)$AZ|>5tSb22dq*gR0xnQt`sX(-W}|JD1_ujM=Ik_L`38|8h-3AnZ?s|>CIib02I z6G(pwW(;UBsfrS*iW{#KUZ^x1JJKkI?xAtC274nzN70gTv>qFKUdC#)o<@bj>6Crg zUCqMB9}0f_Q8Uc2P$qKel3u03W+na_d`a?4PP;?>y*A+aG> zL4_)(@g1xQ>`dowgC?#P?mXion@L>HbU%U7yyJYTuwf>2sig-P#K82Ienn6ag={{T z^lM3pFi;XaH+hans9u#3)bj0ayi^?OkuVg@lfnK0RdS?`4(MpAJ<}wFs3$<#GTB4k zF(hoTN_w}HHID^c8;ffm0IJU<=jm*&Tn!#*-n~y88ts3G3*|v#pEEmVDN$cx)3+?T zzGzw1kFU_G(;V~jAQ_Dcd8{0LgRBkMbu577L=kS3E!4L zmH}#-K}woYWVjVc(bB}L7<0_Y&(LD_BJn2(-q3$EKOBiPDk znQUImv7+z|)}D`TYcE%_Qd|~Lt;UK(z=BUg40DFm<3A?c#_U+on7G$JCgvGrvVVv@ zU+wwU5Q$~j&Q));(HKv|Cg~zkj5`|!_kBW^CM*|JxHMt-@(VDe5%Y^tiT{Cb`|HOe zUTBOh8j}=N*>v6r6~LtDmgV!T8%?f`d4HSMnB5{47@9k-AjvT)15Omfs81`5IsI+a1C2eQ^ajNmcEHr&8Fim*&LnegV z{+!+LDIth+FXc6AzS6vSHq&n9tU}HU?MmzT)3yKscq#+R5e`Ih@*6Bz=nx#EaO-y1 z6;ka%y1|iIDeg5?AW>KfrpZrp9xv}qFnVF3gw|Z?kG~M&SnncV0BOC9#vVmltXm>7 zw4E*nu6;wJ;UY%8Pz(2<=wy3PDqB@ML%CS$(0Ta}2I#Nwe?ltt1#PzX@0*Ru3Q0iO zR+6cdLb?)4KZ=y=k*xb0U8d#IHl;EXzc*^P%o(F7{j3BDZXo2wf;|=mqrW>Qa*rha z;=--040#N&&gBC3P#26MfpJ&C3e+un-x+2ErU^%Ci8|%}Cuh%JzZolqRoYg2|Aurd=Ntj%B1sOjp8MPi8ft2EpS#QH8$pAiWyARv 
zO<&Y)B{Y5Jt)6^pFsK$Sw4dh0{!R@#KQu$GWF>v3-FK(8Yn<|_ z5d2+#eqCxDmUP)m1G}qcP_ZTlBy5F4A%q-_-M!-8z)ZwBR_20t2m0I{BZck`CJH-a zeUH}PuVquRU7pZyciWdR)&P33>C$3sJfyC>89W*V_f;!^#Cqt1dZLFSTC@93oeo^R zaA3;yu#w2nvLzDGHI7yuxc+&SQ>kF@uIOc6zB9G23SSPu*E-C7L2Dsrl|Qhahm1j# zr^}qBqR3-fgHp6qZBUznW0Wqu}O|0}VjIHTx9AHKVc~vaxySwvwQglan)% zjU-p_TdnF@{3y~U$>Ps4PTvAbL&%7liCzKf5$o-|^2BsW{%rcjq=2QtT0_2jI|4l! za3*G0ueQWq6MZYkhH>M3M+8-mr;sHIem0-|NO=^YVe6b zfSl2~A*fr?VAbvgqHa-v+{8)5Cnt{oqAvm1Z}fa{)l3LkvxK+i1=cnaKyMhB1m8tF zn9np#ALxz8=G0!)a=zr1ii)N+<6%{u>2p%0>7gdBx6 zW~lE)Apk*JtaPO#G0?^I+@3?ke&%!fGCu*c%y7}GL_K;8brKAkfqvZKB-msWU%(57dl#&H6{?^k)s=_B}^}Kx0km$fK!CVNyHKv%aS2F?kaSi+< z$1m`MR7JVaEpQL=1GoWjS)!eIwD+BMiYMLOr@Lx;h@p( z-p_MZ>@_Rci62t1Xb8*GqzGs~G_N9*cXag6-qMNM5VC^8YJgAEM*?PE{l2946;!|n zk&I{fQlFwR@cyEKa?rkD;S;j+s)H&jgd4)p4)@PC(U2SX4X_HWd;4E$`mBNUwk4NI z6jGgUvs2U8s}p~oCSpbuh#zWcL)d98)k_1`uJp34j05b@7j*U7^sH4%R|(P#<qr+rE@E84; zZEy;vXAjfdC2O%3vAI<+Vz+ijpkU;ENg&eqkU(8QDVkn;PG<3ke#=Kn9|p$;q@$W!jFId;4M-|mww zWZ$HZMLoKA?vJ+!p@ShX{?{R}-OhO2@>yPQi?Q7`%g9_%uWQPx?R3$SeYNlP*jScb zC^%e}<3Cj8&L6#9&11baf#s13yLe2~#{oX-xXa5nUp{pZ z)gcav0S8TC)7w@t`EZ#!r8hw8W?y@Gv$hxBE(MYqNuC=U8xE%e&gR`4YPTGT78fH9 zye#-{{=BX@^E8KRrNi@mcsG1=vJlyp6+uAYTNM_VxTCJPBn?yFiAGS%Pw=!$Est8Pa51TgYb{ zD9P=!SQdhrIR)jKK=p33xD4N` z<^R0dSbgU?w#pqKo1#hs-9rXYD;R|>Iu=4kWJq?6_OSRu6zsJJK%mnip7fY*6YwQk-E?2bjAK6H)GqY$`Q2~# zKBI0;-tOY<8|Teaid%nVt2lBgTkyN3cU$jH$o0Xw$5=|+ZKcT`qzM2?Ju~nM+V-p6 zDu8gt^ZV0j)j!X4ZTdESvXvgChz8Aj!AKw1O&=xU#dU)&WJYAbAVhPQpjWGj0Yow9 znjpf}y$qHruhb_HDkUZOz0lj+K9_Erd&1Y07V_f@(HegF%+b`1=lNT}~oYJy9zM~%9n*X|*-@5MD zZ@9slIi($Z`fu;k1Dg-HU&r}9yMCHJaqz?Y|1UGKMF<%oz-r}vwXW(~SHxME(P&Js z=9RLV{?t;@SNfvaj<QNn@ z`O)F_lMfHzxSK?~KNr68=nMZ+9 zBS4t-we0Lt_cbE}y!P~{`P}|Dg~J-_3pT_XvXau$_jPr3U9MlZM1M|nXxC3pj5qq* ze+?}Q*Kr8=E|_nXN3BS(Dyc(EJ4*VmUI3lo;=wZHb5qJXd6T%~ zr$+A}tz~316N7?I%zn=I^*lp`F)8NO)&lE2FDXqsqEowhKwokoCdIq)0;_Y)o`-SF zdhc>`84C1jPDDQDB@1~M%;vrO{?(zJ$ahy{PKBOzCHEJ@afMs#6S~(jUUX1-5~>K? 
zaPmbqiNFZN!YSetov3`$YSt4->)}<~()_$HXrGuHFMXPbbea4Bt8oWI+9!KAIaz=2 z=)kYyWAqb7!Fn>C*0X1%jXs0J!+#0z-|{?T!Ytxk8K{IC8)F)A(h^IWUQ=G`aHcb- zOOpGM@0GzcZ?4WGD8$@cTwR++V#Q-c+{QJ>Q7BrU8%lPu5RXkxqS8r_GgU>X(_6kOzQJbjkt6)ia@hjljfFuCvvg0Y>M0z*ECMJ}={ za!a{`Ipb_W05WV43yZ(di7=R2ZaUhT=G&U8BC)pa<%FDMgccTZ)P1?gMw|_7Eicnu z%hc7U6(YK-@oelRD19fwJYb*2sI z&~|jCQ<&>$o2Bg{x=KpRI8a!1_{PR3(LYg_Nr#V5j(2h$xX-rAewrMcAj)AOp0Q!@ z)0l(Biv(XZih!&IKZypUym=pwr=s9^D7VN0e7N`YIj>H#8Oa#6%?et?rOxqLh; z!81z&{|6)&#wbN`EI*)(ffI^mxmah+uy^l}jD2zDB}7HiH)yy&&QLeYqp{Lk_#ts* zTSjYl1qZRxH+gu>Ew7_0C7+)7+kQ#WzY(nStero<5qM@bla5-nyMf_-(QT29UWwg_ zCMZ}hT00PvNm+eH*?D7L6}4%Hf&M1%@R%gvmS7wL6+z z-Av>wMCjX#WvFUZ>%IHoa1{eT@Gjbt)Wi>SjDcS`4Z-4Z`Jp7DFD zo|PAsan$D;i+)MyVPNli;X59WqwkLvVvUW_5|=_RpWWax6P79u0or4>28@nPV;dz{08I zjeH5A8nxp?PAHj(4>~>mNLY=-VNHKq8^K;|4Kh;$da-E-7@eK(+Tu3VIm1D)No>&Z zvfMzhH~}iFgum-0L31o^>-Wa5I^xyOIR!myJ%wa^`tL5p=Id}lGTpDGFy|hb$oQPs zsoHI)bvA)MSBs;6*e7p(XsZpM0maWZS1+v*cY?z%+una=_ztWWi~%Mn;UgoGpErL# zgTC5hAROQKHKIY-D_sQ^-=0Jfqw8>;Jf8=5ZaWAEdl%(q}>`hvA~xU1?GfyALpMYP|jy2Xn7bSs|*AvhXMadp*(S_Ez+ zZE9Lf(mKa&^c@%;{SQ0SgvH8F`Zg*pjKEto;K6mksHVp4UOUsnJbMgT$wI0z{E8$; zHH^*eUrt>n4~tJ?vJ{`rKw+&G;aqt4u|`bg3(KQd^y&m*hlG-~EMnU&{C<1vRlJ;m zEV#crDou}+L3cOXS)R%6U>r~?oouv@?4tg6>K0IiUShU@l2B{H7 zEi4Mr=M;!Mc^E1=1p?=*L^z`{MZ2?t&xgwT!%Ow&Zvn4}*y-rb z!lg!ousERWw#1 zXcx9Wy10UmCvo`&bEq(@=p48eqI5!T%H*s{RMq++)8RsyHUuw(NE^Qe>iZCtb;rf? zuvHA_!;la>j-|Tk)>Ho5(D$ME!C)b$`mNO;cvh?`_vXcyQ=|TJsb?`LSC64xZ$G@D zMTBS?gb-&&8+uu=yoaTsi@itUzvNheM(!&_*V_R6bHlWd~P z7^~Eje5|uUELI#t2x`#~&E0q!#Ldyk=rOu43QYIy9Z}41Bo*p-W$*pVRbBE(9I9*b z{W?Y?FLV4rilV0L68g)hh%=*eV7*zLzMy%XQw6;yG`xc2m_m%Vt^-Tf`2RMG+ewj0 zA$G6hcfUEEIDLbm6GTZ;-Ky3bm=_JSArT4NIUrNhh`xMu^pPQTTYR(v`P^#Oo~k&} zwk5AQcv|GW4-MiSvPM#_B>5O+Dr%A8Bxkju5Dw%wjKF)0!}`mryVCx&Rk-17O7!O! 
z$0fNq5wRu&zgKK`py*pyr*WdI{|ZC?E!?xm&>K|N`p^Ct`0BX`!F!(t3vYFu4w{I> z5p4qcfd{NJIUdfVyn!Ef|HYz5A>eY4W3A)79Rdj5cGdP-44dc9oVlEQ1R*R$@1Jf z0~hqxs|aHof=~WdkWo5}ayCBq16_QYm=j8Y?wcaw(|=46<>X6Ha-qJLlW%&TkAWg} zrrr~->`nJ*Yw*blu2k|Prmoo!UO6lVRPhB#Odv@4H2V4-)1HUehmrs>oO|za4)^`T zDjw!pIZS<+8@6#U|Jy2EWuwrZYPUZ1&fUM}w2UwEw)o&!2mZo$7Rh%`+DGO~QDGn? zQ;+-xnM47fO`K{k1C7->ojWq-swH&m|A~(v{_4P{zgf+osD`h1+Q!0>1GQ@`dCf+bvIZ#Lxk`NX}$4HgNneSJ6sa_(`AyIM+lY3ZG5 z523f7)aPxXB*Xh1G7d?H z_OK34y0F4_v&ypvqn&2bTiMk;{}%l1I=mx=G@Q!Uk811aP>A}Wi2T;PhW!<*?qR{a z0Cq7nFV?of(h}n#v--$L>r*7+^U1)6lAnoWm~XaEd&%3IRSS%L)^GB$P)1NZcpxhp z7z#djH))1H45D}hB6_J29v%wKz&to;TrH!VMd;bg)MDYrn_3nq;fLYG&x`h>$inRm z);7`I`S_;X_k^ukbgE$oOYNcJA0IXSqzI~IJbW@fGo#<*?(W`hUsqc@zjLE4T_hIp z;8l0Ef}NmKUi{ucrqO_l?K6}&3O?5d1!?Gi+uw6xQV5sb5CXOZG6NMo5imCpEMY!I zW&LKFi&``{Y3|U@2{rhLh{H6*c?*-*hMdF*Z+3D)8y-Qx>tv6POT9^;Ua5O5T5%WHTzD*FrU(j@z8j zhI>E_2iXu_VaYji=X|&SGh21+FwO-v2n1?ak4ft}PUep|ePrm@^uo7a;@xIi+q+&L zuexv!GbcYZ(|as7N*|Y7#4GOA3&s|=Ayq5SxpvwgJ+(H2hgH}Jyj?hxIojZRu@i>N zfn4huvbvuB_rZWOi8hQAg7@QWW9n8jSw#NQ%*#lxZ-xCR9uL9jE=iA$>TZ<-5@`_9 z4L|{&7{=Fa1E;M>gUp!T?-j9(g3hd9@nrnSSf*kJox`}O3vCjG$pYcLjix>B2KQIL1GQ`pj$E!{iV-D(8fBW0!<%9}h-L8^& z(Q(?Xc7~DfF`%&eGZk^QTrcd}A)l2O7qhh?8Nc-QLl<93Yh>+1#*7!}*?VR@w(T%* zyZe#B<99I%cr`+Wi$oETu)~w{zN~E1S6mRhyhKI(l^V=5eQ1}9mgm(O+oLBTVF0vA z@#@`~%kLR6eckh3oWqk`IdilabOg~?v4EdRHK%MQYbam%ydj`f@n}_cKbK>a`#=2H z8<=1)ST{lRi2Bpg%n@}E463&dlZs4&gm zy`AOXOq&PR?SfIpA4vdF3Woy+!U{xSes3@V4r{hBdxF;c;;ZP-ep}4P_a2G_MQ88t zT)UfeN8ZJv^m`w68t@Ja*md1%2bo$?J#d}!9mah9DuHwL;8be@pjvbHDECVB$cZ`# z3|uY=Qoaa#cz94hhzVu*Szuz%Mz8Q3$OdU=-WM3T*WPwdDOsQ?@YQ?j75sJ4B?A|5^dY)dgM)+!n`5Q+ciU;?tT&+QkuRXw@Ff#CI_%$xbV5UkWAtNP{?N)PTtj0o4IuE!ZS)x8$jVEIsO zponz+Y4X)f?T5F?4Fqi8q~7;xyoFnA4LDcT)d^Ipd@$Lsao$~0sdM>VwSB#xnC)VL z76(FkWFAs9oAPYgR^6>THa0AWx|bXi?~lvN%0AT8)HIO2_7(WNtbYB$^VdWxntrmr z=*U6kS!+c}BnB#)hS!1i@I7zIB(GzRMnBX$N=i!Sr!}9L)U1uSPd+3iC5>NO7+Pgd z=!76;p1e*$iBF%6Um-ieHZJwqRcezN4zspQFlr8@NC_hqh*TOG{rOX5Bd_Bu_i}ht 
zFlF)OGs1)wmz_7%BVSG%G7ad(K3VFbH6Dw`XdJVgV8@B->n{pWpAk4m%fgKasz9>xY7caHT6^laXgyb^1`8*m-@C#HJ3MdR$d9 zJY$xhY)@c8Y=Oq`(#y*GW@b|@!2){DcoS%%PAjj1h@ zu27GAMa^G!ima@*_x9#Jc^(4>U#(Ja#R1q@?@uiuiQ5<-2l~?&Y0>SKapx@02=ctC zakvh!$etQhduHUfb~=s3k%<$UA`a|g^?N^gSbZ`Q$NlqXF4r3w-|yo;?-jo~v)$jR z3lM%|Woq?KnUL>_nE(3<`z|Jj9o4_rNrt zg*FIdn}@(HsNMed5;%7`({)a)Bl6V^^apQFakG0+QF+G*2BT&-(Q~FLbDW;-feUmMC}&V$1ZAMxuTgZSdTL| zU&}>q+>5n3VSBVJcYwY{B$1eN{{H%18{=rM5YtV@^3<>HdmkS?5zUlOcnd_v@3Mzf zs9*Q!``GXJdQ%eeewit|rC+|@zppi>O2mOj?|WTB?aH+39LYp&zh>*(7HZ$BO~{q= zzUz|g5kFa^L6^a?5LYJ7dTXs$E72>hLfV(#r0cw>vpd;Z_=m?-#yr0u9jf?^PmC|j z-?=j?9YKkLYB41>(2u5+A}F#mbYrMxv+p#<{cBu6sXh9Y6bUhlw}V~+>+jtcVH z*qE~M2XxE6JF6EC^Fjw^{6vQ{3S^$KZ({JF+b?Fz@RGmBit0sTk_ogqxqo^fhE)?QfMM7>sn@mfU{i-1R zt=e_A&8#4;UNz^%@n$dxR#5LdZVZX6^?hY?Kd~&0^lfWsakM2_2x_N@^1g|5vu zG!T@G$1Su3KNVlujUTh#%2mrz6#+4Dk;l0m=<$#Y*?^$p=yw`ZM~%hlH+V5!lUodZ zf33mV?0cQ&w4M0Y-u)9G`9nuNNkffc`I&g=lFaW~5_b$edq`e>}SDjq+424#|*3U$**yygq<_N~-s0|{VLTstZJMto! z;H_Yt^KNop6KIo-J5GReGyv@+#idupWUO}*pyX-vSnW!9FB?NoVNmbE-oTL(Mo089 zpOO?llH*%xyYLEhI+qC2n-cv_=5<7iq>&3+=}ug{I3~t3Xugn>&eYNfPFRCPIJW^HnsLu0MJ#-S>Xn9KO$q!r2I&Dl5KLA@3ODb zZkhMSdZyL#@%a)-rrs#nT&-ip#x0ev*Ofm?;zTlW#**?n{IVRoAwSW7WOBIoFJAy~Z3}s~-n#1jVt&rleLm-KU>LMRy5&W_ zQXAFTOjmyZDDGfUa*YDrgVWXf`5W~g(D759wt2s6l_G;%ENGUgUkO*(Qa?%%j2m5L zy-ez1fabX!Z)jFK&^hhSgo=JZqj=TmXN2^C_l{}bx90kZZxZMf{(xyK&}nR|VJhaK(}Bi&@W?)YnNt-p?~QjK84XVwire+n}8?>JUh!@4E-6_r11HYN+yL~xiXE_KA{3>kmvl<~96 z=eJ+M8OZ1)H|YW`uXonvykG=}>BN#=c6w=Op^nrdZQUdyRr9TO-D;a7K-*qazI}CG z0Us@3A-$IZ*O6mCZ2oBc>kAKq-Ab1)mDK&nZLuyUZkhhOD=K-jMGs76<0P_`pA@+s zs;i8<^t>IEjf~%=dma zQqU$m z^My|l(XY65Mi|$oOZh1M%=kkQQ_^=8eL#LFAE z0R-uNOXS# zx1iC_UBOCTwf|1~)mH&av{t{aW=fyQtC9?gZePVjkZJV=#y_~2FlGVMsb#AzzRTHOc z=&G>wt7($vJ+6il1t`5{=y6Xxhm}?6b5m+P>5(Gcj&7QmcYxQd39w)HXUJ)|79^gY z@sFhRR(wfej4>s1a2Vumbuv2FG~*(>OO2^|#WwO``Qh{4J^qx!70pLd7V_UOa&Fpd zaUVc%Et})$S#pkO6cVX(UeY|`>WcD={0P6ZnX{retbSE6HIn1?Q)#8sNx$?p255}^ z^nipnzMb>~Md!DT%!9zkJdZ$C@KfET6P47-<8P1o+>R=b$VT2@`{b}bY6g}rPVlR0 
zG8(pV_gu9J=IV+O;x^JSp$9!4bobYyP92}*ZQ4k)T2IeHKD>yJ+P^p5dN~t#H!|M5QrjIvp+%h79K~x zDwq2%B!w^ipLUkqW5jN!iNu%ppix8Nob#rqbJh*|^>SZfNPxO8nxZFC^ToC(w+VIq z8%vwM)Svglv@5Jh#+)4bB_kt|l_Y48{c9RywvVpP4~0KI5EO42{aPI1n&bA!Mq&)y z!>M_Fet#s+U*2qWoCcZ~n}9Ug;N3ub|IK4lN@U+MjQwQ}O=Jmm+DGqGELE!e()a{A zn^#Mf`q$*4?`5(@am-E5`AX&N+MU^;v6IsH)aIGl)BOe;zq2N1-t+!KhCeHzQk| z>mBB#i#dD)Z`!ScYm`MJU9L?x7jJJt>~c6b4fu&u@~M(>qJ8Xqff{BlHX6OgtlbM1 zf#J)#yZ7-Fy>ex=h_>h&NQY^AYN<9-Nf1v&KlZz6y2+vWzj0jlKpBKo(itYaG7ba8AC0_eg)P^cBag8 zebOMm4i9kPpIuNk4Y`2VkcD(3WM*o#^AzpYSH@OVuuBK&+z+&Wc zbP6U{-Q)ii31C`#;gkK@L|Xj6ilM5{%}-nqjA78v3@DM{LGe1`kAaRz^wCLex=N2e ziOFzAA^`b?&N>oNV4oViRI1{V2Zd-X+aKk6Gbc~4$-DmUjAeuG0$*aimc%zJfY#z_RuPJ9!m{g|RfTyPK(UcFel?Rj-bz-_lcX|5`P{;FuD z&4W-9*OVA40(;)@fY~^^x=Mh~K{A@t6ovu4bS?osgUo0-&1Rl7PXdX$HugtzM>1Z= zylokprxaGb-rE5ms~?STlb8^qq;5b7XgauARI1TNmoM>+`WGcnQr#br0uctjee5Ll z;#vi1?AppA*<`>Qf9zs2|HjN@{-1A(1{9?s9X>ZhUXApx+TVWVOubFU9 zZ{@@ii8wV$mXg>GOAXDae@bDLq8SEf{@I ziU$c{4HsV>|17`r`c;QI^Muy)Y2 ztRD-{I35;>g^c6WUxjw*tcmwS=abSSjg)+{CMn|uJh(H0ni-$nho9^t}@kcK0;1Za_V+BS>Y8a69(J0Psj=9>ORM3Z9@1J3X z#1e%ZFV+w427?e*ly4|teg%lUD$bCD@dv}ERP7#?Cy2byCFVmk{ zpKq*%>Z>h7o$W&Hw-FlB*rle~dPA5hpmO2Ycvlq9R;(dZknWkwck}aAp2D_vK z&_Qopte8)${a2w|;R`M?M*0o9?`&nBcmB;4eDG-h{KsL{b-M(P`9Jh3yM3Kfn{TP_-x;4JdFaaic|3vHe(zzKT+VOLUeN=~6Nm4eQw7<6yd zeC!ZA7485pkJOSI($e8`y}nzeheF8Bf=efhr1$0xV4zGN8tpgFE$5CG>2AKdR+uxp zf%#VyQI}P7{?F(aCL~=BYn{R1CrT-sG?2#QFHG{zE@! zG;TL0-9{VQ@dD$F5FHZy+_n}f(Vie&nxH~-)R%N~vDGzJzQs7%4*SP;e~e6R17|A? 
zl*?4joh19S4_y%@af7N~%LsD3A{Ug7%Kx;yhN2=WzQ5HUJJGJ=V)wgUr#f9=9uwR^fnAfq)nC4ZIx~~b)#!zCnh*CZ4x&br@NhRF56Y2fEuKny$5}C{75^Z@U+e_r zp7Nm5^&66|WYI3Uy)hFj^EJ=Ef4>oB$S8t0G1&vRMuzwT1OFG-#dr0*$pBf?o1B%l zqJJ6{8i~$*F$8k=#HIx}FRxd$>r9O|dL;93m1$cs+r75v4BLIJ-Th#~vd%%UPpcKU zx#Olo*s>-PePn75>-&whk{7!szUKZ^b>d=4rn~!frU?YlqHj5mLjGS6EK;M(yZ*Zs z4TTe(kYtAH^^cc8do3CG;#E=AfeouVL#xu&ySC;+-9WZk&riS;ZNzFi=(r)Rm>Z$l zYG|H2nDVaT@`F0ABL;uo)3|8DlV2zp-F>}7w-ujq6Tv|2lQK?K(Mmo;XXo-ulh(JbYF4%f$u8KjoGA+)|^#S%@h0t8- zjC1}?`)m`JC$>zlshUXGHwfDnV$P&d&0-pOR*}ZrkB!LAu9U=Ji12WInz}`53Gsfo zKA_pLo$nNC1&d(g=@KHgw!jhyu)$bUat;R_SL4_+3oBo;fSJcMhZCh%Wq+pZ`yw)R ztc%@bsS-74_%afI6-T zWRBP0ayZ3cjyK_{Ql#H26q9TeLbN0z9&)+R=8=2J7voW^UZ!fY(t1@-9=E;WJ?^$T zd-W}@Nnb=-?3{lrmgT#u!uj`4Yfqe(VDhOfU9>%@Hc*UO3>e$62N;L3*cONk6FYaz zHp^sqx3ePP;kd)}plsYyVi8R;6w6nkw#p377=lD}2@!-!vq4-(C!JuRf9Gq}tg1FC zF_Mi^KE{avbwu1~CjCD(dM)_9Bc!|19z!+asMFK3wmxAk^54>FuG6c;oli1%)iIgE zBFJ&>Uv3RnKCD&Z+*1E1tw?elL;@Is2f;=~v#w28BlFbh%fLTk_Ce2>Go^mGhmMD_ zQ-gaYJR{97p4>m`UyuuZ{<_FjLV55JRYW`hhwM3>{4Ga( z`@!P$0(_0Tz{loW3KLiu{L4_QrD6ARs zoic*v`i8ZFl=IGjRw0%4))*nDrG?7SSDY$d*q(&yg$XPwm{F}bU!kp0Iwh!WEIW?* zldz!IKO%HO*4L(H5!;O2&1!ruJFjWJ|K91af6(OXCtI%2Y?1EFCO2LIuzewv7Q)+ERx`igm5bZfDoHX%#(^tacie7)~kwYrrcmA<( zJPj3`i@*NH)6{($ci0~N_#WRUFxHN0Jm4fk5-rDP1V=7J&nnRHcxEeRnbMvmdX3z#F!=V954xI9v6~lHT%o zrHL>YjWF3%@w;V#@fM7?_7laqszDqj{PJ3Ryo-DU98J6oX8;oiE|MJuZu8sidVLo9 zSm??4HQ7?{gnqplBSa<(iJUy-;R7xy;9GEgOC97>g}-AnRhB?^InhHV^ZVo6Y3hFU z-sOFM4AJh)+OGNXWof!Z%p0PVF55}}c*Gt>BrG4sMG`K^ud&+-aQz<2$ufHX<$?2*1a>LmsPjxXZ(7!Z8<%0+qzBorMkYm+>@~ zY#0>WNJ38s!4jyOzr3os(C}HJmUuo<8gD|J4OUtq-MfR@JHA5_=d_k%`n=A!l)69c zXfKksy1(qNlwgfz39|;h92^P5p-J1iJrgc(YIFS)6;C4-j;mSr?L#z4I1p~6{VR07 zb2Dj{2M`RWqA$wVM{FbU5MnbY2r(G~4DQ;$a~Sgtx4yb3ARqd(_`aK)zBIT586)B` zphc4lGr?8r^_a}34I>v~A{Qx+sOgF_-|bgGhnh$birw`1@8QtdX+vbqdY4R}Nuegq zBe9Tm8CV>Vle>F@TZGGTJWc;75(!T+2I)QnlEC1*;=x4vZyFVIV(xX%98T3BuPcQi zy|jEdk%tHKSR@RPAQ9dPJFR{XF>w=+FlogL(!p$yh=fiDDK5qExxWmb`Lx#gXti2N 
z3~cOFZ-_X}hFHd1efApHb@Pm}5ucuu-M9C5~S1YS~nq6imhKJA8Q(W^(AwVCM_w4{G)Ww!j?>Zh-c z?Z-~=T4cdK)fz2rKs9B}d>|n}Sp@B)GP$_O;E2D3_-Wi!m^C=E46Ka|f z^#E?&4UhsUGWkd)SA@!8P5PgA=*!25?i$K&J9~@~Qwfd6MtI)}69?#U*Z$z+j< zFiXfq3gu0*m9%cVD{%^oVU%uzEsewSf|z2v@0~6iqc(@g$x2sf7N4`pYCCe;)^QLW z-PU~kuh95+dfZNal2fckXwm`5f?ZCJ{7Fn|uMu_j;A6r_p$Iu$zQGS3r(sU-{9sW7 zxjapAimtXlY;VY+=hI;Wh3cpj*hX?GVdxBUgqm}jbB=!75p%4%MpH9o+6^Yi|qOVy4YqS?oZkIrH(Xg26Z!E8O{8N(F37+J(5d5RQ=-WjnVT{VEx1AqicKZ7GFjtt?I8N?`(}0 z&y&GBNvYdP%9JQS+((PHd0(#2qV~fIGr#ul^)+V;+&lfK-OT?PC_oS_^c*MaWc3af^-ze z_UDT5LSjj2hw8y%0pE#O*<_aR*$S;MXbnuWHk9_|>YSZ1WFl-KW7NTD5hk(ajbNW^ z1TNR7RbS2&c7s;9*d}smBTny%5t%8&E+;Z#F0%yjCWjU5hdUzy7Uh2n)sfFGThn5gZNeU6#ei?C}&@uEx`a zm=dLxxb2#*f7xx$I|*?WXIpQEhD%~8wxS#!*du%y6R= zEj6i_+*fEom7!qLO|yb4iAZL`<#2?WzBi@x)uX2rad$2e`ba}PPS*Mxm{TQ^ zB@)7zxJY8%PnTlf%vS?9S8W&pJsXo&Wf&0djBid17*t0-IYf}q*7pXR3KlLnz0xo= zLRtX#TpXk6i;3)&5QPwuC4cX;$e<3yfs(k&R|Fya#-k)83d6n3wA5|7OgW!!ad%7KoiN?x9vO34}Y61+iAr zyaJ=5G>IfJ{h(cgoP2$WK^pM%NQ%R5sIMY(^1A_XFrg^U&5%*7L_VG6gktgJ_zgVX z{It(a)UAeqhXbLQR!iwNl)&Rp_ZL60zh(n*GRw$`X+=Koq=bui6rwLh&A+qhg3CA`CGXY|$wEE>^#q=7^p_IwHV_sxztW*5ES zt}rXWco5KNfpk0X426S5>+9lLt$mm=RDOob;zviSohgE7KD{EOk-zxKvIx{-QEGKqBg_Dk2!zT+nb z(y836;1#0fw5pA&hM5+F-t!ayrYyXM+Q-WVi;C;%S$1ysh~XsqeQ?r7%YZ3n7e8z5BO8l`2F~&3g6;$(*q>94wn|2 zPfSA4{|eX3*^Ds3W`5Prgg1f+THidxq}d$T%>294B~@ouwbNGz+6x{EG504QFJcLw zLTOA#XLm7Eun>B4!3m;}BfoqGWfY5GTe2brW>pY8Jy5&h`0!kwb)Z&~uM_!NTVDL* zN^2N4o1kG!msAUI>y*D#_pe-C|46?XUnjK}>gdSPSvJL#I^AY3KDmW^lJ#3h9xI+?;jc(Oy~@a`-@_ zm`!l}N6a)}>pccB|HxOk3RZ!&#cTB@W#|XhWU2jDx1%uTMu66B0E(>H?SWgLq7_Y4 z+zlgoCZv8r3x!N;{)5#*o^Y{z4z*iBx$JV}!ot8>e7?lbP9dMTE+Z;P*c+wa#qOgS z^;{x$wC zZNq=&3Ow?W&+cM9DQJbqljTP+_mD2S$m%r<`BB?s+3`7NtZ<&+f&jv^)e9ljB6(Km z!dR{dy-kOp*LnC^Ryk6m$g5vWWP-Iy_$VizmLD07c`M=y^Qe4x?ZnB-RFBi3h58)v zj4bR>yWf|es9MUV3WtTwQEK#C@`<{%Y77IxR5Ua9V!Ny&F6Cg_^7VNLgdR%!4UbAp zd%49=4&!G!rzKav(UwcS+azHq3*A)8-1n~65!HS5;{@Cbf7{p|R*FPcQeo#nUNet`R*CF9MYaL?=G zx@H%@+jS~nsHNjo>l^w!>?EO;3sFdtlbP#N23}K>5+z(CSKGmba0r_;N;y%WY{P7q 
zzlh2k=t}!%1sH|F(xcBmAQNy&|5$*UZhon#ILN5jWYc1_(D0{s8oZ-4{3@|OQB4Mm zp@>zX2`K=zS=jD+0LM3R^e^rk|8yjZGH252XuG;mYQf~1N+w^zk)teb_IIxrvrh~u zfGd)W#Lde{T)@h8@O*@urbPE*uaG#C)^;Ths1Gju!+D8`8N#X2Cu%!UE1Z?RYcf}v zo-b8^t~FKzf*T~ar2J`A3sHchDmk1%$F7qN^q%=qoBF)|PY5bp+;&zB31C)3RflkM zyvWR?!2~(jy%|ijM3D|qnUvhwo1hY(_)fTDE|u(b<}odm#%GFYF`E9RSOFuMS|f$W zLBAh>9g+uyzu$?6xNi19WfBU3u3N;=#AgX@oCn3fnfP;cOS+1QAJWwEb4?L{o|#Tig|mRus6%jMwa;3=f4}`U+D*Lzgvz)c&9Qt z9P^&9cHdL*`(mHr^`_p93`Im({kr1XGfoJWo%kOMPz}E`l=ACK;7g~=zs3;RG&rb9 zKeRULwE?WVphQWzT*Aq2xxKG4D+cEsNzVtASTK7u`I9lrpw@^1o)bDW4mzf-=HdjT zo$tOKe%So-R=2kZk`efQ;A}2@%-S8np(R9>t~SUTk04}cD%UV6<(4@9-9nbmX(0xt zP)=8{S30KX1dLz7&pon&Wyc}MmRw`4O1BZlLSOP8l>^t&=lTS;DH$2WOAB-LsnEC7 z8Cw&*X!@T460q3s{*$M`{4e7vA7HJ%=>NGeUe8<#0Xq@~xRo9Orxf`Rh<)pgAkWxM z`tnk8b(wGd&tnIYwg9+`HX~6v2oR&tYH2qFB@&9ysG3J-FjA$D+7JIY+4e~Uir>~;`MLn0$|nF*C@wV^ zO0J+>09Iky$irFNuwmKq7+HLFP#S+H5-}e&*5 zInb%!FITIGuCHTD4hU`DJ$2CM2>Z~nO{V1td9kqdAb{gfW=H}d3M~~FDP$MRmJ9UW z#TW?ZLIP#`W69x@mBmA`@DkMfnDR7kszO5<#;t;`DL2ez>r5n4TfrwB`U-hrZL=q~ z`jX${#DuP!Nu41UOqL_wNVkfr%i{4g(dYAny+(=pa+~4y@+Nf7g(%Hl7cU%E+QTu@ z>odkL_#oYl_G=-5p_p<}SDilKDEI|?387nL0nw;j`quC02PO||9r<$n}Id%z5yo)TTX-IR_c3QE|yD_YM zQr%W0Fq6S^({6FziDxu@4Nc-AVA95Z)0fw2C>;h)MznbM!%J)ZHgAPpzVf5e<^ty= zrxR&U$I5c=`#!jbxA0O;DXypU0M8Cb{t)GPYSjN_f3Xeg1tt+?U3Z$pN;4+=CyI2D zyVZe*6c#-TPzP61Zlf3&O$o6MA~{H{@4_WaW+CqJj!UL0W^*`ST163YBdt6i1Y2RM zm{&L7tP4L0cu38B%BAk+?Wb2v!*-HU^=?nR^^W@U!}^yL7(;(Rkq6%|`hd2&d^v@F zmy^P#8l6&(Dhdbr9?_m)Ic~o$;5#pDo1Zm;yaGe6){ozgSFY!a*$pO0ZVokw21r!!ltdA5TKqV}qs!Bqcc_OCKHDM+jd~+wXIBMv8^TZjAZ<~P74quM zY9ZPY1PL9LgJ~MbV@wyj;dswz(KiAcPSX;m(@?`F;;XR_RNd!Bh{2G((N0zc2{f{OeFSX%c8i>_3|VCDkjsO3 zK9|>2EBhI#hnxw6HmqB2AZA1CGF#9K?PV+}m}4bN7LBJR%SZJOdNATv16v0^r<%T_ zpXdmYOUYC9e=3y0--FEme5@2-feZ6LRf<1h3^=$^K|53277DT_J_LZSVp}!WMr*Ub z{ILVt{R6#Mq+R?`bM^t?oeC8erG{0Ut6e_m2Uu zn8HsrCgvilqa9c6XFa^{!1eisF`(hIz=xhTGN;#g` zKDImNFrb|8vIfxf$9&UIpVDso9EdFq>CWeu_%&_$IppWn5qBDaD@Z$%qxJ7Z$cNM8 z_0MVko?mVr%vhYU^DC)>iR8J3xzl1a1Hg32s7e$qGfVwi>SXFF${R-Mr7&tn$<$zo 
zqZI>fx_N1IPWYkG^J<3$Vc3^1<@(iUq$@V@f(4N+Vv-TE|ENhAlBwqV%#%wg`P~jF zO_f^ukEY5HrDDh|le(E!0pLhl7`p#taGL+M0L+Wnr0AcC#meoO!|h4@>^TeSM#RVeiZL_+-}8Bo0q$`0?G>pSSOolW276 zKkp;NpNyTX_y^#>W>fJJN#=uKO_z+r`REByaj+S*9M%}B(J8zip0kGzOyc<6UlVBg zLQrJ#vw;J9pyqsMIMOCMh)~9`m{u+&&$Dh*I#pN*;98MHlLc~uR0$LESSKrZzIK1? z%oU4F;=&6jA_>QX1G-!faMAFYsM#>XiI*bJwr}lB22GhxW=4;+fK6W(1zRMoO zTW9iXcp5!6M;Hbgw0Q0Jkd)4l=5>GPr^x-Lb(auR@gCZhRx~?)oOT`ukSM z`{_}^%*tXcJv!)YYillBpxOH%fixCpN!bxOrCh5bLK2-Y;z+NRQ^Q5Lrbs|~vsj;! z^8|N~xLi^LgM+F2Nj6J+`9|313Bk#vAGBXLb$}+>9?a~ogrlK@ct~^nyIGU8=4pLz zCeJ^Yxc&XGM*XZ-q)N9zH>v+WT3-@d!v`d^a~N0l79P&mfQyL~QdcJ(Kz!kWNL91n z`d#$b3tY?Cy6%j(cS})ax)m&_wGFlgW?N!WL@x|ekQVM-EQ8WyvwF2{eJ=Oes_9?y zVT$E`eiSK#+tMBHEY>Ib-U<0umV(Yp0ZcylJ~t=vYY0wTz2c?H?w&Vec)*(=+Ue^> zJ4bkCs_NhX{ue9*xbPy;C75tp4C5|oMJ0NoUF2fybY2m*>NRbgJtr65oGkaWeBV
-๐Ÿšจ Troubleshooting +โ— Troubleshooting **Container won't start:** ```bash @@ -101,8 +97,6 @@ docker pull stickerdaniel/linkedin-mcp-server
---- - ## ๐Ÿ“ฆ Claude Desktop (DXT Extension) **Prerequisites:** [Claude Desktop](https://claude.ai/download) and [Docker](https://www.docker.com/get-started/) installed @@ -115,9 +109,7 @@ docker pull stickerdaniel/linkedin-mcp-server The extension automatically handles Docker setup and credential management. ---- - -## ๐Ÿ› ๏ธ Local Setup (Develop & Contribute) +## ๐Ÿ Local Setup (Develop & Contribute) **For contributors** who want to modify and debug the code. @@ -178,7 +170,7 @@ uv run main.py --no-headless --no-lazy-init
-๐Ÿšจ Troubleshooting +โ— Troubleshooting **Scraping issues:** - Use `--no-headless` to see browser actions @@ -201,6 +193,8 @@ uv sync --reinstall Feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) or [PR](https://github.com/stickerdaniel/linkedin-mcp-server/pulls)! +--- + ## License MIT License @@ -208,4 +202,4 @@ MIT License ## Acknowledgements Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@joeyism](https://github.com/joeyism) and [Model Context Protocol](https://modelcontextprotocol.io/). -โš ๏ธ Use responsibly and in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. +โš ๏ธ Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. From 2348bce492627f51bf96838753b5004503885041 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 05:25:23 -0400 Subject: [PATCH 064/565] fix(readme): update Docker badge link format for consistency --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8ab41c26..c98ed325 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ A Model Context Protocol (MCP) server that enables interaction with LinkedIn thr ## Installation Methods -[![Docker](https://img.shields.io/badge/Docker_Hub-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup-recommended-universal) +[![Docker](https://img.shields.io/badge-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup-recommended---universal) [![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_Extension-d97757?style=for-the-badge&logo=anthropic)](#-claude-desktop-dxt-extension) 
[![Development](https://img.shields.io/badge/Development-Local_Setup-ffd343?style=for-the-badge&logo=python&logoColor=ffd343)](#-local-setup-develop--contribute) From 6ad3af8b999ebf249a179cf6e5824be56f265aec Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 05:27:03 -0400 Subject: [PATCH 065/565] fix(readme): correct Docker badge link format for consistency --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index c98ed325..94be5808 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ A Model Context Protocol (MCP) server that enables interaction with LinkedIn thr ## Installation Methods -[![Docker](https://img.shields.io/badge-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup-recommended---universal) +[![Docker](https://img.shields.io/badge/Docker-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup-recommended---universal) [![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_Extension-d97757?style=for-the-badge&logo=anthropic)](#-claude-desktop-dxt-extension) [![Development](https://img.shields.io/badge/Development-Local_Setup-ffd343?style=for-the-badge&logo=python&logoColor=ffd343)](#-local-setup-develop--contribute) From 40f49616ec6eb10aca18820aefdcba8ce9a5334b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 05:30:46 -0400 Subject: [PATCH 066/565] docs(readme): update links to open in new tab for better user experience --- README.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 94be5808..71af150b 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,7 @@ Get details about this job posting https://www.linkedin.com/jobs/view/123456789 ## ๐Ÿณ Docker Setup (Recommended - Universal) -**Prerequisites:** Make sure you have [Docker](https://www.docker.com/get-started/) installed and running. 
+**Prerequisites:** Make sure you have Docker installed and running. **Zero setup required** - just add the mcp server to your client config and replace email and password with your linkedin credentials. @@ -99,10 +99,10 @@ docker pull stickerdaniel/linkedin-mcp-server ## ๐Ÿ“ฆ Claude Desktop (DXT Extension) -**Prerequisites:** [Claude Desktop](https://claude.ai/download) and [Docker](https://www.docker.com/get-started/) installed +**Prerequisites:** Claude Desktop and Docker installed **One-click installation** for Claude Desktop users: -1. Download the [DXT extension](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest/download/linkedin-mcp-server.dxt) +1. Download the DXT extension 2. Double-click to install into Claude Desktop 3. Configure your LinkedIn credentials when prompted 4. Start using LinkedIn tools immediately @@ -113,11 +113,11 @@ The extension automatically handles Docker setup and credential management. **For contributors** who want to modify and debug the code. -**Prerequisites:** [Chrome browser](https://www.google.com/chrome/) and [Git](https://git-scm.com/downloads) installed +**Prerequisites:** Chrome browser and Git installed **ChromeDriver Setup:** 1. **Check Chrome version**: Chrome โ†’ menu (โ‹ฎ) โ†’ Help โ†’ About Google Chrome -2. **Download matching ChromeDriver**: [Chrome for Testing](https://googlechromelabs.github.io/chrome-for-testing/) +2. **Download matching ChromeDriver**: Chrome for Testing 3. **Make it accessible**: - Place ChromeDriver in PATH (`/usr/local/bin` on macOS/Linux) - Or set: `export CHROMEDRIVER_PATH=/path/to/chromedriver` @@ -191,7 +191,7 @@ uv sync --reinstall
-Feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) or [PR](https://github.com/stickerdaniel/linkedin-mcp-server/pulls)! +Feel free to open an issue or PR! --- @@ -200,6 +200,6 @@ Feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-serve MIT License ## Acknowledgements -Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@joeyism](https://github.com/joeyism) and [Model Context Protocol](https://modelcontextprotocol.io/). +Built with LinkedIn Scraper by @joeyism and Model Context Protocol. -โš ๏ธ Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. +โš ๏ธ Use in accordance with LinkedIn's Terms of Service. Web scraping may violate LinkedIn's terms. This tool is for personal use only. From 4831c4c122e3c8d3d6a5416d21893f04680b4f21 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 05:33:17 -0400 Subject: [PATCH 067/565] docs(readme): update links to use markdown format for consistency --- README.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 71af150b..94be5808 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,7 @@ Get details about this job posting https://www.linkedin.com/jobs/view/123456789 ## ๐Ÿณ Docker Setup (Recommended - Universal) -**Prerequisites:** Make sure you have Docker installed and running. +**Prerequisites:** Make sure you have [Docker](https://www.docker.com/get-started/) installed and running. **Zero setup required** - just add the mcp server to your client config and replace email and password with your linkedin credentials. 
@@ -99,10 +99,10 @@ docker pull stickerdaniel/linkedin-mcp-server ## ๐Ÿ“ฆ Claude Desktop (DXT Extension) -**Prerequisites:** Claude Desktop and Docker installed +**Prerequisites:** [Claude Desktop](https://claude.ai/download) and [Docker](https://www.docker.com/get-started/) installed **One-click installation** for Claude Desktop users: -1. Download the DXT extension +1. Download the [DXT extension](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest/download/linkedin-mcp-server.dxt) 2. Double-click to install into Claude Desktop 3. Configure your LinkedIn credentials when prompted 4. Start using LinkedIn tools immediately @@ -113,11 +113,11 @@ The extension automatically handles Docker setup and credential management. **For contributors** who want to modify and debug the code. -**Prerequisites:** Chrome browser and Git installed +**Prerequisites:** [Chrome browser](https://www.google.com/chrome/) and [Git](https://git-scm.com/downloads) installed **ChromeDriver Setup:** 1. **Check Chrome version**: Chrome โ†’ menu (โ‹ฎ) โ†’ Help โ†’ About Google Chrome -2. **Download matching ChromeDriver**: Chrome for Testing +2. **Download matching ChromeDriver**: [Chrome for Testing](https://googlechromelabs.github.io/chrome-for-testing/) 3. **Make it accessible**: - Place ChromeDriver in PATH (`/usr/local/bin` on macOS/Linux) - Or set: `export CHROMEDRIVER_PATH=/path/to/chromedriver` @@ -191,7 +191,7 @@ uv sync --reinstall -Feel free to open an issue or PR! +Feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) or [PR](https://github.com/stickerdaniel/linkedin-mcp-server/pulls)! --- @@ -200,6 +200,6 @@ Feel free to open an LinkedIn Scraper by @joeyism and Model Context Protocol. +Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@joeyism](https://github.com/joeyism) and [Model Context Protocol](https://modelcontextprotocol.io/). -โš ๏ธ Use in accordance with LinkedIn's Terms of Service. 
Web scraping may violate LinkedIn's terms. This tool is for personal use only. +โš ๏ธ Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. From d81147599f11a3f2922bc8a7882b5d83668accd1 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 16:08:39 -0400 Subject: [PATCH 068/565] docs(readme): remove redundant line about Docker setup and credential management --- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index 94be5808..cfef411e 100644 --- a/README.md +++ b/README.md @@ -107,8 +107,6 @@ docker pull stickerdaniel/linkedin-mcp-server 3. Configure your LinkedIn credentials when prompted 4. Start using LinkedIn tools immediately -The extension automatically handles Docker setup and credential management. - ## ๐Ÿ Local Setup (Develop & Contribute) **For contributors** who want to modify and debug the code. From 8875913c9ef40131fd1c3af4d446776aefadbc77 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 18:03:44 -0400 Subject: [PATCH 069/565] docs(readme): update usage examples and enhance troubleshooting section for clarity --- README.md | 67 +++++++++++++++++++++++++++++-------------------------- 1 file changed, 35 insertions(+), 32 deletions(-) diff --git a/README.md b/README.md index cfef411e..b740e895 100644 --- a/README.md +++ b/README.md @@ -13,27 +13,29 @@ https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 ## Usage Examples ``` -Get Daniel's profile https://www.linkedin.com/in/stickerdaniel/ +Research the background of this candidate https://www.linkedin.com/in/stickerdaniel/ ``` ``` -Analyze this company https://www.linkedin.com/company/docker/ +Get this company profile for partnership discussions https://www.linkedin.com/company/inframs/ ``` ``` -Get details about this job posting https://www.linkedin.com/jobs/view/123456789 +Suggest 
improvements for my CV to target this job posting https://www.linkedin.com/jobs/view/4252026496 ``` ## Features & Tool Status -### Working Tools -- **Profile Scraping** (`get_person_profile`): Get detailed information from LinkedIn profiles including work history, education, skills, and connections -- **Company Analysis** (`get_company_profile`): Extract company information with comprehensive details -- **Job Details** (`get_job_details`): Retrieve specific job posting details using direct LinkedIn job URLs -- **Session Management** (`close_session`): Properly close browser sessions and clean up resources +**Working Tools:** +> [!TIP] +> - **Profile Scraping** (`get_person_profile`): Get detailed information from LinkedIn profiles including work history, education, skills, and connections +> - **Company Analysis** (`get_company_profile`): Extract company information with comprehensive details +> - **Job Details** (`get_job_details`): Retrieve specific job posting details using direct LinkedIn job URLs +> - **Session Management** (`close_session`): Properly close browser session and clean up resources -### Tools with Known Issues -- **Job Search** (`search_jobs`): Currently experiencing ChromeDriver compatibility issues with LinkedIn's search interface -- **Recommended Jobs** (`get_recommended_jobs`): Has Selenium method compatibility issues due to outdated scraping methods -- **Company Profiles**: Some companies may have restricted access or may return empty results (need further investigation) +**Known Issues:** +> [!WARNING] +> - **Job Search** (`search_jobs`): Compatibility issues with LinkedIn's search interface +> - **Recommended Jobs** (`get_recommended_jobs`): Selenium method compatibility issues +> - **Company Profiles** (`get_company_profile`): Some companies can't be accessed / may return empty results (need further investigation) --- @@ -81,20 +83,13 @@ docker run -i --rm \
โ— Troubleshooting -**Container won't start:** -```bash -# Check Docker is running -docker ps - -# Pull latest image -docker pull stickerdaniel/linkedin-mcp-server -``` +**Docker issues:** +> Make sure [Docker](https://www.docker.com/get-started/) is installed +- Check if Docker is running: `docker ps` **Login issues:** -- Verify credentials are correct -- Check for typos in email/password -- Check if you need to confirm the login in the mobile app - +- Ensure your LinkedIn credentials are set and correct +- LinkedIn may require a login confirmation in the LinkedIn mobile app
## ๐Ÿ“ฆ Claude Desktop (DXT Extension) @@ -107,6 +102,18 @@ docker pull stickerdaniel/linkedin-mcp-server 3. Configure your LinkedIn credentials when prompted 4. Start using LinkedIn tools immediately +
+โ— Troubleshooting + +**Docker issues:** +- Make sure [Docker](https://www.docker.com/get-started/) is installed +- Check if Docker is running: `docker ps` + +**Login issues:** +- Ensure your LinkedIn credentials are set and correct +- LinkedIn may require a login confirmation in the LinkedIn mobile app +
+ ## ๐Ÿ Local Setup (Develop & Contribute) **For contributors** who want to modify and debug the code. @@ -172,20 +179,16 @@ uv run main.py --no-headless --no-lazy-init **Scraping issues:** - Use `--no-headless` to see browser actions +- Add `--no-lazy-init` to attempt to login to LinkedIn immediately instead of waiting for the first tool call - Add `--debug` to see more detailed logging **ChromeDriver issues:** - Ensure Chrome and ChromeDriver versions match -- Check ChromeDriver is in PATH or set `CHROMEDRIVER_PATH` +- Check ChromeDriver is in PATH or set `CHROMEDRIVER_PATH` in your env **Python issues:** -```bash -# Check Python version -python --version # Should be 3.12+ - -# Reinstall dependencies -uv sync --reinstall -``` +- Check Python version: `uv python --version` (should be 3.12+) +- Reinstall dependencies: `uv sync --reinstall` From e7ae7a818096857337fded17207bfecdbf84b15f Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 18:16:20 -0400 Subject: [PATCH 070/565] docs(claude): add CLAUDE.md for development and deployment guidance --- CLAUDE.md | 178 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 178 insertions(+) create mode 100644 CLAUDE.md diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 00000000..4d93e659 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,178 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 
+ +## Development Commands + +### Environment Setup +```bash +# Install UV package manager first +curl -LsSf https://astral.sh/uv/install.sh | sh + +# Install dependencies +uv sync +uv sync --group dev + +# Install pre-commit hooks +uv run pre-commit install +``` + +### Development Workflow +```bash +# Start server in development mode (visible browser, immediate login) +uv run main.py --no-headless --no-lazy-init + +# Start server command for MCP client configurations +uv run main.py --no-setup + +# For debugging, show browser and login immediately +uv run main.py --no-headless --no-lazy-init --debug + +# Run linting +uv run ruff check . +uv run ruff check --fix . + +# Run formatting +uv run ruff format . + +# Check dependencies +uv sync --reinstall +``` + +### Docker Development +```bash +# Build local Docker image +docker build -t linkedin-mcp-server . + +# Run with environment variables +docker run -i --rm \ + -e LINKEDIN_EMAIL="your-email" \ + -e LINKEDIN_PASSWORD="your-password" \ + linkedin-mcp-server +``` + +## Publishing & Release Commands + +### Docker Hub Publishing +```bash +# Build and tag for Docker Hub +docker build -t stickerdaniel/linkedin-mcp-server:latest . +docker build -t stickerdaniel/linkedin-mcp-server:v1.0.0 . + +# Push to Docker Hub +docker push stickerdaniel/linkedin-mcp-server:latest +docker push stickerdaniel/linkedin-mcp-server:v1.0.0 +``` + +### DXT Package Creation +```bash +# Package DXT extension (Desktop Extension for Claude Desktop installation) +bunx @anthropic-ai/dxt pack +``` +# This creates linkedin-mcp-server.dxt file based on manifest.json. 
Specifications: +- https://github.com/anthropics/dxt/blob/main/README.md - DXT architecture overview, capabilities, and integration patterns +- https://github.com/anthropics/dxt/blob/main/MANIFEST.md - Complete extension manifest structure and field definitions +- https://github.com/anthropics/dxt/tree/main/examples - Reference implementations including a "Hello World" example + + +### GitHub Release +```bash +# Create GitHub release with DXT file +gh release create v1.0.0 linkedin-mcp-server.dxt \ + --title "๐Ÿ“ฆ v1.0.0 - Claude Desktop DXT Extension" \ + --notes "Initial DXT extension release for Claude Desktop users. + +## Claude Desktop DXT Extension +This release contains the `.dxt` extension file for Claude Desktop installation. + +**Installation:** +1. Download the \`linkedin-mcp-server.dxt\` file +2. Double-click to open in Claude Desktop +3. Configure with your LinkedIn credentials + +**Prerequisites:** +- Claude Desktop application +- Docker installed and running + +For other MCP clients, refer to the [Docker setup guide](https://github.com/stickerdaniel/linkedin-mcp-server#-docker-setup-recommended---universal)." 
+ +# List releases +gh release list + +# View specific release +gh release view v1.0.0 +``` + +## Architecture Overview + +This is a Model Context Protocol (MCP) server for LinkedIn integration with the following key architecture: + +### Core Components +- **Entry Point**: `main.py` - Handles initialization, CLI args, and transport setup +- **MCP Server**: `linkedin_mcp_server/server.py` - FastMCP-based server implementation with tool registration +- **Driver Management**: `linkedin_mcp_server/drivers/chrome.py` - Selenium WebDriver session management with LinkedIn authentication +- **Configuration System**: `linkedin_mcp_server/config/` - Layered configuration with CLI args โ†’ env vars โ†’ defaults + +### Tool Implementation +- **Person Tools**: `linkedin_mcp_server/tools/person.py` - Profile scraping (`get_person_profile`) +- **Company Tools**: `linkedin_mcp_server/tools/company.py` - Company analysis (`get_company_profile`) +- **Job Tools**: `linkedin_mcp_server/tools/job.py` - Job details and search (`get_job_details`, `search_jobs`, `get_recommended_jobs`) + +### Configuration Layers (Priority Order) +1. Command line arguments (highest) +2. Environment variables (`LINKEDIN_EMAIL`, `LINKEDIN_PASSWORD`, `CHROMEDRIVER_PATH`) +3. System keyring (secure credential storage) +4. Interactive prompts (development) +5. 
Auto-detection (ChromeDriver path) + +### Key Design Patterns +- **Singleton Driver**: Global WebDriver instance reused across all tools for session persistence +- **Lazy Initialization**: Driver and login only created when first tool is called (unless `--no-lazy-init`) +- **Secure Credentials**: System keyring integration with fallback to environment variables +- **Resource Cleanup**: Automatic browser session cleanup on shutdown + +### Distribution Methods +- **Docker Container**: Production deployment with pre-configured Chrome/ChromeDriver +- **Claude Desktop DXT**: One-click extension installation via `manifest.json` +- **Local Development**: UV-based Python environment with manual ChromeDriver setup + +## Important Development Notes + +### Credential Handling +- Credentials are NEVER logged or exposed in error messages +- Use system keyring for persistent storage in development +- Environment variables for production/CI +- Interactive prompts only in development mode + +### Browser Automation +- ChromeDriver must match Chrome version exactly +- Auto-detection checks common paths: `/usr/local/bin/chromedriver`, `/usr/bin/chromedriver`, etc. 
+- Use `--no-headless` for debugging browser automation issues +- LinkedIn login happens automatically with retry logic and 2FA support + +### Tool Development +- All tools follow FastMCP registration pattern in `server.py` +- Tools reuse the global driver instance for session consistency +- Return structured data, not raw HTML +- Handle LinkedIn rate limiting and session expiry gracefully + +### Known Issues +- `search_jobs` and `get_recommended_jobs` have compatibility issues with LinkedIn's current interface +- Some company profiles may be restricted and return empty results +- ChromeDriver version mismatches cause common setup issues + +### Code Quality Standards +- Use UV package manager (not pip/conda) +- Follow commit message format: `type(scope): subject` (see `.cursor/rules/commit-message-instructions.mdc`) +- Run `ruff check --fix .` before committing +- Keep Python 3.12+ compatibility +- All new dependencies must be added to `pyproject.toml` via `uv add` + +### Testing LinkedIn Integration +- Test with personal LinkedIn account first +- Use `--no-headless --debug` to watch browser automation +- LinkedIn may require mobile app confirmation for new login locations +- Session persists across tool calls for performance + +### CLI Tool Integration +Use `linkedin_mcp_server/cli.py` to generate Claude Desktop configuration automatically and copy to clipboard for easy setup. 
From 30670beda00a8348d779728c93dfa1287c8a3c3d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 2 Jul 2025 14:20:47 -0400 Subject: [PATCH 071/565] docs(readme): small fix and remove CLAUDE.md and update .gitignore to exclude it --- .gitignore | 1 + CLAUDE.md | 178 ----------------------------------------------------- README.md | 2 - 3 files changed, 1 insertion(+), 180 deletions(-) delete mode 100644 CLAUDE.md diff --git a/.gitignore b/.gitignore index 5f505b3a..34cf849a 100644 --- a/.gitignore +++ b/.gitignore @@ -196,3 +196,4 @@ cython_debug/ # claude code settings .claude +CLAUDE.md diff --git a/CLAUDE.md b/CLAUDE.md deleted file mode 100644 index 4d93e659..00000000 --- a/CLAUDE.md +++ /dev/null @@ -1,178 +0,0 @@ -# CLAUDE.md - -This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. - -## Development Commands - -### Environment Setup -```bash -# Install UV package manager first -curl -LsSf https://astral.sh/uv/install.sh | sh - -# Install dependencies -uv sync -uv sync --group dev - -# Install pre-commit hooks -uv run pre-commit install -``` - -### Development Workflow -```bash -# Start server in development mode (visible browser, immediate login) -uv run main.py --no-headless --no-lazy-init - -# Start server command for MCP client configurations -uv run main.py --no-setup - -# For debugging, show browser and login immediately -uv run main.py --no-headless --no-lazy-init --debug - -# Run linting -uv run ruff check . -uv run ruff check --fix . - -# Run formatting -uv run ruff format . - -# Check dependencies -uv sync --reinstall -``` - -### Docker Development -```bash -# Build local Docker image -docker build -t linkedin-mcp-server . 
- -# Run with environment variables -docker run -i --rm \ - -e LINKEDIN_EMAIL="your-email" \ - -e LINKEDIN_PASSWORD="your-password" \ - linkedin-mcp-server -``` - -## Publishing & Release Commands - -### Docker Hub Publishing -```bash -# Build and tag for Docker Hub -docker build -t stickerdaniel/linkedin-mcp-server:latest . -docker build -t stickerdaniel/linkedin-mcp-server:v1.0.0 . - -# Push to Docker Hub -docker push stickerdaniel/linkedin-mcp-server:latest -docker push stickerdaniel/linkedin-mcp-server:v1.0.0 -``` - -### DXT Package Creation -```bash -# Package DXT extension (Desktop Extension for Claude Desktop installation) -bunx @anthropic-ai/dxt pack -``` -# This creates linkedin-mcp-server.dxt file based on manifest.json. Specifications: -- https://github.com/anthropics/dxt/blob/main/README.md - DXT architecture overview, capabilities, and integration patterns -- https://github.com/anthropics/dxt/blob/main/MANIFEST.md - Complete extension manifest structure and field definitions -- https://github.com/anthropics/dxt/tree/main/examples - Reference implementations including a "Hello World" example - - -### GitHub Release -```bash -# Create GitHub release with DXT file -gh release create v1.0.0 linkedin-mcp-server.dxt \ - --title "๐Ÿ“ฆ v1.0.0 - Claude Desktop DXT Extension" \ - --notes "Initial DXT extension release for Claude Desktop users. - -## Claude Desktop DXT Extension -This release contains the `.dxt` extension file for Claude Desktop installation. - -**Installation:** -1. Download the \`linkedin-mcp-server.dxt\` file -2. Double-click to open in Claude Desktop -3. Configure with your LinkedIn credentials - -**Prerequisites:** -- Claude Desktop application -- Docker installed and running - -For other MCP clients, refer to the [Docker setup guide](https://github.com/stickerdaniel/linkedin-mcp-server#-docker-setup-recommended---universal)." 
- -# List releases -gh release list - -# View specific release -gh release view v1.0.0 -``` - -## Architecture Overview - -This is a Model Context Protocol (MCP) server for LinkedIn integration with the following key architecture: - -### Core Components -- **Entry Point**: `main.py` - Handles initialization, CLI args, and transport setup -- **MCP Server**: `linkedin_mcp_server/server.py` - FastMCP-based server implementation with tool registration -- **Driver Management**: `linkedin_mcp_server/drivers/chrome.py` - Selenium WebDriver session management with LinkedIn authentication -- **Configuration System**: `linkedin_mcp_server/config/` - Layered configuration with CLI args โ†’ env vars โ†’ defaults - -### Tool Implementation -- **Person Tools**: `linkedin_mcp_server/tools/person.py` - Profile scraping (`get_person_profile`) -- **Company Tools**: `linkedin_mcp_server/tools/company.py` - Company analysis (`get_company_profile`) -- **Job Tools**: `linkedin_mcp_server/tools/job.py` - Job details and search (`get_job_details`, `search_jobs`, `get_recommended_jobs`) - -### Configuration Layers (Priority Order) -1. Command line arguments (highest) -2. Environment variables (`LINKEDIN_EMAIL`, `LINKEDIN_PASSWORD`, `CHROMEDRIVER_PATH`) -3. System keyring (secure credential storage) -4. Interactive prompts (development) -5. 
Auto-detection (ChromeDriver path) - -### Key Design Patterns -- **Singleton Driver**: Global WebDriver instance reused across all tools for session persistence -- **Lazy Initialization**: Driver and login only created when first tool is called (unless `--no-lazy-init`) -- **Secure Credentials**: System keyring integration with fallback to environment variables -- **Resource Cleanup**: Automatic browser session cleanup on shutdown - -### Distribution Methods -- **Docker Container**: Production deployment with pre-configured Chrome/ChromeDriver -- **Claude Desktop DXT**: One-click extension installation via `manifest.json` -- **Local Development**: UV-based Python environment with manual ChromeDriver setup - -## Important Development Notes - -### Credential Handling -- Credentials are NEVER logged or exposed in error messages -- Use system keyring for persistent storage in development -- Environment variables for production/CI -- Interactive prompts only in development mode - -### Browser Automation -- ChromeDriver must match Chrome version exactly -- Auto-detection checks common paths: `/usr/local/bin/chromedriver`, `/usr/bin/chromedriver`, etc. 
-- Use `--no-headless` for debugging browser automation issues -- LinkedIn login happens automatically with retry logic and 2FA support - -### Tool Development -- All tools follow FastMCP registration pattern in `server.py` -- Tools reuse the global driver instance for session consistency -- Return structured data, not raw HTML -- Handle LinkedIn rate limiting and session expiry gracefully - -### Known Issues -- `search_jobs` and `get_recommended_jobs` have compatibility issues with LinkedIn's current interface -- Some company profiles may be restricted and return empty results -- ChromeDriver version mismatches cause common setup issues - -### Code Quality Standards -- Use UV package manager (not pip/conda) -- Follow commit message format: `type(scope): subject` (see `.cursor/rules/commit-message-instructions.mdc`) -- Run `ruff check --fix .` before committing -- Keep Python 3.12+ compatibility -- All new dependencies must be added to `pyproject.toml` via `uv add` - -### Testing LinkedIn Integration -- Test with personal LinkedIn account first -- Use `--no-headless --debug` to watch browser automation -- LinkedIn may require mobile app confirmation for new login locations -- Session persists across tool calls for performance - -### CLI Tool Integration -Use `linkedin_mcp_server/cli.py` to generate Claude Desktop configuration automatically and copy to clipboard for easy setup. diff --git a/README.md b/README.md index b740e895..86d36f94 100644 --- a/README.md +++ b/README.md @@ -116,8 +116,6 @@ docker run -i --rm \ ## ๐Ÿ Local Setup (Develop & Contribute) -**For contributors** who want to modify and debug the code. 
- **Prerequisites:** [Chrome browser](https://www.google.com/chrome/) and [Git](https://git-scm.com/downloads) installed **ChromeDriver Setup:** From d50d7355fbd35eadb1119f33d23b52c49cb503d6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 2 Jul 2025 14:26:24 -0400 Subject: [PATCH 072/565] docs(readme): fix formatting in troubleshooting section for consistency --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 86d36f94..21794306 100644 --- a/README.md +++ b/README.md @@ -84,7 +84,7 @@ docker run -i --rm \ โ— Troubleshooting **Docker issues:** -> Make sure [Docker](https://www.docker.com/get-started/) is installed +- Make sure [Docker](https://www.docker.com/get-started/) is installed - Check if Docker is running: `docker ps` **Login issues:** From 4994abc39a88f1be24d6eda6333662a0538ef430 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 2 Jul 2025 18:49:13 -0400 Subject: [PATCH 073/565] chore(ci): add GitHub Actions workflow for automated release process --- .github/workflows/release.yml | 157 ++++++++++++++++++++++++++++++++++ .vscode/tasks.json | 20 ----- 2 files changed, 157 insertions(+), 20 deletions(-) create mode 100644 .github/workflows/release.yml diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..aadb080b --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,157 @@ +# .github/workflows/release.yml +name: Auto Release + +on: + push: + branches: [main] + paths: ['pyproject.toml'] # Only trigger when pyproject.toml changes + +jobs: + check-version-bump: + runs-on: ubuntu-latest + outputs: + should-release: ${{ steps.check.outputs.should-release }} + new-version: ${{ steps.check.outputs.new-version }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 2 # Need to compare with previous commit + + - name: Set up uv + uses: astral-sh/setup-uv@v2 + + - name: Check if version was bumped + id: check + run: | + # Get 
current version + CURRENT_VERSION=$(uv version | cut -d' ' -f2) + echo "Current version: $CURRENT_VERSION" + + # Get previous version from git (before this commit) + git checkout HEAD~1 -- pyproject.toml || true + PREVIOUS_VERSION=$(uv version | cut -d' ' -f2) 2>/dev/null || echo "0.0.0" + git checkout HEAD -- pyproject.toml + echo "Previous version: $PREVIOUS_VERSION" + + # Check if version actually changed + if [[ "$CURRENT_VERSION" != "$PREVIOUS_VERSION" ]]; then + echo "โœ… Version bump detected: $PREVIOUS_VERSION โ†’ $CURRENT_VERSION" + echo "should-release=true" >> $GITHUB_OUTPUT + echo "new-version=$CURRENT_VERSION" >> $GITHUB_OUTPUT + else + echo "โ„น๏ธ No version change detected" + echo "should-release=false" >> $GITHUB_OUTPUT + fi + + release: + needs: check-version-bump + if: needs.check-version-bump.outputs.should-release == 'true' + runs-on: ubuntu-latest + permissions: + contents: write + packages: write + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up uv + uses: astral-sh/setup-uv@v2 + + - name: Set up Bun + uses: oven-sh/setup-bun@v1 + + - name: Create release tag + env: + VERSION: ${{ needs.check-version-bump.outputs.new-version }} + run: | + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + git tag "v$VERSION" + git push origin "v$VERSION" + echo "โœ… Created and pushed tag v$VERSION" + + - name: Update manifest.json version + env: + VERSION: ${{ needs.check-version-bump.outputs.new-version }} + run: | + sed -i 's/"version": ".*"/"version": "'$VERSION'"/' manifest.json + echo "โœ… Updated manifest.json to version $VERSION" + + - name: Commit manifest update + env: + VERSION: ${{ needs.check-version-bump.outputs.new-version }} + run: | + git add manifest.json + if git diff --staged --quiet; then + echo "No changes to commit" + else + git commit -m "Auto-update manifest.json to v$VERSION [skip ci]" + git push origin main + fi + + - name: Set 
up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push Docker images + env: + VERSION: ${{ needs.check-version-bump.outputs.new-version }} + uses: docker/build-push-action@v5 + with: + context: . + push: true + tags: | + stickerdaniel/linkedin-mcp-server:${{ env.VERSION }} + stickerdaniel/linkedin-mcp-server:latest + platforms: linux/amd64,linux/arm64 + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Set up Bun + uses: oven-sh/setup-bun@v1 + + - name: Build DXT extension + run: bunx @anthropic-ai/dxt pack + + - name: Create GitHub Release + env: + VERSION: ${{ needs.check-version-bump.outputs.new-version }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + uses: softprops/action-gh-release@v1 + with: + tag_name: v${{ env.VERSION }} + files: | + *.dxt + generate_release_notes: true + draft: false + prerelease: false + name: "Release v${{ env.VERSION }}" + body: | + ## LinkedIn MCP Server v${{ env.VERSION }} + + **Docker** + ```bash + docker pull stickerdaniel/linkedin-mcp-server:${{ env.VERSION }} + ``` + + **Claude Desktop DXT Extension** + Download the `.dxt` file below and double-click to install. + + ### What's Changed + See the auto-generated release notes below. + + - name: Summary + env: + VERSION: ${{ needs.check-version-bump.outputs.new-version }} + run: | + echo "Successfully released v$VERSION!" 
+ echo "Docker: stickerdaniel/linkedin-mcp-server:$VERSION" + echo "GitHub: https://github.com/${{ github.repository }}/releases/tag/v$VERSION" diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 7873fa08..4cbf6ef6 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -86,26 +86,6 @@ "focus": false }, "problemMatcher": [] - }, - { - "label": "bunx @anthropic-ai/dxt pack", - "detail": "Bundle the DXT extension using bunx", - "type": "shell", - "command": "bunx", - "args": [ - "@anthropic-ai/dxt", - "pack" - ], - "group": { - "kind": "build", - "isDefault": false - }, - "presentation": { - "reveal": "always", - "panel": "new", - "focus": true - }, - "problemMatcher": [] } ] } From 7971e1f5d9ddd4297a9ec36bc51c0974534ea631 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 2 Jul 2025 18:50:37 -0400 Subject: [PATCH 074/565] chore(version): bump version to 1.0.1 in pyproject.toml and uv.lock to test deployment --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 149879f3..1cec70d8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.0.0" +version = "1.0.1" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 4aaa6cb0..4c6d21ca 100644 --- a/uv.lock +++ b/uv.lock @@ -422,7 +422,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.0.0" +version = "1.0.1" source = { virtual = "." 
} dependencies = [ { name = "httpx" }, From 81a1e563cce2c78a2f3884deea1e8e88399224ea Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 2 Jul 2025 19:02:30 -0400 Subject: [PATCH 075/565] chore(version): bump version to 1.0.2 in pyproject.toml and uv.lock to test build workflow --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1cec70d8..3ba8a8e6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.0.1" +version = "1.0.2" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 4c6d21ca..2db906f4 100644 --- a/uv.lock +++ b/uv.lock @@ -422,7 +422,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.0.1" +version = "1.0.2" source = { virtual = "." } dependencies = [ { name = "httpx" }, From 3e326e66fa208f8929c378a174d636a5dcda4637 Mon Sep 17 00:00:00 2001 From: GitHub Action Date: Wed, 2 Jul 2025 23:02:55 +0000 Subject: [PATCH 076/565] Auto-update manifest.json to v1.0.2 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index ac6b211c..19891d50 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.0.0", + "version": "1.0.2", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From cd239aef0dba4d0604fd97e380670089b5cf0097 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 2 Jul 2025 20:33:50 -0400 Subject: [PATCH 077/565] chore(release): update release notes and installation instructions in workflow --- .github/workflows/release.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index aadb080b..ee3d4c1d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -133,20 +133,20 @@ jobs: generate_release_notes: true draft: false prerelease: false - name: "Release v${{ env.VERSION }}" + name: "LinkedIn MCP Server v${{ env.VERSION }}" body: | - ## LinkedIn MCP Server v${{ env.VERSION }} + For an installation guide, please refer to the [README](https://github.com/stickerdaniel/linkedin-mcp-server/blob/main/README.md). - **Docker** + ## Update Docker Image + **Pull this release's image:** ```bash docker pull stickerdaniel/linkedin-mcp-server:${{ env.VERSION }} ``` - **Claude Desktop DXT Extension** - Download the `.dxt` file below and double-click to install. - - ### What's Changed - See the auto-generated release notes below. + ## Update Claude Desktop DXT Extension + 1. Download the `.dxt` file below + 2. 
Double-click to open in Claude Desktop + 3. Configure with your LinkedIn credentials - name: Summary env: From 8850ecbe9d29dd4c4cbc77a02bfbe076cc01ae69 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 2 Jul 2025 20:34:30 -0400 Subject: [PATCH 078/565] chore(version): bump version to 1.0.3 in pyproject.toml and uv.lock to test release workflow --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3ba8a8e6..55a44a9f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.0.2" +version = "1.0.3" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 2db906f4..830259f5 100644 --- a/uv.lock +++ b/uv.lock @@ -422,7 +422,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.0.2" +version = "1.0.3" source = { virtual = "." } dependencies = [ { name = "httpx" }, From fc1a91f0c412ce0b2bb920fe8e61941614740ab8 Mon Sep 17 00:00:00 2001 From: GitHub Action Date: Thu, 3 Jul 2025 00:34:55 +0000 Subject: [PATCH 079/565] Auto-update manifest.json to v1.0.3 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 19891d50..8fe7268b 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.0.2", + "version": "1.0.3", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From 48f2c31e2d103ccd19efa427d23f3ac4f53f770b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 2 Jul 2025 20:41:02 -0400 Subject: [PATCH 080/565] chore(release): update commit message for release workflow manifest version bump --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ee3d4c1d..d82eb888 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -88,7 +88,7 @@ jobs: if git diff --staged --quiet; then echo "No changes to commit" else - git commit -m "Auto-update manifest.json to v$VERSION [skip ci]" + git commit -m "chore(dxt): update manifest.json version to v$VERSION [skip ci]" git push origin main fi From 8d09d9fc93527d3131db2bf955c3fae153b5541d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 2 Jul 2025 20:46:15 -0400 Subject: [PATCH 081/565] chore(release): remove redundant Bun setup step from release workflow --- .github/workflows/release.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d82eb888..027ccbcd 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -115,9 +115,6 @@ jobs: 
cache-from: type=gha cache-to: type=gha,mode=max - - name: Set up Bun - uses: oven-sh/setup-bun@v1 - - name: Build DXT extension run: bunx @anthropic-ai/dxt pack From 8c5deaf48afa817847336db51c16d1afd34f3370 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 2 Jul 2025 21:03:53 -0400 Subject: [PATCH 082/565] chore(release): update uv and Docker actions, optimize caching, and improve tag creation logic --- .github/workflows/release.yml | 49 ++++++++++++++++++++--------------- 1 file changed, 28 insertions(+), 21 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 027ccbcd..b2297dc3 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -18,7 +18,9 @@ jobs: fetch-depth: 2 # Need to compare with previous commit - name: Set up uv - uses: astral-sh/setup-uv@v2 + uses: astral-sh/setup-uv@v6 + with: + enable-cache: true - name: Check if version was bumped id: check @@ -47,6 +49,8 @@ jobs: needs: check-version-bump if: needs.check-version-bump.outputs.should-release == 'true' runs-on: ubuntu-latest + env: + VERSION: ${{ needs.check-version-bump.outputs.new-version }} permissions: contents: write packages: write @@ -58,38 +62,43 @@ jobs: fetch-depth: 0 - name: Set up uv - uses: astral-sh/setup-uv@v2 + uses: astral-sh/setup-uv@v6 + with: + enable-cache: true - name: Set up Bun uses: oven-sh/setup-bun@v1 - name: Create release tag - env: - VERSION: ${{ needs.check-version-bump.outputs.new-version }} run: | - git config --local user.email "action@github.com" - git config --local user.name "GitHub Action" - git tag "v$VERSION" - git push origin "v$VERSION" - echo "โœ… Created and pushed tag v$VERSION" + set -e + git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config --local user.name "github-actions[bot]" + + if git tag -l "v$VERSION" | grep -q "v$VERSION"; then + echo "โš ๏ธ Tag v$VERSION already exists, skipping tag creation" + else + git tag "v$VERSION" + 
git push origin "v$VERSION" + echo "โœ… Created and pushed tag v$VERSION" + fi - name: Update manifest.json version - env: - VERSION: ${{ needs.check-version-bump.outputs.new-version }} run: | + set -e sed -i 's/"version": ".*"/"version": "'$VERSION'"/' manifest.json echo "โœ… Updated manifest.json to version $VERSION" - name: Commit manifest update - env: - VERSION: ${{ needs.check-version-bump.outputs.new-version }} run: | + set -e git add manifest.json if git diff --staged --quiet; then - echo "No changes to commit" + echo "โ„น๏ธ No changes to commit" else git commit -m "chore(dxt): update manifest.json version to v$VERSION [skip ci]" git push origin main + echo "โœ… Committed manifest.json update" fi - name: Set up Docker Buildx @@ -102,9 +111,7 @@ jobs: password: ${{ secrets.DOCKER_PASSWORD }} - name: Build and push Docker images - env: - VERSION: ${{ needs.check-version-bump.outputs.new-version }} - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: . push: true @@ -115,14 +122,16 @@ jobs: cache-from: type=gha cache-to: type=gha,mode=max + - name: Optimize uv cache for CI + run: uv cache prune --ci + - name: Build DXT extension run: bunx @anthropic-ai/dxt pack - name: Create GitHub Release env: - VERSION: ${{ needs.check-version-bump.outputs.new-version }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2 with: tag_name: v${{ env.VERSION }} files: | @@ -146,8 +155,6 @@ jobs: 3. Configure with your LinkedIn credentials - name: Summary - env: - VERSION: ${{ needs.check-version-bump.outputs.new-version }} run: | echo "Successfully released v$VERSION!" 
echo "Docker: stickerdaniel/linkedin-mcp-server:$VERSION" From c75b7ed58057409373af70c9b8db7ee4490d8045 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 2 Jul 2025 21:04:32 -0400 Subject: [PATCH 083/565] chore(version): bump version to 1.0.4 to test release workflow --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 55a44a9f..3d098423 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.0.3" +version = "1.0.4" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 830259f5..3aa3dedc 100644 --- a/uv.lock +++ b/uv.lock @@ -422,7 +422,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.0.3" +version = "1.0.4" source = { virtual = "." } dependencies = [ { name = "httpx" }, From f528a713798037971c89cd0eebd75d2e493ed435 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 3 Jul 2025 01:04:54 +0000 Subject: [PATCH 084/565] chore(dxt): update manifest.json version to v1.0.4 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 8fe7268b..ae0a632d 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.0.3", + "version": "1.0.4", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From f325586e56dcf3e4dde36c52600ed6674a97978c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 2 Jul 2025 21:16:09 -0400 Subject: [PATCH 085/565] chore(ci): add CI workflow configuration --- .github/workflows/ci.yml | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 .github/workflows/ci.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..fd182d07 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,31 @@ +# .github/workflows/ci.yml +name: CI + +on: + push: + branches: [main] + pull_request: + branches: [main] + +jobs: + lint-and-check: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up uv + uses: astral-sh/setup-uv@v6 + with: + enable-cache: true + + - name: Install dependencies + run: | + uv sync + uv sync --group dev + + - name: Run pre-commit hooks + uses: pre-commit/action@v3.0.1 + + - name: Optimize uv cache for CI + run: uv cache prune --ci From 299f147f3b49403329bd10ffa07cbfe720f37b8e Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 2 Jul 2025 21:18:08 -0400 Subject: [PATCH 086/565] chore(release): refine installation guide wording in release notes --- .github/workflows/release.yml | 2 
+- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b2297dc3..8001cd68 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -141,7 +141,7 @@ jobs: prerelease: false name: "LinkedIn MCP Server v${{ env.VERSION }}" body: | - For an installation guide, please refer to the [README](https://github.com/stickerdaniel/linkedin-mcp-server/blob/main/README.md). + For an installation guide, refer to the [README](https://github.com/stickerdaniel/linkedin-mcp-server/blob/main/README.md). ## Update Docker Image **Pull this release's image:** From 7097ece04264a42c6b7a97f95591498744132ee2 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 2 Jul 2025 21:19:03 -0400 Subject: [PATCH 087/565] chore(version): bump version to 1.0.5 in pyproject.toml and uv.lock --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3d098423..5e113ea1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.0.4" +version = "1.0.5" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 3aa3dedc..4e0e51b4 100644 --- a/uv.lock +++ b/uv.lock @@ -422,7 +422,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.0.4" +version = "1.0.5" source = { virtual = "." 
} dependencies = [ { name = "httpx" }, From fed12db1927ba7393eb5dcc621fa6459566db78d Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 3 Jul 2025 01:19:28 +0000 Subject: [PATCH 088/565] chore(dxt): update manifest.json version to v1.0.5 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index ae0a632d..47f91790 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.0.4", + "version": "1.0.5", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From eda69cf73ff2642b7e89d42434d7de4cb23c2d99 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 3 Jul 2025 21:19:27 -0400 Subject: [PATCH 089/565] feat(mcp): implement http transport support --- .vscode/tasks.json | 30 +++ linkedin_mcp_server/config/loaders.py | 34 ++- linkedin_mcp_server/config/schema.py | 6 +- linkedin_mcp_server/server.py | 2 +- 
linkedin_mcp_server/tools/company.py | 2 +- linkedin_mcp_server/tools/job.py | 2 +- linkedin_mcp_server/tools/person.py | 2 +- main.py | 17 +- pyproject.toml | 3 +- smithery.yaml | 29 --- uv.lock | 315 +++++++++++++++++++++----- 11 files changed, 350 insertions(+), 92 deletions(-) delete mode 100644 smithery.yaml diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 4cbf6ef6..0822f3a2 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -68,6 +68,36 @@ }, "problemMatcher": [] }, + { + "label": "uv run main.py --transport streamable-http --no-setup", + "detail": "Start HTTP MCP server on localhost:8000/mcp", + "type": "shell", + "command": "uv", + "args": [ + "run", + "main.py", + "--transport", + "streamable-http", + "--host", + "127.0.0.1", + "--port", + "8000", + "--path", + "/mcp", + "--no-setup" + ], + "isBackground": true, + "group": { + "kind": "build", + "isDefault": false + }, + "presentation": { + "reveal": "always", + "panel": "new", + "focus": true + }, + "problemMatcher": [] + }, { "label": "uv run tail -n 20 -F ~/Library/Logs/Claude/mcp*.log", "detail": "Follow Claude Desktop logs", diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index 3871d4e9..bd2476be 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -80,9 +80,30 @@ def load_from_args(config: AppConfig) -> AppConfig: parser.add_argument( "--transport", - choices=["stdio", "sse"], + choices=["stdio", "streamable-http"], default=None, - help="Specify the transport mode (stdio or sse)", + help="Specify the transport mode (stdio or streamable-http)", + ) + + parser.add_argument( + "--host", + type=str, + default=None, + help="HTTP server host (default: 127.0.0.1)", + ) + + parser.add_argument( + "--port", + type=int, + default=None, + help="HTTP server port (default: 8000)", + ) + + parser.add_argument( + "--path", + type=str, + default=None, + help="HTTP server path (default: /mcp)", ) 
parser.add_argument( @@ -109,6 +130,15 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.transport: config.server.transport = args.transport + if args.host: + config.server.host = args.host + + if args.port: + config.server.port = args.port + + if args.path: + config.server.path = args.path + if args.chromedriver: config.chrome.chromedriver_path = args.chromedriver diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index 8d92585a..55d912f5 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -26,10 +26,14 @@ class LinkedInConfig: class ServerConfig: """MCP server configuration.""" - transport: Literal["stdio", "sse"] = "stdio" + transport: Literal["stdio", "streamable-http"] = "stdio" lazy_init: bool = True debug: bool = False setup: bool = True + # HTTP transport configuration + host: str = "127.0.0.1" + port: int = 8000 + path: str = "/mcp" @dataclass diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index 8cb959a6..8ec067e8 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -6,7 +6,7 @@ """ from typing import Dict, Any -from mcp.server.fastmcp import FastMCP +from fastmcp import FastMCP from linkedin_mcp_server.drivers.chrome import active_drivers from linkedin_mcp_server.tools.person import register_person_tools diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 071797d2..38e3f23c 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -6,7 +6,7 @@ """ from typing import Dict, Any, List -from mcp.server.fastmcp import FastMCP +from fastmcp import FastMCP from linkedin_scraper import Company from linkedin_mcp_server.drivers.chrome import get_or_create_driver diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 88c3f027..3c3f1180 100644 --- a/linkedin_mcp_server/tools/job.py +++ 
b/linkedin_mcp_server/tools/job.py @@ -8,7 +8,7 @@ from typing import Any, Dict, List from linkedin_scraper import Job, JobSearch -from mcp.server.fastmcp import FastMCP +from fastmcp import FastMCP from linkedin_mcp_server.drivers.chrome import get_or_create_driver diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 70957c93..e917c34b 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -6,7 +6,7 @@ """ from typing import Dict, Any, List -from mcp.server.fastmcp import FastMCP +from fastmcp import FastMCP from linkedin_scraper import Person from linkedin_mcp_server.drivers.chrome import get_or_create_driver diff --git a/main.py b/main.py index 351dc3ea..de886fa4 100644 --- a/main.py +++ b/main.py @@ -15,7 +15,7 @@ from linkedin_mcp_server.server import create_mcp_server, shutdown_handler -def choose_transport_interactive() -> Literal["stdio", "sse"]: +def choose_transport_interactive() -> Literal["stdio", "streamable-http"]: """Prompt user for transport mode using inquirer.""" questions = [ inquirer.List( @@ -23,7 +23,7 @@ def choose_transport_interactive() -> Literal["stdio", "sse"]: message="Choose mcp transport mode", choices=[ ("stdio (Default CLI mode)", "stdio"), - ("sse (Server-Sent Events HTTP mode)", "sse"), + ("streamable-http (HTTP server mode)", "streamable-http"), ], default="stdio", ) @@ -67,7 +67,18 @@ def main() -> None: # Start server print(f"\n๐Ÿš€ Running LinkedIn MCP server ({transport.upper()} mode)...") - mcp.run(transport=transport) + if transport == "streamable-http": + print( + f"๐Ÿ“ก HTTP server will be available at http://{config.server.host}:{config.server.port}{config.server.path}" + ) + mcp.run( + transport=transport, + host=config.server.host, + port=config.server.port, + path=config.server.path, + ) + else: + mcp.run(transport=transport) def exit_gracefully(exit_code: int = 0) -> None: diff --git a/pyproject.toml b/pyproject.toml index 
5e113ea1..db1d7165 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,11 +5,10 @@ description = "MCP server for LinkedIn profile, company, and job scraping with C readme = "README.md" requires-python = ">=3.12" dependencies = [ - "httpx>=0.28.1", + "fastmcp>=2.10.1", "inquirer>=3.4.0", "keyring>=25.6.0", "linkedin-scraper", - "mcp[cli]>=1.6.0", "pyperclip>=1.9.0", ] diff --git a/smithery.yaml b/smithery.yaml deleted file mode 100644 index 548429bd..00000000 --- a/smithery.yaml +++ /dev/null @@ -1,29 +0,0 @@ -# Smithery configuration file: https://smithery.ai/docs/build/project-config -version: 1 -start: - command: - - docker - - run - - -i - - --rm - - -e - - LINKEDIN_EMAIL=${LINKEDIN_EMAIL} - - -e - - LINKEDIN_PASSWORD=${LINKEDIN_PASSWORD} - - stickerdaniel/linkedin-mcp-server -configSchema: - # JSON Schema defining the configuration options for the MCP. - type: object - properties: - LINKEDIN_EMAIL: - type: string - description: Email for LinkedIn login - LINKEDIN_PASSWORD: - type: string - description: Password for LinkedIn login - required: - - LINKEDIN_EMAIL - - LINKEDIN_PASSWORD -exampleConfig: - LINKEDIN_EMAIL: example.user@example.com - LINKEDIN_PASSWORD: yourLinkedInPassword diff --git a/uv.lock b/uv.lock index 4e0e51b4..f93975a1 100644 --- a/uv.lock +++ b/uv.lock @@ -43,6 +43,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, ] +[[package]] +name = "authlib" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a2/9d/b1e08d36899c12c8b894a44a5583ee157789f26fc4b176f8e4b6217b56e1/authlib-1.6.0.tar.gz", hash = "sha256:4367d32031b7af175ad3a323d571dc7257b7099d55978087ceae4a0d88cd3210", 
size = 158371, upload-time = "2025-05-23T00:21:45.011Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/29/587c189bbab1ccc8c86a03a5d0e13873df916380ef1be461ebe6acebf48d/authlib-1.6.0-py2.py3-none-any.whl", hash = "sha256:91685589498f79e8655e8a8947431ad6288831d643f11c55c2143ffcc738048d", size = 239981, upload-time = "2025-05-23T00:21:43.075Z" }, +] + [[package]] name = "blessed" version = "1.20.0" @@ -75,6 +87,8 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" }, { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, @@ -84,6 +98,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = 
"2024-09-04T20:44:30.289Z" }, { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, @@ -211,6 +227,7 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/53/d6/1411ab4d6108ab167d06254c5be517681f1e331f90edf1379895bcb87020/cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053", size = 711096, upload-time = "2025-05-02T19:36:04.667Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/08/53/c776d80e9d26441bb3868457909b4e74dd9ccabd182e10b2b0ae7a07e265/cryptography-44.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88", size = 6670281, upload-time = "2025-05-02T19:34:50.665Z" }, { url = "https://files.pythonhosted.org/packages/6a/06/af2cf8d56ef87c77319e9086601bef621bedf40f6f59069e1b6d1ec498c5/cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137", 
size = 3959305, upload-time = "2025-05-02T19:34:53.042Z" }, { url = "https://files.pythonhosted.org/packages/ae/01/80de3bec64627207d030f47bf3536889efee8913cd363e78ca9a09b13c8e/cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c", size = 4171040, upload-time = "2025-05-02T19:34:54.675Z" }, { url = "https://files.pythonhosted.org/packages/bd/48/bb16b7541d207a19d9ae8b541c70037a05e473ddc72ccb1386524d4f023c/cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76", size = 3963411, upload-time = "2025-05-02T19:34:56.61Z" }, @@ -220,6 +237,9 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b6/f7/7cb5488c682ca59a02a32ec5f975074084db4c983f849d47b7b67cc8697a/cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d", size = 4196173, upload-time = "2025-05-02T19:35:05.018Z" }, { url = "https://files.pythonhosted.org/packages/d2/0b/2f789a8403ae089b0b121f8f54f4a3e5228df756e2146efdf4a09a3d5083/cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904", size = 4087713, upload-time = "2025-05-02T19:35:07.187Z" }, { url = "https://files.pythonhosted.org/packages/1d/aa/330c13655f1af398fc154089295cf259252f0ba5df93b4bc9d9c7d7f843e/cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44", size = 4299064, upload-time = "2025-05-02T19:35:08.879Z" }, + { url = "https://files.pythonhosted.org/packages/10/a8/8c540a421b44fd267a7d58a1fd5f072a552d72204a3f08194f98889de76d/cryptography-44.0.3-cp37-abi3-win32.whl", hash = "sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d", size = 2773887, upload-time = "2025-05-02T19:35:10.41Z" }, + { url 
= "https://files.pythonhosted.org/packages/b9/0d/c4b1657c39ead18d76bbd122da86bd95bdc4095413460d09544000a17d56/cryptography-44.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d", size = 3209737, upload-time = "2025-05-02T19:35:12.12Z" }, + { url = "https://files.pythonhosted.org/packages/34/a3/ad08e0bcc34ad436013458d7528e83ac29910943cea42ad7dd4141a27bbb/cryptography-44.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f", size = 6673501, upload-time = "2025-05-02T19:35:13.775Z" }, { url = "https://files.pythonhosted.org/packages/b1/f0/7491d44bba8d28b464a5bc8cc709f25a51e3eac54c0a4444cf2473a57c37/cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759", size = 3960307, upload-time = "2025-05-02T19:35:15.917Z" }, { url = "https://files.pythonhosted.org/packages/f7/c8/e5c5d0e1364d3346a5747cdcd7ecbb23ca87e6dea4f942a44e88be349f06/cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645", size = 4170876, upload-time = "2025-05-02T19:35:18.138Z" }, { url = "https://files.pythonhosted.org/packages/73/96/025cb26fc351d8c7d3a1c44e20cf9a01e9f7cf740353c9c7a17072e4b264/cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2", size = 3964127, upload-time = "2025-05-02T19:35:19.864Z" }, @@ -229,6 +249,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/35/6e/dca39d553075980ccb631955c47b93d87d27f3596da8d48b1ae81463d915/cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f", size = 4197398, upload-time = "2025-05-02T19:35:27.678Z" }, { url = 
"https://files.pythonhosted.org/packages/9b/9d/d1f2fe681eabc682067c66a74addd46c887ebacf39038ba01f8860338d3d/cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5", size = 4087900, upload-time = "2025-05-02T19:35:29.312Z" }, { url = "https://files.pythonhosted.org/packages/c4/f5/3599e48c5464580b73b236aafb20973b953cd2e7b44c7c2533de1d888446/cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b", size = 4301067, upload-time = "2025-05-02T19:35:31.547Z" }, + { url = "https://files.pythonhosted.org/packages/a7/6c/d2c48c8137eb39d0c193274db5c04a75dab20d2f7c3f81a7dcc3a8897701/cryptography-44.0.3-cp39-abi3-win32.whl", hash = "sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028", size = 2775467, upload-time = "2025-05-02T19:35:33.805Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ad/51f212198681ea7b0deaaf8846ee10af99fba4e894f67b353524eab2bbe5/cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334", size = 3210375, upload-time = "2025-05-02T19:35:35.369Z" }, ] [[package]] @@ -240,6 +262,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973, upload-time = "2024-10-09T18:35:44.272Z" }, ] +[[package]] +name = "dnspython" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, +] + [[package]] name = "editor" version = "1.6.6" @@ -253,6 +284,51 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1b/c2/4bc8cd09b14e28ce3f406a8b05761bed0d785d1ca8c2a5c6684d884c66a2/editor-1.6.6-py3-none-any.whl", hash = "sha256:e818e6913f26c2a81eadef503a2741d7cca7f235d20e217274a009ecd5a74abf", size = 4017, upload-time = "2024-01-25T10:44:58.66Z" }, ] +[[package]] +name = "email-validator" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967, upload-time = "2024-06-20T11:30:30.034Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521, upload-time = "2024-06-20T11:30:28.248Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "fastmcp" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "authlib" }, + { name = "exceptiongroup" }, + { name = "httpx" }, + { name = "mcp" }, + { name = "openapi-pydantic" }, + { name = "pydantic", extra = ["email"] }, + { name = "python-dotenv" }, + { name = "rich" }, + { name = "typer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/1f/0031ea07bcad9f9b38d3500772d2749ca2b16335b92bd012f1d2f86a853e/fastmcp-2.10.1.tar.gz", hash = "sha256:450c72e523926a2203c7eecdb4a8b0507506667bc8736b8b7bb44f6312424649", size = 2730387, upload-time = "2025-07-02T04:57:24.981Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/a2/52ef74287ec5fe0e5a0ffedde7d0809da5ec3ac85f4e3f2ed5587b39471a/fastmcp-2.10.1-py3-none-any.whl", hash = "sha256:17d0acea04eeb3464c9eca42b6774fb06b38b72cface9af6a7482b3aa561db13", size = 182108, upload-time = "2025-07-02T04:57:23.529Z" }, +] + [[package]] name = "filelock" version = "3.18.0" @@ -403,6 +479,33 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/27/e3/0e0014d6ab159d48189e92044ace13b1e1fe9aa3024ba9f4e8cf172aa7c2/jinxed-1.3.0-py2.py3-none-any.whl", hash = "sha256:b993189f39dc2d7504d802152671535b06d380b26d78070559551cbf92df4fc5", size = 33085, upload-time = "2024-07-31T22:39:17.426Z" }, ] +[[package]] +name = "jsonschema" +version = "4.24.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/bf/d3/1cf5326b923a53515d8f3a2cd442e6d7e94fcc444716e879ea70a0ce3177/jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196", size = 353480, upload-time = "2025-05-26T18:48:10.459Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/3d/023389198f69c722d039351050738d6755376c8fd343e91dc493ea485905/jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d", size = 88709, upload-time = "2025-05-26T18:48:08.417Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513, upload-time = "2025-04-23T12:34:07.418Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" }, +] + [[package]] name = "keyring" version = "25.6.0" @@ -425,11 +528,10 @@ name = "linkedin-mcp-server" version = "1.0.5" source = { virtual = "." 
} dependencies = [ - { name = "httpx" }, + { name = "fastmcp" }, { name = "inquirer" }, { name = "keyring" }, { name = "linkedin-scraper" }, - { name = "mcp", extra = ["cli"] }, { name = "pyperclip" }, ] @@ -444,11 +546,10 @@ dev = [ [package.metadata] requires-dist = [ - { name = "httpx", specifier = ">=0.28.1" }, + { name = "fastmcp", specifier = ">=2.10.1" }, { name = "inquirer", specifier = ">=3.4.0" }, { name = "keyring", specifier = ">=25.6.0" }, { name = "linkedin-scraper", git = "https://github.com/joeyism/linkedin_scraper.git" }, - { name = "mcp", extras = ["cli"], specifier = ">=1.6.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, ] @@ -527,27 +628,23 @@ wheels = [ [[package]] name = "mcp" -version = "1.6.0" +version = "1.10.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "httpx" }, { name = "httpx-sse" }, + { name = "jsonschema" }, { name = "pydantic" }, { name = "pydantic-settings" }, + { name = "python-multipart" }, { name = "sse-starlette" }, { name = "starlette" }, - { name = "uvicorn" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/d2/f587cb965a56e992634bebc8611c5b579af912b74e04eb9164bd49527d21/mcp-1.6.0.tar.gz", hash = "sha256:d9324876de2c5637369f43161cd71eebfd803df5a95e46225cab8d280e366723", size = 200031, upload-time = "2025-03-27T16:46:32.336Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/68/63045305f29ff680a9cd5be360c755270109e6b76f696ea6824547ddbc30/mcp-1.10.1.tar.gz", hash = "sha256:aaa0957d8307feeff180da2d9d359f2b801f35c0c67f1882136239055ef034c2", size = 392969, upload-time = "2025-06-27T12:03:08.982Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/10/30/20a7f33b0b884a9d14dd3aa94ff1ac9da1479fe2ad66dd9e2736075d2506/mcp-1.6.0-py3-none-any.whl", hash = "sha256:7bd24c6ea042dbec44c754f100984d186620d8b841ec30f1b19eda9b93a634d0", size = 76077, upload-time = 
"2025-03-27T16:46:29.919Z" }, -] - -[package.optional-dependencies] -cli = [ - { name = "python-dotenv" }, - { name = "typer" }, + { url = "https://files.pythonhosted.org/packages/d7/3f/435a5b3d10ae242a9d6c2b33175551173c3c61fe637dc893be05c4ed0aaf/mcp-1.10.1-py3-none-any.whl", hash = "sha256:4d08301aefe906dce0fa482289db55ce1db831e3e67212e65b5e23ad8454b3c5", size = 150878, upload-time = "2025-06-27T12:03:07.328Z" }, ] [[package]] @@ -577,6 +674,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, ] +[[package]] +name = "openapi-pydantic" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/2e/58d83848dd1a79cb92ed8e63f6ba901ca282c5f09d04af9423ec26c56fd7/openapi_pydantic-0.5.1.tar.gz", hash = "sha256:ff6835af6bde7a459fb93eb93bb92b8749b754fc6e51b2f1590a19dc3005ee0d", size = 60892, upload-time = "2025-01-08T19:29:27.083Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/cf/03675d8bd8ecbf4445504d8071adab19f5f993676795708e36402ab38263/openapi_pydantic-0.5.1-py3-none-any.whl", hash = "sha256:a3a09ef4586f5bd760a8df7f43028b60cafb6d9f61de2acba9574766255ab146", size = 96381, upload-time = "2025-01-08T19:29:25.275Z" }, +] + [[package]] name = "outcome" version = "1.3.0.post0" @@ -643,7 +752,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.3" +version = "2.11.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -651,51 +760,56 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/10/2e/ca897f093ee6c5f3b0bee123ee4465c50e75431c3d5b6a3b44a47134e891/pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3", size = 785513, upload-time = "2025-04-08T13:27:06.399Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/1d/407b29780a289868ed696d1616f4aad49d6388e5a77f567dcd2629dcd7b8/pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f", size = 443591, upload-time = "2025-04-08T13:27:03.789Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, +] + +[package.optional-dependencies] +email = [ + { name = "email-validator" }, ] [[package]] name = "pydantic-core" -version = "2.33.1" +version = "2.33.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/17/19/ed6a078a5287aea7922de6841ef4c06157931622c89c2a47940837b5eecd/pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df", size = 434395, upload-time = "2025-04-02T09:49:41.8Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/ce/3cb22b07c29938f97ff5f5bb27521f95e2ebec399b882392deb68d6c440e/pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8", size = 
2026640, upload-time = "2025-04-02T09:47:25.394Z" }, - { url = "https://files.pythonhosted.org/packages/19/78/f381d643b12378fee782a72126ec5d793081ef03791c28a0fd542a5bee64/pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498", size = 1852649, upload-time = "2025-04-02T09:47:27.417Z" }, - { url = "https://files.pythonhosted.org/packages/9d/2b/98a37b80b15aac9eb2c6cfc6dbd35e5058a352891c5cce3a8472d77665a6/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939", size = 1892472, upload-time = "2025-04-02T09:47:29.006Z" }, - { url = "https://files.pythonhosted.org/packages/4e/d4/3c59514e0f55a161004792b9ff3039da52448f43f5834f905abef9db6e4a/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d", size = 1977509, upload-time = "2025-04-02T09:47:33.464Z" }, - { url = "https://files.pythonhosted.org/packages/a9/b6/c2c7946ef70576f79a25db59a576bce088bdc5952d1b93c9789b091df716/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e", size = 2128702, upload-time = "2025-04-02T09:47:34.812Z" }, - { url = "https://files.pythonhosted.org/packages/88/fe/65a880f81e3f2a974312b61f82a03d85528f89a010ce21ad92f109d94deb/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3", size = 2679428, upload-time = "2025-04-02T09:47:37.315Z" }, - { url = "https://files.pythonhosted.org/packages/6f/ff/4459e4146afd0462fb483bb98aa2436d69c484737feaceba1341615fb0ac/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d", size = 2008753, upload-time = "2025-04-02T09:47:39.013Z" }, - { url = "https://files.pythonhosted.org/packages/7c/76/1c42e384e8d78452ededac8b583fe2550c84abfef83a0552e0e7478ccbc3/pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b", size = 2114849, upload-time = "2025-04-02T09:47:40.427Z" }, - { url = "https://files.pythonhosted.org/packages/00/72/7d0cf05095c15f7ffe0eb78914b166d591c0eed72f294da68378da205101/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39", size = 2069541, upload-time = "2025-04-02T09:47:42.01Z" }, - { url = "https://files.pythonhosted.org/packages/b3/69/94a514066bb7d8be499aa764926937409d2389c09be0b5107a970286ef81/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a", size = 2239225, upload-time = "2025-04-02T09:47:43.425Z" }, - { url = "https://files.pythonhosted.org/packages/84/b0/e390071eadb44b41f4f54c3cef64d8bf5f9612c92686c9299eaa09e267e2/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db", size = 2248373, upload-time = "2025-04-02T09:47:44.979Z" }, - { url = "https://files.pythonhosted.org/packages/d6/b2/288b3579ffc07e92af66e2f1a11be3b056fe1214aab314748461f21a31c3/pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda", size = 1907034, upload-time = "2025-04-02T09:47:46.843Z" }, - { url = "https://files.pythonhosted.org/packages/02/28/58442ad1c22b5b6742b992ba9518420235adced665513868f99a1c2638a5/pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4", size = 1956848, 
upload-time = "2025-04-02T09:47:48.404Z" }, - { url = "https://files.pythonhosted.org/packages/a1/eb/f54809b51c7e2a1d9f439f158b8dd94359321abcc98767e16fc48ae5a77e/pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea", size = 1903986, upload-time = "2025-04-02T09:47:49.839Z" }, - { url = "https://files.pythonhosted.org/packages/7a/24/eed3466a4308d79155f1cdd5c7432c80ddcc4530ba8623b79d5ced021641/pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a", size = 2033551, upload-time = "2025-04-02T09:47:51.648Z" }, - { url = "https://files.pythonhosted.org/packages/ab/14/df54b1a0bc9b6ded9b758b73139d2c11b4e8eb43e8ab9c5847c0a2913ada/pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266", size = 1852785, upload-time = "2025-04-02T09:47:53.149Z" }, - { url = "https://files.pythonhosted.org/packages/fa/96/e275f15ff3d34bb04b0125d9bc8848bf69f25d784d92a63676112451bfb9/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3", size = 1897758, upload-time = "2025-04-02T09:47:55.006Z" }, - { url = "https://files.pythonhosted.org/packages/b7/d8/96bc536e975b69e3a924b507d2a19aedbf50b24e08c80fb00e35f9baaed8/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a", size = 1986109, upload-time = "2025-04-02T09:47:56.532Z" }, - { url = "https://files.pythonhosted.org/packages/90/72/ab58e43ce7e900b88cb571ed057b2fcd0e95b708a2e0bed475b10130393e/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516", size = 2129159, upload-time = 
"2025-04-02T09:47:58.088Z" }, - { url = "https://files.pythonhosted.org/packages/dc/3f/52d85781406886c6870ac995ec0ba7ccc028b530b0798c9080531b409fdb/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764", size = 2680222, upload-time = "2025-04-02T09:47:59.591Z" }, - { url = "https://files.pythonhosted.org/packages/f4/56/6e2ef42f363a0eec0fd92f74a91e0ac48cd2e49b695aac1509ad81eee86a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d", size = 2006980, upload-time = "2025-04-02T09:48:01.397Z" }, - { url = "https://files.pythonhosted.org/packages/4c/c0/604536c4379cc78359f9ee0aa319f4aedf6b652ec2854953f5a14fc38c5a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4", size = 2120840, upload-time = "2025-04-02T09:48:03.056Z" }, - { url = "https://files.pythonhosted.org/packages/1f/46/9eb764814f508f0edfb291a0f75d10854d78113fa13900ce13729aaec3ae/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde", size = 2072518, upload-time = "2025-04-02T09:48:04.662Z" }, - { url = "https://files.pythonhosted.org/packages/42/e3/fb6b2a732b82d1666fa6bf53e3627867ea3131c5f39f98ce92141e3e3dc1/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e", size = 2248025, upload-time = "2025-04-02T09:48:06.226Z" }, - { url = "https://files.pythonhosted.org/packages/5c/9d/fbe8fe9d1aa4dac88723f10a921bc7418bd3378a567cb5e21193a3c48b43/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd", size = 2254991, upload-time = 
"2025-04-02T09:48:08.114Z" }, - { url = "https://files.pythonhosted.org/packages/aa/99/07e2237b8a66438d9b26482332cda99a9acccb58d284af7bc7c946a42fd3/pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f", size = 1915262, upload-time = "2025-04-02T09:48:09.708Z" }, - { url = "https://files.pythonhosted.org/packages/8a/f4/e457a7849beeed1e5defbcf5051c6f7b3c91a0624dd31543a64fc9adcf52/pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40", size = 1956626, upload-time = "2025-04-02T09:48:11.288Z" }, - { url = "https://files.pythonhosted.org/packages/20/d0/e8d567a7cff7b04e017ae164d98011f1e1894269fe8e90ea187a3cbfb562/pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523", size = 1909590, upload-time = "2025-04-02T09:48:12.861Z" }, - { url = "https://files.pythonhosted.org/packages/ef/fd/24ea4302d7a527d672c5be06e17df16aabfb4e9fdc6e0b345c21580f3d2a/pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d", size = 1812963, upload-time = "2025-04-02T09:48:14.553Z" }, - { url = "https://files.pythonhosted.org/packages/5f/95/4fbc2ecdeb5c1c53f1175a32d870250194eb2fdf6291b795ab08c8646d5d/pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c", size = 1986896, upload-time = "2025-04-02T09:48:16.222Z" }, - { url = "https://files.pythonhosted.org/packages/71/ae/fe31e7f4a62431222d8f65a3bd02e3fa7e6026d154a00818e6d30520ea77/pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18", size = 1931810, upload-time = "2025-04-02T09:48:17.97Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, + { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, + { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, + { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = 
"2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, ] [[package]] @@ -772,6 +886,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload-time = "2025-03-25T10:14:55.034Z" }, ] +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, +] + [[package]] name = "pywin32-ctypes" version = "0.2.3" @@ -816,6 +939,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a9/10/e4b1e0e5b6b6745c8098c275b69bc9d73e9542d5c7da4f137542b499ed44/readchar-4.2.1-py3-none-any.whl", hash = 
"sha256:a769305cd3994bb5fa2764aa4073452dc105a4ec39068ffe6efd3c20c60acc77", size = 9350, upload-time = "2024-11-04T18:28:02.859Z" }, ] +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, +] + [[package]] name = "requests" version = "2.32.3" @@ -844,6 +981,82 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, ] +[[package]] +name = "rpds-py" +version = "0.26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/aa/4456d84bbb54adc6a916fb10c9b374f78ac840337644e4a5eda229c81275/rpds_py-0.26.0.tar.gz", hash = "sha256:20dae58a859b0906f0685642e591056f1e787f3a8b39c8e8749a45dc7d26bdb0", size = 27385, upload-time = "2025-07-01T15:57:13.958Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/86/90eb87c6f87085868bd077c7a9938006eb1ce19ed4d06944a90d3560fce2/rpds_py-0.26.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:894514d47e012e794f1350f076c427d2347ebf82f9b958d554d12819849a369d", size = 363933, upload-time = "2025-07-01T15:54:15.734Z" }, + { url = "https://files.pythonhosted.org/packages/63/78/4469f24d34636242c924626082b9586f064ada0b5dbb1e9d096ee7a8e0c6/rpds_py-0.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc921b96fa95a097add244da36a1d9e4f3039160d1d30f1b35837bf108c21136", size = 350447, upload-time = "2025-07-01T15:54:16.922Z" }, + { url = "https://files.pythonhosted.org/packages/ad/91/c448ed45efdfdade82348d5e7995e15612754826ea640afc20915119734f/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1157659470aa42a75448b6e943c895be8c70531c43cb78b9ba990778955582", size = 384711, upload-time = "2025-07-01T15:54:18.101Z" }, + { url = "https://files.pythonhosted.org/packages/ec/43/e5c86fef4be7f49828bdd4ecc8931f0287b1152c0bb0163049b3218740e7/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:521ccf56f45bb3a791182dc6b88ae5f8fa079dd705ee42138c76deb1238e554e", size = 400865, upload-time = "2025-07-01T15:54:19.295Z" }, + { url = "https://files.pythonhosted.org/packages/55/34/e00f726a4d44f22d5c5fe2e5ddd3ac3d7fd3f74a175607781fbdd06fe375/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9def736773fd56b305c0eef698be5192c77bfa30d55a0e5885f80126c4831a15", size = 517763, upload-time = "2025-07-01T15:54:20.858Z" }, + { url = "https://files.pythonhosted.org/packages/52/1c/52dc20c31b147af724b16104500fba13e60123ea0334beba7b40e33354b4/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdad4ea3b4513b475e027be79e5a0ceac8ee1c113a1a11e5edc3c30c29f964d8", size = 406651, upload-time = "2025-07-01T15:54:22.508Z" }, + { url = "https://files.pythonhosted.org/packages/2e/77/87d7bfabfc4e821caa35481a2ff6ae0b73e6a391bb6b343db2c91c2b9844/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:82b165b07f416bdccf5c84546a484cc8f15137ca38325403864bfdf2b5b72f6a", size = 386079, upload-time = "2025-07-01T15:54:23.987Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d4/7f2200c2d3ee145b65b3cddc4310d51f7da6a26634f3ac87125fd789152a/rpds_py-0.26.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d04cab0a54b9dba4d278fe955a1390da3cf71f57feb78ddc7cb67cbe0bd30323", size = 421379, upload-time = "2025-07-01T15:54:25.073Z" }, + { url = "https://files.pythonhosted.org/packages/ae/13/9fdd428b9c820869924ab62236b8688b122baa22d23efdd1c566938a39ba/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:79061ba1a11b6a12743a2b0f72a46aa2758613d454aa6ba4f5a265cc48850158", size = 562033, upload-time = "2025-07-01T15:54:26.225Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e1/b69686c3bcbe775abac3a4c1c30a164a2076d28df7926041f6c0eb5e8d28/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f405c93675d8d4c5ac87364bb38d06c988e11028a64b52a47158a355079661f3", size = 591639, upload-time = "2025-07-01T15:54:27.424Z" }, + { url = "https://files.pythonhosted.org/packages/5c/c9/1e3d8c8863c84a90197ac577bbc3d796a92502124c27092413426f670990/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dafd4c44b74aa4bed4b250f1aed165b8ef5de743bcca3b88fc9619b6087093d2", size = 557105, upload-time = "2025-07-01T15:54:29.93Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c5/90c569649057622959f6dcc40f7b516539608a414dfd54b8d77e3b201ac0/rpds_py-0.26.0-cp312-cp312-win32.whl", hash = "sha256:3da5852aad63fa0c6f836f3359647870e21ea96cf433eb393ffa45263a170d44", size = 223272, upload-time = "2025-07-01T15:54:31.128Z" }, + { url = "https://files.pythonhosted.org/packages/7d/16/19f5d9f2a556cfed454eebe4d354c38d51c20f3db69e7b4ce6cff904905d/rpds_py-0.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf47cfdabc2194a669dcf7a8dbba62e37a04c5041d2125fae0233b720da6f05c", size = 234995, upload-time = "2025-07-01T15:54:32.195Z" }, + 
{ url = "https://files.pythonhosted.org/packages/83/f0/7935e40b529c0e752dfaa7880224771b51175fce08b41ab4a92eb2fbdc7f/rpds_py-0.26.0-cp312-cp312-win_arm64.whl", hash = "sha256:20ab1ae4fa534f73647aad289003f1104092890849e0266271351922ed5574f8", size = 223198, upload-time = "2025-07-01T15:54:33.271Z" }, + { url = "https://files.pythonhosted.org/packages/6a/67/bb62d0109493b12b1c6ab00de7a5566aa84c0e44217c2d94bee1bd370da9/rpds_py-0.26.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:696764a5be111b036256c0b18cd29783fab22154690fc698062fc1b0084b511d", size = 363917, upload-time = "2025-07-01T15:54:34.755Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f3/34e6ae1925a5706c0f002a8d2d7f172373b855768149796af87bd65dcdb9/rpds_py-0.26.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e6c15d2080a63aaed876e228efe4f814bc7889c63b1e112ad46fdc8b368b9e1", size = 350073, upload-time = "2025-07-01T15:54:36.292Z" }, + { url = "https://files.pythonhosted.org/packages/75/83/1953a9d4f4e4de7fd0533733e041c28135f3c21485faaef56a8aadbd96b5/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390e3170babf42462739a93321e657444f0862c6d722a291accc46f9d21ed04e", size = 384214, upload-time = "2025-07-01T15:54:37.469Z" }, + { url = "https://files.pythonhosted.org/packages/48/0e/983ed1b792b3322ea1d065e67f4b230f3b96025f5ce3878cc40af09b7533/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7da84c2c74c0f5bc97d853d9e17bb83e2dcafcff0dc48286916001cc114379a1", size = 400113, upload-time = "2025-07-01T15:54:38.954Z" }, + { url = "https://files.pythonhosted.org/packages/69/7f/36c0925fff6f660a80be259c5b4f5e53a16851f946eb080351d057698528/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c5fe114a6dd480a510b6d3661d09d67d1622c4bf20660a474507aaee7eeeee9", size = 515189, upload-time = "2025-07-01T15:54:40.57Z" }, + { url = 
"https://files.pythonhosted.org/packages/13/45/cbf07fc03ba7a9b54662c9badb58294ecfb24f828b9732970bd1a431ed5c/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3100b3090269f3a7ea727b06a6080d4eb7439dca4c0e91a07c5d133bb1727ea7", size = 406998, upload-time = "2025-07-01T15:54:43.025Z" }, + { url = "https://files.pythonhosted.org/packages/6c/b0/8fa5e36e58657997873fd6a1cf621285ca822ca75b4b3434ead047daa307/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c03c9b0c64afd0320ae57de4c982801271c0c211aa2d37f3003ff5feb75bb04", size = 385903, upload-time = "2025-07-01T15:54:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f7/b25437772f9f57d7a9fbd73ed86d0dcd76b4c7c6998348c070d90f23e315/rpds_py-0.26.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5963b72ccd199ade6ee493723d18a3f21ba7d5b957017607f815788cef50eaf1", size = 419785, upload-time = "2025-07-01T15:54:46.043Z" }, + { url = "https://files.pythonhosted.org/packages/a7/6b/63ffa55743dfcb4baf2e9e77a0b11f7f97ed96a54558fcb5717a4b2cd732/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da4e873860ad5bab3291438525cae80169daecbfafe5657f7f5fb4d6b3f96b9", size = 561329, upload-time = "2025-07-01T15:54:47.64Z" }, + { url = "https://files.pythonhosted.org/packages/2f/07/1f4f5e2886c480a2346b1e6759c00278b8a69e697ae952d82ae2e6ee5db0/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5afaddaa8e8c7f1f7b4c5c725c0070b6eed0228f705b90a1732a48e84350f4e9", size = 590875, upload-time = "2025-07-01T15:54:48.9Z" }, + { url = "https://files.pythonhosted.org/packages/cc/bc/e6639f1b91c3a55f8c41b47d73e6307051b6e246254a827ede730624c0f8/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4916dc96489616a6f9667e7526af8fa693c0fdb4f3acb0e5d9f4400eb06a47ba", size = 556636, upload-time = "2025-07-01T15:54:50.619Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/4c/b3917c45566f9f9a209d38d9b54a1833f2bb1032a3e04c66f75726f28876/rpds_py-0.26.0-cp313-cp313-win32.whl", hash = "sha256:2a343f91b17097c546b93f7999976fd6c9d5900617aa848c81d794e062ab302b", size = 222663, upload-time = "2025-07-01T15:54:52.023Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0b/0851bdd6025775aaa2365bb8de0697ee2558184c800bfef8d7aef5ccde58/rpds_py-0.26.0-cp313-cp313-win_amd64.whl", hash = "sha256:0a0b60701f2300c81b2ac88a5fb893ccfa408e1c4a555a77f908a2596eb875a5", size = 234428, upload-time = "2025-07-01T15:54:53.692Z" }, + { url = "https://files.pythonhosted.org/packages/ed/e8/a47c64ed53149c75fb581e14a237b7b7cd18217e969c30d474d335105622/rpds_py-0.26.0-cp313-cp313-win_arm64.whl", hash = "sha256:257d011919f133a4746958257f2c75238e3ff54255acd5e3e11f3ff41fd14256", size = 222571, upload-time = "2025-07-01T15:54:54.822Z" }, + { url = "https://files.pythonhosted.org/packages/89/bf/3d970ba2e2bcd17d2912cb42874107390f72873e38e79267224110de5e61/rpds_py-0.26.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:529c8156d7506fba5740e05da8795688f87119cce330c244519cf706a4a3d618", size = 360475, upload-time = "2025-07-01T15:54:56.228Z" }, + { url = "https://files.pythonhosted.org/packages/82/9f/283e7e2979fc4ec2d8ecee506d5a3675fce5ed9b4b7cb387ea5d37c2f18d/rpds_py-0.26.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f53ec51f9d24e9638a40cabb95078ade8c99251945dad8d57bf4aabe86ecee35", size = 346692, upload-time = "2025-07-01T15:54:58.561Z" }, + { url = "https://files.pythonhosted.org/packages/e3/03/7e50423c04d78daf391da3cc4330bdb97042fc192a58b186f2d5deb7befd/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab504c4d654e4a29558eaa5bb8cea5fdc1703ea60a8099ffd9c758472cf913f", size = 379415, upload-time = "2025-07-01T15:54:59.751Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/00/d11ee60d4d3b16808432417951c63df803afb0e0fc672b5e8d07e9edaaae/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd0641abca296bc1a00183fe44f7fced8807ed49d501f188faa642d0e4975b83", size = 391783, upload-time = "2025-07-01T15:55:00.898Z" }, + { url = "https://files.pythonhosted.org/packages/08/b3/1069c394d9c0d6d23c5b522e1f6546b65793a22950f6e0210adcc6f97c3e/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b312fecc1d017b5327afa81d4da1480f51c68810963a7336d92203dbb3d4f1", size = 512844, upload-time = "2025-07-01T15:55:02.201Z" }, + { url = "https://files.pythonhosted.org/packages/08/3b/c4fbf0926800ed70b2c245ceca99c49f066456755f5d6eb8863c2c51e6d0/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c741107203954f6fc34d3066d213d0a0c40f7bb5aafd698fb39888af277c70d8", size = 402105, upload-time = "2025-07-01T15:55:03.698Z" }, + { url = "https://files.pythonhosted.org/packages/1c/b0/db69b52ca07413e568dae9dc674627a22297abb144c4d6022c6d78f1e5cc/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3e55a7db08dc9a6ed5fb7103019d2c1a38a349ac41901f9f66d7f95750942f", size = 383440, upload-time = "2025-07-01T15:55:05.398Z" }, + { url = "https://files.pythonhosted.org/packages/4c/e1/c65255ad5b63903e56b3bb3ff9dcc3f4f5c3badde5d08c741ee03903e951/rpds_py-0.26.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e851920caab2dbcae311fd28f4313c6953993893eb5c1bb367ec69d9a39e7ed", size = 412759, upload-time = "2025-07-01T15:55:08.316Z" }, + { url = "https://files.pythonhosted.org/packages/e4/22/bb731077872377a93c6e93b8a9487d0406c70208985831034ccdeed39c8e/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dfbf280da5f876d0b00c81f26bedce274e72a678c28845453885a9b3c22ae632", size = 556032, upload-time = "2025-07-01T15:55:09.52Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/8b/393322ce7bac5c4530fb96fc79cc9ea2f83e968ff5f6e873f905c493e1c4/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1cc81d14ddfa53d7f3906694d35d54d9d3f850ef8e4e99ee68bc0d1e5fed9a9c", size = 585416, upload-time = "2025-07-01T15:55:11.216Z" }, + { url = "https://files.pythonhosted.org/packages/49/ae/769dc372211835bf759319a7aae70525c6eb523e3371842c65b7ef41c9c6/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dca83c498b4650a91efcf7b88d669b170256bf8017a5db6f3e06c2bf031f57e0", size = 554049, upload-time = "2025-07-01T15:55:13.004Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f9/4c43f9cc203d6ba44ce3146246cdc38619d92c7bd7bad4946a3491bd5b70/rpds_py-0.26.0-cp313-cp313t-win32.whl", hash = "sha256:4d11382bcaf12f80b51d790dee295c56a159633a8e81e6323b16e55d81ae37e9", size = 218428, upload-time = "2025-07-01T15:55:14.486Z" }, + { url = "https://files.pythonhosted.org/packages/7e/8b/9286b7e822036a4a977f2f1e851c7345c20528dbd56b687bb67ed68a8ede/rpds_py-0.26.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff110acded3c22c033e637dd8896e411c7d3a11289b2edf041f86663dbc791e9", size = 231524, upload-time = "2025-07-01T15:55:15.745Z" }, + { url = "https://files.pythonhosted.org/packages/55/07/029b7c45db910c74e182de626dfdae0ad489a949d84a468465cd0ca36355/rpds_py-0.26.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:da619979df60a940cd434084355c514c25cf8eb4cf9a508510682f6c851a4f7a", size = 364292, upload-time = "2025-07-01T15:55:17.001Z" }, + { url = "https://files.pythonhosted.org/packages/13/d1/9b3d3f986216b4d1f584878dca15ce4797aaf5d372d738974ba737bf68d6/rpds_py-0.26.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ea89a2458a1a75f87caabefe789c87539ea4e43b40f18cff526052e35bbb4fdf", size = 350334, upload-time = "2025-07-01T15:55:18.922Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/98/16d5e7bc9ec715fa9668731d0cf97f6b032724e61696e2db3d47aeb89214/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feac1045b3327a45944e7dcbeb57530339f6b17baff154df51ef8b0da34c8c12", size = 384875, upload-time = "2025-07-01T15:55:20.399Z" }, + { url = "https://files.pythonhosted.org/packages/f9/13/aa5e2b1ec5ab0e86a5c464d53514c0467bec6ba2507027d35fc81818358e/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b818a592bd69bfe437ee8368603d4a2d928c34cffcdf77c2e761a759ffd17d20", size = 399993, upload-time = "2025-07-01T15:55:21.729Z" }, + { url = "https://files.pythonhosted.org/packages/17/03/8021810b0e97923abdbab6474c8b77c69bcb4b2c58330777df9ff69dc559/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a8b0dd8648709b62d9372fc00a57466f5fdeefed666afe3fea5a6c9539a0331", size = 516683, upload-time = "2025-07-01T15:55:22.918Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b1/da8e61c87c2f3d836954239fdbbfb477bb7b54d74974d8f6fcb34342d166/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d3498ad0df07d81112aa6ec6c95a7e7b1ae00929fb73e7ebee0f3faaeabad2f", size = 408825, upload-time = "2025-07-01T15:55:24.207Z" }, + { url = "https://files.pythonhosted.org/packages/38/bc/1fc173edaaa0e52c94b02a655db20697cb5fa954ad5a8e15a2c784c5cbdd/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24a4146ccb15be237fdef10f331c568e1b0e505f8c8c9ed5d67759dac58ac246", size = 387292, upload-time = "2025-07-01T15:55:25.554Z" }, + { url = "https://files.pythonhosted.org/packages/7c/eb/3a9bb4bd90867d21916f253caf4f0d0be7098671b6715ad1cead9fe7bab9/rpds_py-0.26.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9a63785467b2d73635957d32a4f6e73d5e4df497a16a6392fa066b753e87387", size = 420435, upload-time = "2025-07-01T15:55:27.798Z" }, + { 
url = "https://files.pythonhosted.org/packages/cd/16/e066dcdb56f5632713445271a3f8d3d0b426d51ae9c0cca387799df58b02/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:de4ed93a8c91debfd5a047be327b7cc8b0cc6afe32a716bbbc4aedca9e2a83af", size = 562410, upload-time = "2025-07-01T15:55:29.057Z" }, + { url = "https://files.pythonhosted.org/packages/60/22/ddbdec7eb82a0dc2e455be44c97c71c232983e21349836ce9f272e8a3c29/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:caf51943715b12af827696ec395bfa68f090a4c1a1d2509eb4e2cb69abbbdb33", size = 590724, upload-time = "2025-07-01T15:55:30.719Z" }, + { url = "https://files.pythonhosted.org/packages/2c/b4/95744085e65b7187d83f2fcb0bef70716a1ea0a9e5d8f7f39a86e5d83424/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4a59e5bc386de021f56337f757301b337d7ab58baa40174fb150accd480bc953", size = 558285, upload-time = "2025-07-01T15:55:31.981Z" }, + { url = "https://files.pythonhosted.org/packages/37/37/6309a75e464d1da2559446f9c811aa4d16343cebe3dbb73701e63f760caa/rpds_py-0.26.0-cp314-cp314-win32.whl", hash = "sha256:92c8db839367ef16a662478f0a2fe13e15f2227da3c1430a782ad0f6ee009ec9", size = 223459, upload-time = "2025-07-01T15:55:33.312Z" }, + { url = "https://files.pythonhosted.org/packages/d9/6f/8e9c11214c46098b1d1391b7e02b70bb689ab963db3b19540cba17315291/rpds_py-0.26.0-cp314-cp314-win_amd64.whl", hash = "sha256:b0afb8cdd034150d4d9f53926226ed27ad15b7f465e93d7468caaf5eafae0d37", size = 236083, upload-time = "2025-07-01T15:55:34.933Z" }, + { url = "https://files.pythonhosted.org/packages/47/af/9c4638994dd623d51c39892edd9d08e8be8220a4b7e874fa02c2d6e91955/rpds_py-0.26.0-cp314-cp314-win_arm64.whl", hash = "sha256:ca3f059f4ba485d90c8dc75cb5ca897e15325e4e609812ce57f896607c1c0867", size = 223291, upload-time = "2025-07-01T15:55:36.202Z" }, + { url = 
"https://files.pythonhosted.org/packages/4d/db/669a241144460474aab03e254326b32c42def83eb23458a10d163cb9b5ce/rpds_py-0.26.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:5afea17ab3a126006dc2f293b14ffc7ef3c85336cf451564a0515ed7648033da", size = 361445, upload-time = "2025-07-01T15:55:37.483Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2d/133f61cc5807c6c2fd086a46df0eb8f63a23f5df8306ff9f6d0fd168fecc/rpds_py-0.26.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:69f0c0a3df7fd3a7eec50a00396104bb9a843ea6d45fcc31c2d5243446ffd7a7", size = 347206, upload-time = "2025-07-01T15:55:38.828Z" }, + { url = "https://files.pythonhosted.org/packages/05/bf/0e8fb4c05f70273469eecf82f6ccf37248558526a45321644826555db31b/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:801a71f70f9813e82d2513c9a96532551fce1e278ec0c64610992c49c04c2dad", size = 380330, upload-time = "2025-07-01T15:55:40.175Z" }, + { url = "https://files.pythonhosted.org/packages/d4/a8/060d24185d8b24d3923322f8d0ede16df4ade226a74e747b8c7c978e3dd3/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df52098cde6d5e02fa75c1f6244f07971773adb4a26625edd5c18fee906fa84d", size = 392254, upload-time = "2025-07-01T15:55:42.015Z" }, + { url = "https://files.pythonhosted.org/packages/b9/7b/7c2e8a9ee3e6bc0bae26bf29f5219955ca2fbb761dca996a83f5d2f773fe/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bc596b30f86dc6f0929499c9e574601679d0341a0108c25b9b358a042f51bca", size = 516094, upload-time = "2025-07-01T15:55:43.603Z" }, + { url = "https://files.pythonhosted.org/packages/75/d6/f61cafbed8ba1499b9af9f1777a2a199cd888f74a96133d8833ce5eaa9c5/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dfbe56b299cf5875b68eb6f0ebaadc9cac520a1989cac0db0765abfb3709c19", size = 402889, upload-time = "2025-07-01T15:55:45.275Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/19/c8ac0a8a8df2dd30cdec27f69298a5c13e9029500d6d76718130f5e5be10/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac64f4b2bdb4ea622175c9ab7cf09444e412e22c0e02e906978b3b488af5fde8", size = 384301, upload-time = "2025-07-01T15:55:47.098Z" }, + { url = "https://files.pythonhosted.org/packages/41/e1/6b1859898bc292a9ce5776016c7312b672da00e25cec74d7beced1027286/rpds_py-0.26.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:181ef9b6bbf9845a264f9aa45c31836e9f3c1f13be565d0d010e964c661d1e2b", size = 412891, upload-time = "2025-07-01T15:55:48.412Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b9/ceb39af29913c07966a61367b3c08b4f71fad841e32c6b59a129d5974698/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:49028aa684c144ea502a8e847d23aed5e4c2ef7cadfa7d5eaafcb40864844b7a", size = 557044, upload-time = "2025-07-01T15:55:49.816Z" }, + { url = "https://files.pythonhosted.org/packages/2f/27/35637b98380731a521f8ec4f3fd94e477964f04f6b2f8f7af8a2d889a4af/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e5d524d68a474a9688336045bbf76cb0def88549c1b2ad9dbfec1fb7cfbe9170", size = 585774, upload-time = "2025-07-01T15:55:51.192Z" }, + { url = "https://files.pythonhosted.org/packages/52/d9/3f0f105420fecd18551b678c9a6ce60bd23986098b252a56d35781b3e7e9/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c1851f429b822831bd2edcbe0cfd12ee9ea77868f8d3daf267b189371671c80e", size = 554886, upload-time = "2025-07-01T15:55:52.541Z" }, + { url = "https://files.pythonhosted.org/packages/6b/c5/347c056a90dc8dd9bc240a08c527315008e1b5042e7a4cf4ac027be9d38a/rpds_py-0.26.0-cp314-cp314t-win32.whl", hash = "sha256:7bdb17009696214c3b66bb3590c6d62e14ac5935e53e929bcdbc5a495987a84f", size = 219027, upload-time = "2025-07-01T15:55:53.874Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/04/5302cea1aa26d886d34cadbf2dc77d90d7737e576c0065f357b96dc7a1a6/rpds_py-0.26.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f14440b9573a6f76b4ee4770c13f0b5921f71dde3b6fcb8dabbefd13b7fe05d7", size = 232821, upload-time = "2025-07-01T15:55:55.167Z" }, +] + [[package]] name = "ruff" version = "0.11.11" From 3c4004aa5e16d5b410094fb83d88740d658ed5b5 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 3 Jul 2025 21:35:24 -0400 Subject: [PATCH 090/565] feat(smithery): add Dockerfile and main server implementation for HTTP transport --- Dockerfile.smithery | 31 +++++++++++++ smithery.yaml | 20 +++++++++ smithery_main.py | 105 ++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 156 insertions(+) create mode 100644 Dockerfile.smithery create mode 100644 smithery.yaml create mode 100644 smithery_main.py diff --git a/Dockerfile.smithery b/Dockerfile.smithery new file mode 100644 index 00000000..27c2aa09 --- /dev/null +++ b/Dockerfile.smithery @@ -0,0 +1,31 @@ +FROM python:3.12-alpine + +# Install system dependencies including Chromium and ChromeDriver +RUN apk add --no-cache \ + git \ + curl \ + chromium \ + chromium-chromedriver + +# Install uv from official image +COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ + +# Set working directory +WORKDIR /app + +# Copy project files +COPY . 
/app + +# Sync dependencies and install project +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --frozen + +# Create a non-root user +RUN adduser -D -u 1000 mcpuser && chown -R mcpuser:mcpuser /app +USER mcpuser + +# Expose the port that will be set via PORT env var +EXPOSE 8000 + +# Smithery command - uses HTTP transport and PORT env var +CMD ["uv", "run", "python", "smithery_main.py"] diff --git a/smithery.yaml b/smithery.yaml new file mode 100644 index 00000000..f2d6286c --- /dev/null +++ b/smithery.yaml @@ -0,0 +1,20 @@ +runtime: "container" +build: + dockerfile: "Dockerfile.smithery" # Smithery-specific Dockerfile + dockerBuildPath: "." # Docker build context +startCommand: + type: "http" + configSchema: # JSON Schema for configuration + type: "object" + properties: + linkedin_email: + type: "string" + description: "LinkedIn email address for authentication" + linkedin_password: + type: "string" + description: "LinkedIn password for authentication" + sensitive: true + required: ["linkedin_email", "linkedin_password"] + exampleConfig: + linkedin_email: "user@example.com" + linkedin_password: "password123" diff --git a/smithery_main.py b/smithery_main.py new file mode 100644 index 00000000..67f550f5 --- /dev/null +++ b/smithery_main.py @@ -0,0 +1,105 @@ +# smithery_main.py +""" +LinkedIn MCP Server - Smithery HTTP Transport Entry Point + +This entry point is specifically designed for Smithery deployment with: +- HTTP transport (streamable-http) +- Query parameter configuration parsing +- PORT environment variable support +- Uses existing lazy authentication system +""" + +import os +import logging +from urllib.parse import parse_qs + +from linkedin_mcp_server.config import get_config, reset_config +from linkedin_mcp_server.drivers.chrome import initialize_driver +from linkedin_mcp_server.server import create_mcp_server, shutdown_handler + + +def setup_smithery_environment(query_string: str | None = None) -> None: + """ + Set up environment variables 
from Smithery query parameters. + + Args: + query_string: Query parameters from Smithery configuration + """ + if not query_string: + return + + # Parse query parameters + parsed = parse_qs(query_string) + + # Map Smithery parameters to environment variables + param_mapping = { + "linkedin_email": "LINKEDIN_EMAIL", + "linkedin_password": "LINKEDIN_PASSWORD", + } + + for param, env_var in param_mapping.items(): + if param in parsed and parsed[param]: + value = parsed[param][0] # Take first value + os.environ[env_var] = value + + # Reset config to pick up new environment variables + reset_config() + + +def main() -> None: + """ + Main entry point for Smithery deployment. + + Starts HTTP server listening on PORT environment variable. + Uses existing lazy initialization system. + """ + print("๐Ÿ”— LinkedIn MCP Server (Smithery) ๐Ÿ”—") + print("=" * 40) + + # Get PORT from environment (Smithery requirement) + port = int(os.environ.get("PORT", 8000)) + + # Set up environment for Smithery (can be called with query params later) + # For now, just ensure we're in the right mode + os.environ["DEBUG"] = os.environ.get("DEBUG", "false") + + # Force HTTP transport and container-friendly settings + os.environ.setdefault("TRANSPORT", "streamable-http") + + # Get configuration (will use lazy_init=True by default) + config = get_config() + + # Configure logging + log_level = logging.DEBUG if config.server.debug else logging.ERROR + logging.basicConfig( + level=log_level, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + + logger = logging.getLogger("linkedin_mcp_server") + logger.info(f"Starting Smithery MCP server on port {port}") + + # Initialize driver (will use lazy init by default - perfect for Smithery!) 
+ initialize_driver() + + # Create MCP server (tools will be available for discovery) + mcp = create_mcp_server() + + # Start HTTP server + print("\n๐Ÿš€ Running LinkedIn MCP server (Smithery HTTP mode)...") + print(f"๐Ÿ“ก HTTP server listening on http://0.0.0.0:{port}/mcp") + print("๐Ÿ”ง Tools available for discovery - credentials validated on use") + + try: + mcp.run(transport="streamable-http", host="0.0.0.0", port=port, path="/mcp") + except KeyboardInterrupt: + print("\n๐Ÿ‘‹ Shutting down LinkedIn MCP server...") + shutdown_handler() + except Exception as e: + print(f"โŒ Error running MCP server: {e}") + shutdown_handler() + raise + + +if __name__ == "__main__": + main() From 6df2bf915c6ca0981c4805e02e71dd432653ec59 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 3 Jul 2025 22:24:48 -0400 Subject: [PATCH 091/565] feat(tests): add pytest configuration and tests package for LinkedIn MCP server chore(dependencies): add pytest-asyncio to development dependencies chore(smithery): update main server to handle query parameter configuration and improve logging chore(vscode): add task for running pytest tests in VSCode --- .vscode/tasks.json | 24 ++++++- pyproject.toml | 1 + smithery.yaml | 6 +- smithery_main.py | 37 ++++++---- tests/__init__.py | 1 + tests/conftest.py | 39 +++++++++++ tests/test_mcp_http.py | 153 +++++++++++++++++++++++++++++++++++++++++ uv.lock | 14 ++++ 8 files changed, 256 insertions(+), 19 deletions(-) create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/test_mcp_http.py diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 0822f3a2..1ecd3857 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -1,6 +1,28 @@ { "version": "2.0.0", "tasks": [ + { + "label": "uv run pytest tests/", + "detail": "Run pytest tests for LinkedIn MCP server", + "type": "shell", + "command": "uv", + "args": [ + "run", + "pytest", + "tests/", + "-v" + ], + "group": { + "kind": "test", + "isDefault": true + 
}, + "presentation": { + "reveal": "always", + "panel": "new", + "focus": true + }, + "problemMatcher": [] + }, { "label": "uv run pre-commit run --all-files", "detail": "Run pre-commit hooks on all files", @@ -14,7 +36,7 @@ ], "group": { "kind": "test", - "isDefault": true + "isDefault": false }, "presentation": { "reveal": "never", diff --git a/pyproject.toml b/pyproject.toml index db1d7165..9ca27192 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ linkedin-scraper = { git = "https://github.com/joeyism/linkedin_scraper.git" } dev = [ "pre-commit>=4.2.0", "pytest>=8.3.5", + "pytest-asyncio>=1.0.0", "pytest-cov>=6.1.1", "ruff>=0.11.11", "ty>=0.0.1a12", diff --git a/smithery.yaml b/smithery.yaml index f2d6286c..f0c9d438 100644 --- a/smithery.yaml +++ b/smithery.yaml @@ -1,10 +1,10 @@ runtime: "container" build: - dockerfile: "Dockerfile.smithery" # Smithery-specific Dockerfile - dockerBuildPath: "." # Docker build context + dockerfile: "Dockerfile.smithery" + dockerBuildPath: "." startCommand: type: "http" - configSchema: # JSON Schema for configuration + configSchema: type: "object" properties: linkedin_email: diff --git a/smithery_main.py b/smithery_main.py index 67f550f5..cc9cec3f 100644 --- a/smithery_main.py +++ b/smithery_main.py @@ -51,7 +51,7 @@ def main() -> None: Main entry point for Smithery deployment. Starts HTTP server listening on PORT environment variable. - Uses existing lazy initialization system. + Handles query parameter configuration as required by Smithery Custom Deploy. 
""" print("๐Ÿ”— LinkedIn MCP Server (Smithery) ๐Ÿ”—") print("=" * 40) @@ -59,44 +59,51 @@ def main() -> None: # Get PORT from environment (Smithery requirement) port = int(os.environ.get("PORT", 8000)) - # Set up environment for Smithery (can be called with query params later) - # For now, just ensure we're in the right mode - os.environ["DEBUG"] = os.environ.get("DEBUG", "false") - - # Force HTTP transport and container-friendly settings + # Force settings for Smithery compatibility + os.environ["DEBUG"] = "false" # No debug logs in production os.environ.setdefault("TRANSPORT", "streamable-http") - # Get configuration (will use lazy_init=True by default) - config = get_config() + # Ensure we don't try to use keyring in containers + os.environ.setdefault("LINKEDIN_EMAIL", "") + os.environ.setdefault("LINKEDIN_PASSWORD", "") + + # Initialize configuration (will use lazy_init=True by default) + get_config() - # Configure logging - log_level = logging.DEBUG if config.server.debug else logging.ERROR + # Configure minimal logging for containers logging.basicConfig( - level=log_level, + level=logging.ERROR, # Only errors, no debug/info spam format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", ) logger = logging.getLogger("linkedin_mcp_server") - logger.info(f"Starting Smithery MCP server on port {port}") + logger.error(f"Starting Smithery MCP server on port {port}") - # Initialize driver (will use lazy init by default - perfect for Smithery!) 
+ # Initialize driver with lazy loading (no immediate credentials needed) initialize_driver() - # Create MCP server (tools will be available for discovery) + # Create MCP server (tools will be registered and available for discovery) mcp = create_mcp_server() # Start HTTP server print("\n๐Ÿš€ Running LinkedIn MCP server (Smithery HTTP mode)...") print(f"๐Ÿ“ก HTTP server listening on http://0.0.0.0:{port}/mcp") - print("๐Ÿ”ง Tools available for discovery - credentials validated on use") + print("๐Ÿ”ง Tools available for discovery - no credentials required") + print("โš™๏ธ Configure linkedin_email and linkedin_password to use tools") try: + # Add a startup delay to ensure everything is ready + import time + + time.sleep(1) + mcp.run(transport="streamable-http", host="0.0.0.0", port=port, path="/mcp") except KeyboardInterrupt: print("\n๐Ÿ‘‹ Shutting down LinkedIn MCP server...") shutdown_handler() except Exception as e: print(f"โŒ Error running MCP server: {e}") + print(f"Stack trace: {e.__class__.__name__}: {str(e)}") shutdown_handler() raise diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..65140f2e --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +# tests package diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..aa437c6c --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,39 @@ +# tests/conftest.py +""" +Simple pytest configuration for LinkedIn MCP server tests. 
+""" + +import os +import pytest +from linkedin_mcp_server.config import reset_config + + +@pytest.fixture(autouse=True) +def clean_environment(): + """Clean environment before each test.""" + # Reset configuration singleton + reset_config() + + # Clear environment variables that might affect tests + env_vars_to_clear = [ + "LINKEDIN_EMAIL", + "LINKEDIN_PASSWORD", + "DEBUG", + "CHROMEDRIVER", + "HEADLESS", + "TRANSPORT", + ] + original_env = {} + for var in env_vars_to_clear: + original_env[var] = os.environ.get(var) + if var in os.environ: + del os.environ[var] + + yield + + # Restore environment variables + for var, value in original_env.items(): + if value is not None: + os.environ[var] = value + elif var in os.environ: + del os.environ[var] diff --git a/tests/test_mcp_http.py b/tests/test_mcp_http.py new file mode 100644 index 00000000..48a9f0fc --- /dev/null +++ b/tests/test_mcp_http.py @@ -0,0 +1,153 @@ +# tests/test_mcp_http.py +""" +Test that the MCP server HTTP transport works and tools are accessible. 
+""" + +import pytest +import asyncio +from unittest.mock import patch +from fastmcp.client import Client +from linkedin_mcp_server.server import create_mcp_server + + +@pytest.mark.asyncio +async def test_mcp_server_tools_accessible(): + """Test that MCP server tools are accessible via in-memory client.""" + # Mock sys.argv to avoid pytest argument parsing conflicts + with patch("sys.argv", ["main.py"]): + # Create MCP server + mcp = create_mcp_server() + + # Connect client directly to server (in-memory) + async with Client(mcp) as client: + # Test that we can list tools + tools = await client.list_tools() + + # Verify expected LinkedIn tools are present + tool_names = [tool.name for tool in tools] + expected_tools = [ + "get_person_profile", + "get_company_profile", + "get_job_details", + "close_session", + ] + + for expected_tool in expected_tools: + assert expected_tool in tool_names, ( + f"Tool '{expected_tool}' not found in {tool_names}" + ) + + print(f"โœ… Found {len(tools)} tools: {tool_names}") + + +@pytest.mark.asyncio +async def test_tools_have_proper_schemas(): + """Test that tools have proper input schemas.""" + with patch("sys.argv", ["main.py"]): + mcp = create_mcp_server() + + async with Client(mcp) as client: + tools = await client.list_tools() + + # Check each tool has required properties + for tool in tools: + assert tool.name is not None + assert tool.description is not None + assert len(tool.description) > 0 + + if tool.name in [ + "get_person_profile", + "get_company_profile", + "get_job_details", + ]: + # These tools should have input schemas + assert tool.inputSchema is not None + assert "properties" in tool.inputSchema + + print(f"โœ… All {len(tools)} tools have proper schemas") + + +@pytest.mark.asyncio +async def test_close_session_tool_works(): + """Test that close_session tool can be called successfully.""" + with patch("sys.argv", ["main.py"]): + mcp = create_mcp_server() + + async with Client(mcp) as client: + # Call close_session 
tool (should work without credentials) + result = await client.call_tool("close_session") + + assert result.content is not None + assert len(result.content) > 0 + + response = result.content[0] + assert response.type == "text" + assert len(response.text) > 0 + + print(f"โœ… close_session tool response: {response.text[:100]}...") + + +@pytest.mark.asyncio +async def test_tools_fail_gracefully_without_credentials(): + """Test that LinkedIn tools fail gracefully when no credentials provided.""" + # Mock sys.argv to avoid pytest argument parsing conflicts + with patch("sys.argv", ["main.py"]): + # Mock the driver creation to avoid WebDriver initialization + with patch( + "linkedin_mcp_server.drivers.chrome.get_or_create_driver" + ) as mock_driver: + mock_driver.return_value = None # Simulate no driver available + + mcp = create_mcp_server() + + async with Client(mcp) as client: + # Try to call a LinkedIn tool without credentials + # This should either return an error message or raise an exception gracefully + try: + result = await client.call_tool( + "get_person_profile", + {"linkedin_url": "https://www.linkedin.com/in/test-user/"}, + ) + + # If no exception, check that result indicates missing credentials + assert result.content is not None + response = result.content[0] + + # Should mention credentials, driver, or login issues + error_keywords = [ + "credential", + "driver", + "login", + "error", + "failed", + ] + assert any( + keyword in response.text.lower() for keyword in error_keywords + ), f"Expected error message about credentials, got: {response.text}" + + print(f"โœ… Tool failed gracefully: {response.text[:100]}...") + + except Exception as e: + # Exception is also acceptable - means proper error handling + print(f"โœ… Tool raised exception (acceptable): {str(e)[:100]}...") + + +def test_mcp_server_creation(): + """Test that MCP server can be created successfully.""" + with patch("sys.argv", ["main.py"]): + mcp = create_mcp_server() + + assert mcp is not None 
+ assert mcp.name == "linkedin_scraper" + + print("โœ… MCP server created successfully") + + +if __name__ == "__main__": + # Run tests manually if executed directly + asyncio.run(test_mcp_server_tools_accessible()) + asyncio.run(test_tools_have_proper_schemas()) + asyncio.run(test_close_session_tool_works()) + asyncio.run(test_tools_fail_gracefully_without_credentials()) + test_mcp_server_creation() + print("๐ŸŽ‰ All tests passed!") diff --git a/uv.lock b/uv.lock index f93975a1..6233bbbf 100644 --- a/uv.lock +++ b/uv.lock @@ -539,6 +539,7 @@ dependencies = [ dev = [ { name = "pre-commit" }, { name = "pytest" }, + { name = "pytest-asyncio" }, { name = "pytest-cov" }, { name = "ruff" }, { name = "ty" }, @@ -557,6 +558,7 @@ requires-dist = [ dev = [ { name = "pre-commit", specifier = ">=4.2.0" }, { name = "pytest", specifier = ">=8.3.5" }, + { name = "pytest-asyncio", specifier = ">=1.0.0" }, { name = "pytest-cov", specifier = ">=6.1.1" }, { name = "ruff", specifier = ">=0.11.11" }, { name = "ty", specifier = ">=0.0.1a12" }, @@ -864,6 +866,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634, upload-time = "2025-03-02T12:54:52.069Z" }, ] +[[package]] +name = "pytest-asyncio" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d0/d4/14f53324cb1a6381bef29d698987625d80052bb33932d8e7cbf9b337b17c/pytest_asyncio-1.0.0.tar.gz", hash = "sha256:d15463d13f4456e1ead2594520216b225a16f781e144f8fdf6c5bb4667c48b3f", size = 46960, upload-time = "2025-05-26T04:54:40.484Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/05/ce271016e351fddc8399e546f6e23761967ee09c8c568bbfbecb0c150171/pytest_asyncio-1.0.0-py3-none-any.whl", hash = 
"sha256:4f024da9f1ef945e680dc68610b52550e36590a67fd31bb3b4943979a1f90ef3", size = 15976, upload-time = "2025-05-26T04:54:39.035Z" }, +] + [[package]] name = "pytest-cov" version = "6.1.1" From c0d6e371d7e969fa09fa646dec06bc16b5bda26c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 3 Jul 2025 23:08:54 -0400 Subject: [PATCH 092/565] fix(smithery): implement SmitheryConfigMiddleware for query parameter handling and add tests --- pyproject.toml | 1 + smithery_main.py | 96 +++++++--- tests/test_smithery_config.py | 138 +++++++++++++++ uv.lock | 320 ++++++++++++++++++++++++++++++++++ 4 files changed, 533 insertions(+), 22 deletions(-) create mode 100644 tests/test_smithery_config.py diff --git a/pyproject.toml b/pyproject.toml index 9ca27192..4e109bba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,6 +20,7 @@ linkedin-scraper = { git = "https://github.com/joeyism/linkedin_scraper.git" } [dependency-groups] dev = [ + "aiohttp>=3.12.13", "pre-commit>=4.2.0", "pytest>=8.3.5", "pytest-asyncio>=1.0.0", diff --git a/smithery_main.py b/smithery_main.py index cc9cec3f..b897f96f 100644 --- a/smithery_main.py +++ b/smithery_main.py @@ -12,38 +12,87 @@ import os import logging from urllib.parse import parse_qs +from fastmcp.server.middleware import Middleware, MiddlewareContext from linkedin_mcp_server.config import get_config, reset_config from linkedin_mcp_server.drivers.chrome import initialize_driver from linkedin_mcp_server.server import create_mcp_server, shutdown_handler -def setup_smithery_environment(query_string: str | None = None) -> None: +class SmitheryConfigMiddleware(Middleware): """ - Set up environment variables from Smithery query parameters. + FastMCP middleware to handle Smithery query parameter configuration. - Args: - query_string: Query parameters from Smithery configuration + Intercepts HTTP requests and extracts configuration from query parameters, + then temporarily sets environment variables for the duration of the request. 
""" - if not query_string: - return - # Parse query parameters - parsed = parse_qs(query_string) - - # Map Smithery parameters to environment variables - param_mapping = { - "linkedin_email": "LINKEDIN_EMAIL", - "linkedin_password": "LINKEDIN_PASSWORD", - } - - for param, env_var in param_mapping.items(): - if param in parsed and parsed[param]: - value = parsed[param][0] # Take first value - os.environ[env_var] = value - - # Reset config to pick up new environment variables - reset_config() + def __init__(self): + super().__init__() + self.param_mapping = { + "linkedin_email": "LINKEDIN_EMAIL", + "linkedin_password": "LINKEDIN_PASSWORD", + } + + async def on_call_tool(self, context: MiddlewareContext, call_next): + """ + Called before each tool execution. + Extract configuration from HTTP request query parameters. + """ + # Store original environment variables + original_env = {} + for env_var in self.param_mapping.values(): + original_env[env_var] = os.environ.get(env_var) + + # Extract query parameters from the request context + query_params = self._extract_query_params(context) + + if query_params: + # Apply configuration from query parameters + self._apply_config(query_params) + + # Reset configuration to pick up new environment variables + reset_config() + + try: + # Execute the tool with the new configuration + result = await call_next(context) + return result + finally: + # Restore original environment variables + self._restore_env(original_env) + + def _extract_query_params(self, context: MiddlewareContext) -> dict: + """Extract query parameters from the request context.""" + # Check if we can access FastMCP context for HTTP transport + if hasattr(context, "fastmcp_context") and context.fastmcp_context: + # Check if there's transport-specific information + if hasattr(context.fastmcp_context, "transport_info"): + transport_info = context.fastmcp_context.transport_info + if hasattr(transport_info, "query_params"): + return dict(transport_info.query_params) + 
+ # Try to get from environment if set by HTTP server + query_string = os.environ.get("QUERY_STRING", "") + if query_string: + return {k: v[0] for k, v in parse_qs(query_string).items()} + + return {} + + def _apply_config(self, query_params: dict): + """Apply configuration from query parameters to environment variables.""" + for param, env_var in self.param_mapping.items(): + if param in query_params and query_params[param]: + os.environ[env_var] = query_params[param] + print(f"๐Ÿ”ง Applied config: {param} -> {env_var}") + + def _restore_env(self, original_env: dict): + """Restore original environment variables.""" + for env_var, original_value in original_env.items(): + if original_value is not None: + os.environ[env_var] = original_value + elif env_var in os.environ: + del os.environ[env_var] def main() -> None: @@ -85,6 +134,9 @@ def main() -> None: # Create MCP server (tools will be registered and available for discovery) mcp = create_mcp_server() + # Add Smithery configuration middleware + mcp.add_middleware(SmitheryConfigMiddleware()) + # Start HTTP server print("\n๐Ÿš€ Running LinkedIn MCP server (Smithery HTTP mode)...") print(f"๐Ÿ“ก HTTP server listening on http://0.0.0.0:{port}/mcp") diff --git a/tests/test_smithery_config.py b/tests/test_smithery_config.py new file mode 100644 index 00000000..811e872b --- /dev/null +++ b/tests/test_smithery_config.py @@ -0,0 +1,138 @@ +# tests/test_smithery_config.py +""" +Test Smithery configuration parameter passing. 
+""" + +import pytest +import os +from unittest.mock import patch, MagicMock +from fastmcp.client import Client +from fastmcp.server.middleware import MiddlewareContext +from linkedin_mcp_server.server import create_mcp_server +from smithery_main import SmitheryConfigMiddleware + + +@pytest.mark.asyncio +async def test_smithery_middleware_extracts_config(): + """Test that SmitheryConfigMiddleware correctly extracts configuration from query parameters.""" + middleware = SmitheryConfigMiddleware() + + # Mock MiddlewareContext with query parameters via environment + context = MagicMock(spec=MiddlewareContext) + context.fastmcp_context = None + + # Set query string in environment to simulate HTTP request + os.environ["QUERY_STRING"] = ( + "linkedin_email=test@example.com&linkedin_password=testpass123" + ) + + # Mock call_next + async def mock_call_next(ctx): + # During tool execution, check that env vars are set + assert os.environ.get("LINKEDIN_EMAIL") == "test@example.com" + assert os.environ.get("LINKEDIN_PASSWORD") == "testpass123" + return MagicMock() + + # Store original env vars + original_email = os.environ.get("LINKEDIN_EMAIL") + original_password = os.environ.get("LINKEDIN_PASSWORD") + original_query_string = os.environ.get("QUERY_STRING") + + try: + # Execute middleware + await middleware.on_call_tool(context, mock_call_next) + + # After execution, env vars should be restored + assert os.environ.get("LINKEDIN_EMAIL") == original_email + assert os.environ.get("LINKEDIN_PASSWORD") == original_password + + print("โœ… Smithery middleware correctly handles configuration") + + finally: + # Cleanup + if original_email is not None: + os.environ["LINKEDIN_EMAIL"] = original_email + elif "LINKEDIN_EMAIL" in os.environ: + del os.environ["LINKEDIN_EMAIL"] + + if original_password is not None: + os.environ["LINKEDIN_PASSWORD"] = original_password + elif "LINKEDIN_PASSWORD" in os.environ: + del os.environ["LINKEDIN_PASSWORD"] + + if original_query_string is not None: + 
os.environ["QUERY_STRING"] = original_query_string + elif "QUERY_STRING" in os.environ: + del os.environ["QUERY_STRING"] + + +@pytest.mark.asyncio +async def test_smithery_middleware_with_empty_config(): + """Test that middleware works correctly with no configuration.""" + middleware = SmitheryConfigMiddleware() + + # Mock context with no query parameters + context = MagicMock(spec=MiddlewareContext) + context.fastmcp_context = None + + # Mock call_next + async def mock_call_next(ctx): + return MagicMock() + + # Should not raise any errors + result = await middleware.on_call_tool(context, mock_call_next) + assert result is not None + + print("โœ… Smithery middleware handles empty configuration") + + +@pytest.mark.asyncio +async def test_smithery_server_with_middleware(): + """Test that MCP server with Smithery middleware can be created and tools discovered.""" + with patch("sys.argv", ["smithery_main.py"]): + # Create server (simulate smithery_main.py) + mcp = create_mcp_server() + + # Add middleware + mcp.add_middleware(SmitheryConfigMiddleware()) + + # Test that tools are discoverable + async with Client(mcp) as client: + tools = await client.list_tools() + + tool_names = [tool.name for tool in tools] + expected_tools = [ + "get_person_profile", + "get_company_profile", + "get_job_details", + "close_session", + ] + + for expected_tool in expected_tools: + assert expected_tool in tool_names, f"Tool '{expected_tool}' not found" + + print(f"โœ… Smithery server with middleware: {len(tools)} tools discovered") + + +def test_smithery_middleware_param_mapping(): + """Test that SmitheryConfigMiddleware has correct parameter mapping.""" + middleware = SmitheryConfigMiddleware() + + expected_mapping = { + "linkedin_email": "LINKEDIN_EMAIL", + "linkedin_password": "LINKEDIN_PASSWORD", + } + + assert middleware.param_mapping == expected_mapping + print("โœ… Smithery middleware parameter mapping is correct") + + +if __name__ == "__main__": + # Run tests manually if executed 
directly + import asyncio + + asyncio.run(test_smithery_middleware_extracts_config()) + asyncio.run(test_smithery_middleware_with_empty_config()) + asyncio.run(test_smithery_server_with_middleware()) + test_smithery_middleware_param_mapping() + print("๐ŸŽ‰ All Smithery configuration tests passed!") diff --git a/uv.lock b/uv.lock index 6233bbbf..11a06d54 100644 --- a/uv.lock +++ b/uv.lock @@ -2,6 +2,79 @@ version = 1 revision = 2 requires-python = ">=3.12" +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.12.13" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/6e/ab88e7cb2a4058bed2f7870276454f85a7c56cd6da79349eb314fc7bbcaa/aiohttp-3.12.13.tar.gz", hash = "sha256:47e2da578528264a12e4e3dd8dd72a7289e5f812758fe086473fab037a10fcce", size = 7819160, upload-time = "2025-06-14T15:15:41.354Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/6a/ce40e329788013cd190b1d62bbabb2b6a9673ecb6d836298635b939562ef/aiohttp-3.12.13-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:0aa580cf80558557285b49452151b9c69f2fa3ad94c5c9e76e684719a8791b73", size = 700491, upload-time = "2025-06-14T15:14:00.048Z" }, + { url = "https://files.pythonhosted.org/packages/28/d9/7150d5cf9163e05081f1c5c64a0cdf3c32d2f56e2ac95db2a28fe90eca69/aiohttp-3.12.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b103a7e414b57e6939cc4dece8e282cfb22043efd0c7298044f6594cf83ab347", size = 475104, upload-time = "2025-06-14T15:14:01.691Z" }, + { url = "https://files.pythonhosted.org/packages/f8/91/d42ba4aed039ce6e449b3e2db694328756c152a79804e64e3da5bc19dffc/aiohttp-3.12.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f64e748e9e741d2eccff9597d09fb3cd962210e5b5716047cbb646dc8fe06f", size = 467948, upload-time = "2025-06-14T15:14:03.561Z" }, + { url = "https://files.pythonhosted.org/packages/99/3b/06f0a632775946981d7c4e5a865cddb6e8dfdbaed2f56f9ade7bb4a1039b/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c955989bf4c696d2ededc6b0ccb85a73623ae6e112439398935362bacfaaf6", size = 1714742, upload-time = "2025-06-14T15:14:05.558Z" }, + { url = "https://files.pythonhosted.org/packages/92/a6/2552eebad9ec5e3581a89256276009e6a974dc0793632796af144df8b740/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d640191016763fab76072c87d8854a19e8e65d7a6fcfcbf017926bdbbb30a7e5", size = 1697393, upload-time = "2025-06-14T15:14:07.194Z" }, + { url = "https://files.pythonhosted.org/packages/d8/9f/bd08fdde114b3fec7a021381b537b21920cdd2aa29ad48c5dffd8ee314f1/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dc507481266b410dede95dd9f26c8d6f5a14315372cc48a6e43eac652237d9b", size = 1752486, upload-time = "2025-06-14T15:14:08.808Z" }, + { url = "https://files.pythonhosted.org/packages/f7/e1/affdea8723aec5bd0959171b5490dccd9a91fcc505c8c26c9f1dca73474d/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8a94daa873465d518db073bd95d75f14302e0208a08e8c942b2f3f1c07288a75", size = 1798643, upload-time = "2025-06-14T15:14:10.767Z" }, + { url = "https://files.pythonhosted.org/packages/f3/9d/666d856cc3af3a62ae86393baa3074cc1d591a47d89dc3bf16f6eb2c8d32/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f52420cde4ce0bb9425a375d95577fe082cb5721ecb61da3049b55189e4e6", size = 1718082, upload-time = "2025-06-14T15:14:12.38Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ce/3c185293843d17be063dada45efd2712bb6bf6370b37104b4eda908ffdbd/aiohttp-3.12.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f7df1f620ec40f1a7fbcb99ea17d7326ea6996715e78f71a1c9a021e31b96b8", size = 1633884, upload-time = "2025-06-14T15:14:14.415Z" }, + { url = "https://files.pythonhosted.org/packages/3a/5b/f3413f4b238113be35dfd6794e65029250d4b93caa0974ca572217745bdb/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3062d4ad53b36e17796dce1c0d6da0ad27a015c321e663657ba1cc7659cfc710", size = 1694943, upload-time = "2025-06-14T15:14:16.48Z" }, + { url = "https://files.pythonhosted.org/packages/82/c8/0e56e8bf12081faca85d14a6929ad5c1263c146149cd66caa7bc12255b6d/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:8605e22d2a86b8e51ffb5253d9045ea73683d92d47c0b1438e11a359bdb94462", size = 1716398, upload-time = "2025-06-14T15:14:18.589Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f3/33192b4761f7f9b2f7f4281365d925d663629cfaea093a64b658b94fc8e1/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:54fbbe6beafc2820de71ece2198458a711e224e116efefa01b7969f3e2b3ddae", size = 1657051, upload-time = "2025-06-14T15:14:20.223Z" }, + { url = "https://files.pythonhosted.org/packages/5e/0b/26ddd91ca8f84c48452431cb4c5dd9523b13bc0c9766bda468e072ac9e29/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:050bd277dfc3768b606fd4eae79dd58ceda67d8b0b3c565656a89ae34525d15e", size = 1736611, upload-time = "2025-06-14T15:14:21.988Z" }, + { url = "https://files.pythonhosted.org/packages/c3/8d/e04569aae853302648e2c138a680a6a2f02e374c5b6711732b29f1e129cc/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2637a60910b58f50f22379b6797466c3aa6ae28a6ab6404e09175ce4955b4e6a", size = 1764586, upload-time = "2025-06-14T15:14:23.979Z" }, + { url = "https://files.pythonhosted.org/packages/ac/98/c193c1d1198571d988454e4ed75adc21c55af247a9fda08236602921c8c8/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e986067357550d1aaa21cfe9897fa19e680110551518a5a7cf44e6c5638cb8b5", size = 1724197, upload-time = "2025-06-14T15:14:25.692Z" }, + { url = "https://files.pythonhosted.org/packages/e7/9e/07bb8aa11eec762c6b1ff61575eeeb2657df11ab3d3abfa528d95f3e9337/aiohttp-3.12.13-cp312-cp312-win32.whl", hash = "sha256:ac941a80aeea2aaae2875c9500861a3ba356f9ff17b9cb2dbfb5cbf91baaf5bf", size = 421771, upload-time = "2025-06-14T15:14:27.364Z" }, + { url = "https://files.pythonhosted.org/packages/52/66/3ce877e56ec0813069cdc9607cd979575859c597b6fb9b4182c6d5f31886/aiohttp-3.12.13-cp312-cp312-win_amd64.whl", hash = "sha256:671f41e6146a749b6c81cb7fd07f5a8356d46febdaaaf07b0e774ff04830461e", size = 447869, upload-time = "2025-06-14T15:14:29.05Z" }, + { url = "https://files.pythonhosted.org/packages/11/0f/db19abdf2d86aa1deec3c1e0e5ea46a587b97c07a16516b6438428b3a3f8/aiohttp-3.12.13-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d4a18e61f271127465bdb0e8ff36e8f02ac4a32a80d8927aa52371e93cd87938", size = 694910, upload-time = "2025-06-14T15:14:30.604Z" }, + { url = "https://files.pythonhosted.org/packages/d5/81/0ab551e1b5d7f1339e2d6eb482456ccbe9025605b28eed2b1c0203aaaade/aiohttp-3.12.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:532542cb48691179455fab429cdb0d558b5e5290b033b87478f2aa6af5d20ace", size = 472566, upload-time = "2025-06-14T15:14:32.275Z" }, + { 
url = "https://files.pythonhosted.org/packages/34/3f/6b7d336663337672d29b1f82d1f252ec1a040fe2d548f709d3f90fa2218a/aiohttp-3.12.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d7eea18b52f23c050ae9db5d01f3d264ab08f09e7356d6f68e3f3ac2de9dfabb", size = 464856, upload-time = "2025-06-14T15:14:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/26/7f/32ca0f170496aa2ab9b812630fac0c2372c531b797e1deb3deb4cea904bd/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad7c8e5c25f2a26842a7c239de3f7b6bfb92304593ef997c04ac49fb703ff4d7", size = 1703683, upload-time = "2025-06-14T15:14:36.034Z" }, + { url = "https://files.pythonhosted.org/packages/ec/53/d5513624b33a811c0abea8461e30a732294112318276ce3dbf047dbd9d8b/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6af355b483e3fe9d7336d84539fef460120c2f6e50e06c658fe2907c69262d6b", size = 1684946, upload-time = "2025-06-14T15:14:38Z" }, + { url = "https://files.pythonhosted.org/packages/37/72/4c237dd127827b0247dc138d3ebd49c2ded6114c6991bbe969058575f25f/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a95cf9f097498f35c88e3609f55bb47b28a5ef67f6888f4390b3d73e2bac6177", size = 1737017, upload-time = "2025-06-14T15:14:39.951Z" }, + { url = "https://files.pythonhosted.org/packages/0d/67/8a7eb3afa01e9d0acc26e1ef847c1a9111f8b42b82955fcd9faeb84edeb4/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8ed8c38a1c584fe99a475a8f60eefc0b682ea413a84c6ce769bb19a7ff1c5ef", size = 1786390, upload-time = "2025-06-14T15:14:42.151Z" }, + { url = "https://files.pythonhosted.org/packages/48/19/0377df97dd0176ad23cd8cad4fd4232cfeadcec6c1b7f036315305c98e3f/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0b9170d5d800126b5bc89d3053a2363406d6e327afb6afaeda2d19ee8bb103", size = 1708719, upload-time = 
"2025-06-14T15:14:44.039Z" }, + { url = "https://files.pythonhosted.org/packages/61/97/ade1982a5c642b45f3622255173e40c3eed289c169f89d00eeac29a89906/aiohttp-3.12.13-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:372feeace612ef8eb41f05ae014a92121a512bd5067db8f25101dd88a8db11da", size = 1622424, upload-time = "2025-06-14T15:14:45.945Z" }, + { url = "https://files.pythonhosted.org/packages/99/ab/00ad3eea004e1d07ccc406e44cfe2b8da5acb72f8c66aeeb11a096798868/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a946d3702f7965d81f7af7ea8fb03bb33fe53d311df48a46eeca17e9e0beed2d", size = 1675447, upload-time = "2025-06-14T15:14:47.911Z" }, + { url = "https://files.pythonhosted.org/packages/3f/fe/74e5ce8b2ccaba445fe0087abc201bfd7259431d92ae608f684fcac5d143/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a0c4725fae86555bbb1d4082129e21de7264f4ab14baf735278c974785cd2041", size = 1707110, upload-time = "2025-06-14T15:14:50.334Z" }, + { url = "https://files.pythonhosted.org/packages/ef/c4/39af17807f694f7a267bd8ab1fbacf16ad66740862192a6c8abac2bff813/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b28ea2f708234f0a5c44eb6c7d9eb63a148ce3252ba0140d050b091b6e842d1", size = 1649706, upload-time = "2025-06-14T15:14:52.378Z" }, + { url = "https://files.pythonhosted.org/packages/38/e8/f5a0a5f44f19f171d8477059aa5f28a158d7d57fe1a46c553e231f698435/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d4f5becd2a5791829f79608c6f3dc745388162376f310eb9c142c985f9441cc1", size = 1725839, upload-time = "2025-06-14T15:14:54.617Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ac/81acc594c7f529ef4419d3866913f628cd4fa9cab17f7bf410a5c3c04c53/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:60f2ce6b944e97649051d5f5cc0f439360690b73909230e107fd45a359d3e911", size = 1759311, upload-time = "2025-06-14T15:14:56.597Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/0d/aabe636bd25c6ab7b18825e5a97d40024da75152bec39aa6ac8b7a677630/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:69fc1909857401b67bf599c793f2183fbc4804717388b0b888f27f9929aa41f3", size = 1708202, upload-time = "2025-06-14T15:14:58.598Z" }, + { url = "https://files.pythonhosted.org/packages/1f/ab/561ef2d8a223261683fb95a6283ad0d36cb66c87503f3a7dde7afe208bb2/aiohttp-3.12.13-cp313-cp313-win32.whl", hash = "sha256:7d7e68787a2046b0e44ba5587aa723ce05d711e3a3665b6b7545328ac8e3c0dd", size = 420794, upload-time = "2025-06-14T15:15:00.939Z" }, + { url = "https://files.pythonhosted.org/packages/9d/47/b11d0089875a23bff0abd3edb5516bcd454db3fefab8604f5e4b07bd6210/aiohttp-3.12.13-cp313-cp313-win_amd64.whl", hash = "sha256:5a178390ca90419bfd41419a809688c368e63c86bd725e1186dd97f6b89c2706", size = 446735, upload-time = "2025-06-14T15:15:02.858Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + [[package]] name = "annotated-types" version = "0.7.0" @@ -338,6 +411,66 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = 
"sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, ] +[[package]] +name = "frozenlist" +version = "1.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, + { url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, + { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = "2025-06-09T23:00:44.793Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, upload-time = "2025-06-09T23:00:46.125Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, + { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, + { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, + { url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, upload-time = "2025-06-09T23:00:52.855Z" }, + { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, + { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, + { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, + { url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, + { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = "2025-06-09T23:01:05.095Z" }, + { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, upload-time = 
"2025-06-09T23:01:06.54Z" }, + { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" }, + { url = "https://files.pythonhosted.org/packages/24/90/6b2cebdabdbd50367273c20ff6b57a3dfa89bd0762de02c3a1eb42cb6462/frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee", size = 79791, upload-time = "2025-06-09T23:01:09.368Z" }, + { url = "https://files.pythonhosted.org/packages/83/2e/5b70b6a3325363293fe5fc3ae74cdcbc3e996c2a11dde2fd9f1fb0776d19/frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d", size = 47165, upload-time = "2025-06-09T23:01:10.653Z" }, + { url = "https://files.pythonhosted.org/packages/f4/25/a0895c99270ca6966110f4ad98e87e5662eab416a17e7fd53c364bf8b954/frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43", size = 45881, upload-time = "2025-06-09T23:01:12.296Z" }, + { url = "https://files.pythonhosted.org/packages/19/7c/71bb0bbe0832793c601fff68cd0cf6143753d0c667f9aec93d3c323f4b55/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d", size = 232409, upload-time = "2025-06-09T23:01:13.641Z" }, + { url = "https://files.pythonhosted.org/packages/c0/45/ed2798718910fe6eb3ba574082aaceff4528e6323f9a8570be0f7028d8e9/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee", size = 225132, upload-time = "2025-06-09T23:01:15.264Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/e2/8417ae0f8eacb1d071d4950f32f229aa6bf68ab69aab797b72a07ea68d4f/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb", size = 237638, upload-time = "2025-06-09T23:01:16.752Z" }, + { url = "https://files.pythonhosted.org/packages/f8/b7/2ace5450ce85f2af05a871b8c8719b341294775a0a6c5585d5e6170f2ce7/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f", size = 233539, upload-time = "2025-06-09T23:01:18.202Z" }, + { url = "https://files.pythonhosted.org/packages/46/b9/6989292c5539553dba63f3c83dc4598186ab2888f67c0dc1d917e6887db6/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60", size = 215646, upload-time = "2025-06-09T23:01:19.649Z" }, + { url = "https://files.pythonhosted.org/packages/72/31/bc8c5c99c7818293458fe745dab4fd5730ff49697ccc82b554eb69f16a24/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00", size = 232233, upload-time = "2025-06-09T23:01:21.175Z" }, + { url = "https://files.pythonhosted.org/packages/59/52/460db4d7ba0811b9ccb85af996019f5d70831f2f5f255f7cc61f86199795/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b", size = 227996, upload-time = "2025-06-09T23:01:23.098Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c9/f4b39e904c03927b7ecf891804fd3b4df3db29b9e487c6418e37988d6e9d/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c", size = 242280, 
upload-time = "2025-06-09T23:01:24.808Z" }, + { url = "https://files.pythonhosted.org/packages/b8/33/3f8d6ced42f162d743e3517781566b8481322be321b486d9d262adf70bfb/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949", size = 217717, upload-time = "2025-06-09T23:01:26.28Z" }, + { url = "https://files.pythonhosted.org/packages/3e/e8/ad683e75da6ccef50d0ab0c2b2324b32f84fc88ceee778ed79b8e2d2fe2e/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca", size = 236644, upload-time = "2025-06-09T23:01:27.887Z" }, + { url = "https://files.pythonhosted.org/packages/b2/14/8d19ccdd3799310722195a72ac94ddc677541fb4bef4091d8e7775752360/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b", size = 238879, upload-time = "2025-06-09T23:01:29.524Z" }, + { url = "https://files.pythonhosted.org/packages/ce/13/c12bf657494c2fd1079a48b2db49fa4196325909249a52d8f09bc9123fd7/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e", size = 232502, upload-time = "2025-06-09T23:01:31.287Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8b/e7f9dfde869825489382bc0d512c15e96d3964180c9499efcec72e85db7e/frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1", size = 39169, upload-time = "2025-06-09T23:01:35.503Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/a487a98d94205d85745080a37860ff5744b9820a2c9acbcdd9440bfddf98/frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba", size = 43219, upload-time = "2025-06-09T23:01:36.784Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/d5/5c4cf2319a49eddd9dd7145e66c4866bdc6f3dbc67ca3d59685149c11e0d/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d", size = 84345, upload-time = "2025-06-09T23:01:38.295Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/ec2c1e1dc16b85bc9d526009961953df9cec8481b6886debb36ec9107799/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d", size = 48880, upload-time = "2025-06-09T23:01:39.887Z" }, + { url = "https://files.pythonhosted.org/packages/69/86/f9596807b03de126e11e7d42ac91e3d0b19a6599c714a1989a4e85eeefc4/frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b", size = 48498, upload-time = "2025-06-09T23:01:41.318Z" }, + { url = "https://files.pythonhosted.org/packages/5e/cb/df6de220f5036001005f2d726b789b2c0b65f2363b104bbc16f5be8084f8/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146", size = 292296, upload-time = "2025-06-09T23:01:42.685Z" }, + { url = "https://files.pythonhosted.org/packages/83/1f/de84c642f17c8f851a2905cee2dae401e5e0daca9b5ef121e120e19aa825/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74", size = 273103, upload-time = "2025-06-09T23:01:44.166Z" }, + { url = "https://files.pythonhosted.org/packages/88/3c/c840bfa474ba3fa13c772b93070893c6e9d5c0350885760376cbe3b6c1b3/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1", size = 292869, upload-time = "2025-06-09T23:01:45.681Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/1c/3efa6e7d5a39a1d5ef0abeb51c48fb657765794a46cf124e5aca2c7a592c/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1", size = 291467, upload-time = "2025-06-09T23:01:47.234Z" }, + { url = "https://files.pythonhosted.org/packages/4f/00/d5c5e09d4922c395e2f2f6b79b9a20dab4b67daaf78ab92e7729341f61f6/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384", size = 266028, upload-time = "2025-06-09T23:01:48.819Z" }, + { url = "https://files.pythonhosted.org/packages/4e/27/72765be905619dfde25a7f33813ac0341eb6b076abede17a2e3fbfade0cb/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb", size = 284294, upload-time = "2025-06-09T23:01:50.394Z" }, + { url = "https://files.pythonhosted.org/packages/88/67/c94103a23001b17808eb7dd1200c156bb69fb68e63fcf0693dde4cd6228c/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c", size = 281898, upload-time = "2025-06-09T23:01:52.234Z" }, + { url = "https://files.pythonhosted.org/packages/42/34/a3e2c00c00f9e2a9db5653bca3fec306349e71aff14ae45ecc6d0951dd24/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65", size = 290465, upload-time = "2025-06-09T23:01:53.788Z" }, + { url = "https://files.pythonhosted.org/packages/bb/73/f89b7fbce8b0b0c095d82b008afd0590f71ccb3dee6eee41791cf8cd25fd/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3", size = 266385, upload-time = 
"2025-06-09T23:01:55.769Z" }, + { url = "https://files.pythonhosted.org/packages/cd/45/e365fdb554159462ca12df54bc59bfa7a9a273ecc21e99e72e597564d1ae/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657", size = 288771, upload-time = "2025-06-09T23:01:57.4Z" }, + { url = "https://files.pythonhosted.org/packages/00/11/47b6117002a0e904f004d70ec5194fe9144f117c33c851e3d51c765962d0/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104", size = 288206, upload-time = "2025-06-09T23:01:58.936Z" }, + { url = "https://files.pythonhosted.org/packages/40/37/5f9f3c3fd7f7746082ec67bcdc204db72dad081f4f83a503d33220a92973/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf", size = 282620, upload-time = "2025-06-09T23:02:00.493Z" }, + { url = "https://files.pythonhosted.org/packages/0b/31/8fbc5af2d183bff20f21aa743b4088eac4445d2bb1cdece449ae80e4e2d1/frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81", size = 43059, upload-time = "2025-06-09T23:02:02.072Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ed/41956f52105b8dbc26e457c5705340c67c8cc2b79f394b79bffc09d0e938/frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e", size = 47516, upload-time = "2025-06-09T23:02:03.779Z" }, + { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, +] + [[package]] name = "h11" version = "0.14.0" @@ -537,6 +670,7 @@ dependencies = [ [package.dev-dependencies] dev = [ + { 
name = "aiohttp" }, { name = "pre-commit" }, { name = "pytest" }, { name = "pytest-asyncio" }, @@ -556,6 +690,7 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ + { name = "aiohttp", specifier = ">=3.12.13" }, { name = "pre-commit", specifier = ">=4.2.0" }, { name = "pytest", specifier = ">=8.3.5" }, { name = "pytest-asyncio", specifier = ">=1.0.0" }, @@ -667,6 +802,69 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2b/9f/7ba6f94fc1e9ac3d2b853fdff3035fb2fa5afbed898c4a72b8a020610594/more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e", size = 65278, upload-time = "2025-04-22T14:17:40.49Z" }, ] +[[package]] +name = "multidict" +version = "6.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/2c/5dad12e82fbdf7470f29bff2171484bf07cb3b16ada60a6589af8f376440/multidict-6.6.3.tar.gz", hash = "sha256:798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc", size = 101006, upload-time = "2025-06-30T15:53:46.929Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/a0/6b57988ea102da0623ea814160ed78d45a2645e4bbb499c2896d12833a70/multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6", size = 76514, upload-time = "2025-06-30T15:51:48.728Z" }, + { url = "https://files.pythonhosted.org/packages/07/7a/d1e92665b0850c6c0508f101f9cf0410c1afa24973e1115fe9c6a185ebf7/multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f", size = 45394, upload-time = "2025-06-30T15:51:49.986Z" }, + { url = "https://files.pythonhosted.org/packages/52/6f/dd104490e01be6ef8bf9573705d8572f8c2d2c561f06e3826b081d9e6591/multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55", size = 43590, 
upload-time = "2025-06-30T15:51:51.331Z" }, + { url = "https://files.pythonhosted.org/packages/44/fe/06e0e01b1b0611e6581b7fd5a85b43dacc08b6cea3034f902f383b0873e5/multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b", size = 237292, upload-time = "2025-06-30T15:51:52.584Z" }, + { url = "https://files.pythonhosted.org/packages/ce/71/4f0e558fb77696b89c233c1ee2d92f3e1d5459070a0e89153c9e9e804186/multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888", size = 258385, upload-time = "2025-06-30T15:51:53.913Z" }, + { url = "https://files.pythonhosted.org/packages/e3/25/cca0e68228addad24903801ed1ab42e21307a1b4b6dd2cf63da5d3ae082a/multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d", size = 242328, upload-time = "2025-06-30T15:51:55.672Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a3/46f2d420d86bbcb8fe660b26a10a219871a0fbf4d43cb846a4031533f3e0/multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680", size = 268057, upload-time = "2025-06-30T15:51:57.037Z" }, + { url = "https://files.pythonhosted.org/packages/9e/73/1c743542fe00794a2ec7466abd3f312ccb8fad8dff9f36d42e18fb1ec33e/multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a", size = 269341, upload-time = "2025-06-30T15:51:59.111Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/11/6ec9dcbe2264b92778eeb85407d1df18812248bf3506a5a1754bc035db0c/multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961", size = 256081, upload-time = "2025-06-30T15:52:00.533Z" }, + { url = "https://files.pythonhosted.org/packages/9b/2b/631b1e2afeb5f1696846d747d36cda075bfdc0bc7245d6ba5c319278d6c4/multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65", size = 253581, upload-time = "2025-06-30T15:52:02.43Z" }, + { url = "https://files.pythonhosted.org/packages/bf/0e/7e3b93f79efeb6111d3bf9a1a69e555ba1d07ad1c11bceb56b7310d0d7ee/multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643", size = 250750, upload-time = "2025-06-30T15:52:04.26Z" }, + { url = "https://files.pythonhosted.org/packages/ad/9e/086846c1d6601948e7de556ee464a2d4c85e33883e749f46b9547d7b0704/multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063", size = 251548, upload-time = "2025-06-30T15:52:06.002Z" }, + { url = "https://files.pythonhosted.org/packages/8c/7b/86ec260118e522f1a31550e87b23542294880c97cfbf6fb18cc67b044c66/multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3", size = 262718, upload-time = "2025-06-30T15:52:07.707Z" }, + { url = "https://files.pythonhosted.org/packages/8c/bd/22ce8f47abb0be04692c9fc4638508b8340987b18691aa7775d927b73f72/multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75", size = 259603, upload-time = "2025-06-30T15:52:09.58Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/9c/91b7ac1691be95cd1f4a26e36a74b97cda6aa9820632d31aab4410f46ebd/multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10", size = 251351, upload-time = "2025-06-30T15:52:10.947Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl", hash = "sha256:73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5", size = 41860, upload-time = "2025-06-30T15:52:12.334Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17", size = 45982, upload-time = "2025-06-30T15:52:13.6Z" }, + { url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b", size = 43210, upload-time = "2025-06-30T15:52:14.893Z" }, + { url = "https://files.pythonhosted.org/packages/52/1d/0bebcbbb4f000751fbd09957257903d6e002943fc668d841a4cf2fb7f872/multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:540d3c06d48507357a7d57721e5094b4f7093399a0106c211f33540fdc374d55", size = 75843, upload-time = "2025-06-30T15:52:16.155Z" }, + { url = "https://files.pythonhosted.org/packages/07/8f/cbe241b0434cfe257f65c2b1bcf9e8d5fb52bc708c5061fb29b0fed22bdf/multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9c19cea2a690f04247d43f366d03e4eb110a0dc4cd1bbeee4d445435428ed35b", size = 45053, upload-time = "2025-06-30T15:52:17.429Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/d2/0b3b23f9dbad5b270b22a3ac3ea73ed0a50ef2d9a390447061178ed6bdb8/multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7af039820cfd00effec86bda5d8debef711a3e86a1d3772e85bea0f243a4bd65", size = 43273, upload-time = "2025-06-30T15:52:19.346Z" }, + { url = "https://files.pythonhosted.org/packages/fd/fe/6eb68927e823999e3683bc49678eb20374ba9615097d085298fd5b386564/multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:500b84f51654fdc3944e936f2922114349bf8fdcac77c3092b03449f0e5bc2b3", size = 237124, upload-time = "2025-06-30T15:52:20.773Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/320d8507e7726c460cb77117848b3834ea0d59e769f36fdae495f7669929/multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3fc723ab8a5c5ed6c50418e9bfcd8e6dceba6c271cee6728a10a4ed8561520c", size = 256892, upload-time = "2025-06-30T15:52:22.242Z" }, + { url = "https://files.pythonhosted.org/packages/76/60/38ee422db515ac69834e60142a1a69111ac96026e76e8e9aa347fd2e4591/multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:94c47ea3ade005b5976789baaed66d4de4480d0a0bf31cef6edaa41c1e7b56a6", size = 240547, upload-time = "2025-06-30T15:52:23.736Z" }, + { url = "https://files.pythonhosted.org/packages/27/fb/905224fde2dff042b030c27ad95a7ae744325cf54b890b443d30a789b80e/multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dbc7cf464cc6d67e83e136c9f55726da3a30176f020a36ead246eceed87f1cd8", size = 266223, upload-time = "2025-06-30T15:52:25.185Z" }, + { url = "https://files.pythonhosted.org/packages/76/35/dc38ab361051beae08d1a53965e3e1a418752fc5be4d3fb983c5582d8784/multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:900eb9f9da25ada070f8ee4a23f884e0ee66fe4e1a38c3af644256a508ad81ca", size = 267262, upload-time = "2025-06-30T15:52:26.969Z" }, + { url = "https://files.pythonhosted.org/packages/1f/a3/0a485b7f36e422421b17e2bbb5a81c1af10eac1d4476f2ff92927c730479/multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c6df517cf177da5d47ab15407143a89cd1a23f8b335f3a28d57e8b0a3dbb884", size = 254345, upload-time = "2025-06-30T15:52:28.467Z" }, + { url = "https://files.pythonhosted.org/packages/b4/59/bcdd52c1dab7c0e0d75ff19cac751fbd5f850d1fc39172ce809a74aa9ea4/multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ef421045f13879e21c994b36e728d8e7d126c91a64b9185810ab51d474f27e7", size = 252248, upload-time = "2025-06-30T15:52:29.938Z" }, + { url = "https://files.pythonhosted.org/packages/bb/a4/2d96aaa6eae8067ce108d4acee6f45ced5728beda55c0f02ae1072c730d1/multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:6c1e61bb4f80895c081790b6b09fa49e13566df8fbff817da3f85b3a8192e36b", size = 250115, upload-time = "2025-06-30T15:52:31.416Z" }, + { url = "https://files.pythonhosted.org/packages/25/d2/ed9f847fa5c7d0677d4f02ea2c163d5e48573de3f57bacf5670e43a5ffaa/multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e5e8523bb12d7623cd8300dbd91b9e439a46a028cd078ca695eb66ba31adee3c", size = 249649, upload-time = "2025-06-30T15:52:32.996Z" }, + { url = "https://files.pythonhosted.org/packages/1f/af/9155850372563fc550803d3f25373308aa70f59b52cff25854086ecb4a79/multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ef58340cc896219e4e653dade08fea5c55c6df41bcc68122e3be3e9d873d9a7b", size = 261203, upload-time = "2025-06-30T15:52:34.521Z" }, + { url = "https://files.pythonhosted.org/packages/36/2f/c6a728f699896252cf309769089568a33c6439626648843f78743660709d/multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:fc9dc435ec8699e7b602b94fe0cd4703e69273a01cbc34409af29e7820f777f1", size = 258051, upload-time = "2025-06-30T15:52:35.999Z" }, + { url = "https://files.pythonhosted.org/packages/d0/60/689880776d6b18fa2b70f6cc74ff87dd6c6b9b47bd9cf74c16fecfaa6ad9/multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e864486ef4ab07db5e9cb997bad2b681514158d6954dd1958dfb163b83d53e6", size = 249601, upload-time = "2025-06-30T15:52:37.473Z" }, + { url = "https://files.pythonhosted.org/packages/75/5e/325b11f2222a549019cf2ef879c1f81f94a0d40ace3ef55cf529915ba6cc/multidict-6.6.3-cp313-cp313-win32.whl", hash = "sha256:5633a82fba8e841bc5c5c06b16e21529573cd654f67fd833650a215520a6210e", size = 41683, upload-time = "2025-06-30T15:52:38.927Z" }, + { url = "https://files.pythonhosted.org/packages/b1/ad/cf46e73f5d6e3c775cabd2a05976547f3f18b39bee06260369a42501f053/multidict-6.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:e93089c1570a4ad54c3714a12c2cef549dc9d58e97bcded193d928649cab78e9", size = 45811, upload-time = "2025-06-30T15:52:40.207Z" }, + { url = "https://files.pythonhosted.org/packages/c5/c9/2e3fe950db28fb7c62e1a5f46e1e38759b072e2089209bc033c2798bb5ec/multidict-6.6.3-cp313-cp313-win_arm64.whl", hash = "sha256:c60b401f192e79caec61f166da9c924e9f8bc65548d4246842df91651e83d600", size = 43056, upload-time = "2025-06-30T15:52:41.575Z" }, + { url = "https://files.pythonhosted.org/packages/3a/58/aaf8114cf34966e084a8cc9517771288adb53465188843d5a19862cb6dc3/multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:02fd8f32d403a6ff13864b0851f1f523d4c988051eea0471d4f1fd8010f11134", size = 82811, upload-time = "2025-06-30T15:52:43.281Z" }, + { url = "https://files.pythonhosted.org/packages/71/af/5402e7b58a1f5b987a07ad98f2501fdba2a4f4b4c30cf114e3ce8db64c87/multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f3aa090106b1543f3f87b2041eef3c156c8da2aed90c63a2fbed62d875c49c37", size = 48304, upload-time = "2025-06-30T15:52:45.026Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/65/ab3c8cafe21adb45b24a50266fd747147dec7847425bc2a0f6934b3ae9ce/multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e924fb978615a5e33ff644cc42e6aa241effcf4f3322c09d4f8cebde95aff5f8", size = 46775, upload-time = "2025-06-30T15:52:46.459Z" }, + { url = "https://files.pythonhosted.org/packages/49/ba/9fcc1b332f67cc0c0c8079e263bfab6660f87fe4e28a35921771ff3eea0d/multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:b9fe5a0e57c6dbd0e2ce81ca66272282c32cd11d31658ee9553849d91289e1c1", size = 229773, upload-time = "2025-06-30T15:52:47.88Z" }, + { url = "https://files.pythonhosted.org/packages/a4/14/0145a251f555f7c754ce2dcbcd012939bbd1f34f066fa5d28a50e722a054/multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b24576f208793ebae00280c59927c3b7c2a3b1655e443a25f753c4611bc1c373", size = 250083, upload-time = "2025-06-30T15:52:49.366Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d4/d5c0bd2bbb173b586c249a151a26d2fb3ec7d53c96e42091c9fef4e1f10c/multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:135631cb6c58eac37d7ac0df380294fecdc026b28837fa07c02e459c7fb9c54e", size = 228980, upload-time = "2025-06-30T15:52:50.903Z" }, + { url = "https://files.pythonhosted.org/packages/21/32/c9a2d8444a50ec48c4733ccc67254100c10e1c8ae8e40c7a2d2183b59b97/multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:274d416b0df887aef98f19f21578653982cfb8a05b4e187d4a17103322eeaf8f", size = 257776, upload-time = "2025-06-30T15:52:52.764Z" }, + { url = "https://files.pythonhosted.org/packages/68/d0/14fa1699f4ef629eae08ad6201c6b476098f5efb051b296f4c26be7a9fdf/multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:e252017a817fad7ce05cafbe5711ed40faeb580e63b16755a3a24e66fa1d87c0", size = 256882, upload-time = "2025-06-30T15:52:54.596Z" }, + { url = "https://files.pythonhosted.org/packages/da/88/84a27570fbe303c65607d517a5f147cd2fc046c2d1da02b84b17b9bdc2aa/multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4cc8d848cd4fe1cdee28c13ea79ab0ed37fc2e89dd77bac86a2e7959a8c3bc", size = 247816, upload-time = "2025-06-30T15:52:56.175Z" }, + { url = "https://files.pythonhosted.org/packages/1c/60/dca352a0c999ce96a5d8b8ee0b2b9f729dcad2e0b0c195f8286269a2074c/multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9e236a7094b9c4c1b7585f6b9cca34b9d833cf079f7e4c49e6a4a6ec9bfdc68f", size = 245341, upload-time = "2025-06-30T15:52:57.752Z" }, + { url = "https://files.pythonhosted.org/packages/50/ef/433fa3ed06028f03946f3993223dada70fb700f763f70c00079533c34578/multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e0cb0ab69915c55627c933f0b555a943d98ba71b4d1c57bc0d0a66e2567c7471", size = 235854, upload-time = "2025-06-30T15:52:59.74Z" }, + { url = "https://files.pythonhosted.org/packages/1b/1f/487612ab56fbe35715320905215a57fede20de7db40a261759690dc80471/multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:81ef2f64593aba09c5212a3d0f8c906a0d38d710a011f2f42759704d4557d3f2", size = 243432, upload-time = "2025-06-30T15:53:01.602Z" }, + { url = "https://files.pythonhosted.org/packages/da/6f/ce8b79de16cd885c6f9052c96a3671373d00c59b3ee635ea93e6e81b8ccf/multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:b9cbc60010de3562545fa198bfc6d3825df430ea96d2cc509c39bd71e2e7d648", size = 252731, upload-time = "2025-06-30T15:53:03.517Z" }, + { url = "https://files.pythonhosted.org/packages/bb/fe/a2514a6aba78e5abefa1624ca85ae18f542d95ac5cde2e3815a9fbf369aa/multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:70d974eaaa37211390cd02ef93b7e938de564bbffa866f0b08d07e5e65da783d", size = 247086, upload-time = "2025-06-30T15:53:05.48Z" }, + { url = "https://files.pythonhosted.org/packages/8c/22/b788718d63bb3cce752d107a57c85fcd1a212c6c778628567c9713f9345a/multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3713303e4a6663c6d01d648a68f2848701001f3390a030edaaf3fc949c90bf7c", size = 243338, upload-time = "2025-06-30T15:53:07.522Z" }, + { url = "https://files.pythonhosted.org/packages/22/d6/fdb3d0670819f2228f3f7d9af613d5e652c15d170c83e5f1c94fbc55a25b/multidict-6.6.3-cp313-cp313t-win32.whl", hash = "sha256:639ecc9fe7cd73f2495f62c213e964843826f44505a3e5d82805aa85cac6f89e", size = 47812, upload-time = "2025-06-30T15:53:09.263Z" }, + { url = "https://files.pythonhosted.org/packages/b6/d6/a9d2c808f2c489ad199723197419207ecbfbc1776f6e155e1ecea9c883aa/multidict-6.6.3-cp313-cp313t-win_amd64.whl", hash = "sha256:9f97e181f344a0ef3881b573d31de8542cc0dbc559ec68c8f8b5ce2c2e91646d", size = 53011, upload-time = "2025-06-30T15:53:11.038Z" }, + { url = "https://files.pythonhosted.org/packages/f2/40/b68001cba8188dd267590a111f9661b6256debc327137667e832bf5d66e8/multidict-6.6.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ce8b7693da41a3c4fde5871c738a81490cea5496c671d74374c8ab889e1834fb", size = 45254, upload-time = "2025-06-30T15:53:12.421Z" }, + { url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", size = 12313, upload-time = "2025-06-30T15:53:45.437Z" }, +] + [[package]] name = "nodeenv" version = "1.9.1" @@ -743,6 +941,63 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload-time = 
"2025-03-18T21:35:19.343Z" }, ] +[[package]] +name = "propcache" +version = "0.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, + { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, + { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 
234894, upload-time = "2025-06-09T22:54:36.708Z" }, + { url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, + { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, + { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, + { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = 
"2025-06-09T22:54:46.243Z" }, + { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, + { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, + { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, + { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, + { url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286, upload-time = "2025-06-09T22:54:54.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425, upload-time = "2025-06-09T22:54:55.642Z" }, + { url = "https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846, upload-time = "2025-06-09T22:54:57.246Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871, upload-time = "2025-06-09T22:54:58.975Z" }, + { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 215720, upload-time = "2025-06-09T22:55:00.471Z" }, + { url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203, upload-time = "2025-06-09T22:55:01.834Z" }, + { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365, upload-time = "2025-06-09T22:55:03.199Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016, upload-time = "2025-06-09T22:55:04.518Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596, upload-time = "2025-06-09T22:55:05.942Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977, upload-time = "2025-06-09T22:55:07.792Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220, upload-time = "2025-06-09T22:55:09.173Z" }, + { url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642, upload-time = "2025-06-09T22:55:10.62Z" }, + { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789, upload-time = "2025-06-09T22:55:12.029Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880, upload-time = "2025-06-09T22:55:13.45Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220, upload-time = "2025-06-09T22:55:15.284Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678, upload-time = "2025-06-09T22:55:16.445Z" }, + { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560, upload-time = "2025-06-09T22:55:17.598Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676, upload-time = "2025-06-09T22:55:18.922Z" }, + { url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701, upload-time = "2025-06-09T22:55:20.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934, upload-time = "2025-06-09T22:55:21.5Z" }, + { url = "https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316, upload-time = "2025-06-09T22:55:22.918Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619, upload-time = "2025-06-09T22:55:24.651Z" }, + { url = "https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896, upload-time = "2025-06-09T22:55:26.049Z" }, + { url = "https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111, upload-time = "2025-06-09T22:55:27.381Z" }, + { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 268334, upload-time = 
"2025-06-09T22:55:28.747Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026, upload-time = "2025-06-09T22:55:30.184Z" }, + { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724, upload-time = "2025-06-09T22:55:31.646Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868, upload-time = "2025-06-09T22:55:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322, upload-time = "2025-06-09T22:55:35.065Z" }, + { url = "https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778, upload-time = "2025-06-09T22:55:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175, upload-time = "2025-06-09T22:55:38.436Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857, upload-time = "2025-06-09T22:55:39.687Z" }, + { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, +] + [[package]] name = "pycparser" version = "2.22" @@ -1370,3 +1625,68 @@ sdist = { url = "https://files.pythonhosted.org/packages/72/b2/e3edc608823348e62 wheels = [ { url = "https://files.pythonhosted.org/packages/33/6b/0dc75b64a764ea1cb8e4c32d1fb273c147304d4e5483cd58be482dc62e45/xmod-1.8.1-py3-none-any.whl", hash = "sha256:a24e9458a4853489042522bdca9e50ee2eac5ab75c809a91150a8a7f40670d48", size = 4610, upload-time = "2024-01-04T18:03:16.078Z" }, ] + +[[package]] +name = "yarl" +version = "1.20.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428, upload-time = "2025-06-10T00:46:09.923Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", size = 133667, upload-time = "2025-06-10T00:43:44.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", size = 91025, upload-time = "2025-06-10T00:43:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", size = 89709, upload-time = "2025-06-10T00:43:48.22Z" }, + { url = "https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", size = 352287, upload-time = "2025-06-10T00:43:49.924Z" }, + { url = "https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", size = 345429, upload-time = "2025-06-10T00:43:51.7Z" }, + { url = "https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", size = 365429, upload-time = "2025-06-10T00:43:53.494Z" }, + { url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", size = 363862, upload-time = "2025-06-10T00:43:55.766Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", size = 355616, upload-time = "2025-06-10T00:43:58.056Z" }, + { url = "https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", size = 339954, upload-time = "2025-06-10T00:43:59.773Z" }, + { url = "https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", size = 365575, upload-time = "2025-06-10T00:44:02.051Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", size = 365061, upload-time = "2025-06-10T00:44:04.196Z" }, + { url = "https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", size = 364142, upload-time = "2025-06-10T00:44:06.527Z" }, + { url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", size = 381894, upload-time = "2025-06-10T00:44:08.379Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", size = 383378, upload-time = "2025-06-10T00:44:10.51Z" }, + { url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", size = 374069, upload-time = "2025-06-10T00:44:12.834Z" }, + { url = "https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", size = 81249, upload-time = "2025-06-10T00:44:14.731Z" }, + { url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710, upload-time = "2025-06-10T00:44:16.716Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811, upload-time = "2025-06-10T00:44:18.933Z" }, + { url = "https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078, upload-time = "2025-06-10T00:44:20.635Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748, upload-time = "2025-06-10T00:44:22.34Z" }, + { url = "https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595, upload-time = "2025-06-10T00:44:24.314Z" }, + { url = "https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616, upload-time = "2025-06-10T00:44:26.167Z" }, + { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324, upload-time = "2025-06-10T00:44:27.915Z" }, + { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676, upload-time = "2025-06-10T00:44:30.041Z" }, + { url = "https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614, upload-time = "2025-06-10T00:44:32.171Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766, upload-time = "2025-06-10T00:44:34.494Z" }, + { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615, upload-time = "2025-06-10T00:44:36.856Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982, upload-time = "2025-06-10T00:44:39.141Z" }, + { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792, upload-time = "2025-06-10T00:44:40.934Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049, upload-time = "2025-06-10T00:44:42.854Z" }, + { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774, upload-time = "2025-06-10T00:44:45.275Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252, upload-time = "2025-06-10T00:44:47.31Z" }, + { url = "https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198, upload-time = "2025-06-10T00:44:49.164Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346, upload-time = "2025-06-10T00:44:51.182Z" }, + { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826, upload-time = "2025-06-10T00:44:52.883Z" }, + { url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217, upload-time = "2025-06-10T00:44:54.658Z" }, + { url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700, upload-time = "2025-06-10T00:44:56.784Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644, upload-time = "2025-06-10T00:44:59.071Z" }, + { url = "https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452, upload-time = "2025-06-10T00:45:01.605Z" }, + { url = "https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", size = 346378, upload-time = "2025-06-10T00:45:03.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261, upload-time = "2025-06-10T00:45:05.992Z" }, + { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987, upload-time = "2025-06-10T00:45:08.227Z" }, + { url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361, 
upload-time = "2025-06-10T00:45:10.11Z" }, + { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460, upload-time = "2025-06-10T00:45:12.055Z" }, + { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486, upload-time = "2025-06-10T00:45:13.995Z" }, + { url = "https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219, upload-time = "2025-06-10T00:45:16.479Z" }, + { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693, upload-time = "2025-06-10T00:45:18.399Z" }, + { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803, upload-time = "2025-06-10T00:45:20.677Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709, upload-time = "2025-06-10T00:45:23.221Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591, upload-time = "2025-06-10T00:45:25.793Z" }, + { url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003, upload-time = "2025-06-10T00:45:27.752Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, +] From 8082f304a36fa08f1e3bfa8a3e9ff7f3f828c311 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 4 Jul 2025 04:34:29 -0400 Subject: [PATCH 093/565] fix(smithery): url query parsing and created proper ASGI application using FastMCP's http_app() --- Dockerfile.smithery | 10 ++ SMITHERY_FIX_SUMMARY.md | 109 ++++++++++++ linkedin_mcp_server/config/loaders.py | 12 ++ smithery.yaml | 2 +- smithery_main.py | 234 +++++++++++--------------- test_imports.py | 45 +++++ test_local_smithery.sh | 71 ++++++++ test_simple.sh | 11 ++ test_smithery.py | 124 ++++++++++++++ tests/test_smithery_config.py | 132 ++++++--------- 10 files changed, 538 insertions(+), 212 deletions(-) create mode 100644 SMITHERY_FIX_SUMMARY.md create mode 100644 test_imports.py create mode 100755 test_local_smithery.sh create mode 100644 test_simple.sh create mode 100644 test_smithery.py diff --git a/Dockerfile.smithery b/Dockerfile.smithery index 27c2aa09..878a4f48 100644 --- a/Dockerfile.smithery +++ b/Dockerfile.smithery @@ -20,6 +20,16 @@ COPY . 
/app RUN --mount=type=cache,target=/root/.cache/uv \ uv sync --frozen +# Set ChromeDriver path for Alpine +ENV CHROMEDRIVER_PATH=/usr/bin/chromedriver + +# Set environment variables for Smithery +ENV LAZY_INIT=true \ + NON_INTERACTIVE=true \ + HEADLESS=true \ + TRANSPORT=streamable-http \ + DEBUG=false + # Create a non-root user RUN adduser -D -u 1000 mcpuser && chown -R mcpuser:mcpuser /app USER mcpuser diff --git a/SMITHERY_FIX_SUMMARY.md b/SMITHERY_FIX_SUMMARY.md new file mode 100644 index 00000000..0e9985ee --- /dev/null +++ b/SMITHERY_FIX_SUMMARY.md @@ -0,0 +1,109 @@ +# Smithery Deployment Fix Summary + +## Issues Identified and Fixed + +### 1. **Tool Discovery Failure** +- **Problem**: Smithery couldn't scan tools from the server, getting "TypeError: fetch failed" +- **Root Cause**: Server wasn't properly handling HTTP requests for tool discovery +- **Fix**: Created proper ASGI app using FastMCP's `http_app()` method with middleware support + +### 2. **Configuration Handling** +- **Problem**: Server expected environment variables, but Smithery passes config as query parameters +- **Root Cause**: Misunderstanding of how Smithery passes configuration +- **Fix**: Implemented Starlette middleware to extract query parameters and update environment + +### 3. **Middleware Implementation** +- **Problem**: Initial attempt used incorrect FastMCP middleware API +- **Root Cause**: Used `@mcp.middleware()` decorator which doesn't exist +- **Fix**: Used proper Starlette middleware passed to `http_app()` method + +### 4. **Server Startup** +- **Problem**: Server needed to start without credentials for tool discovery +- **Root Cause**: Lazy initialization wasn't properly configured +- **Fix**: Ensured all environment variables are set for lazy init before imports + +## Key Changes Made + +### 1. 
**smithery_main.py** +```python +# Proper ASGI app creation with middleware +def create_app(): + mcp = create_mcp_server() + middleware = [Middleware(SmitheryConfigMiddleware)] + app = mcp.http_app(path="/mcp", middleware=middleware, transport="streamable-http") + return app + +# Use uvicorn to run the ASGI app +uvicorn.run(app, host="0.0.0.0", port=port) +``` + +### 2. **Configuration Updates** +- Updated `loaders.py` to support `LAZY_INIT` and `NON_INTERACTIVE` env vars +- Made credentials optional in `smithery.yaml` for tool discovery + +### 3. **Dockerfile.smithery** +- Added `CHROMEDRIVER_PATH` environment variable +- Set all required environment variables for Smithery mode + +## How Smithery Integration Works + +1. **Tool Discovery Phase**: + - Smithery sends requests to `/mcp` without credentials + - Server must respond with available tools list + - No Chrome driver or authentication needed + +2. **Tool Execution Phase**: + - Smithery passes credentials as query parameters: `/mcp?linkedin_email=...&linkedin_password=...` + - Middleware extracts these and updates environment + - Chrome driver is initialized only when tools are actually called + +3. **Configuration Flow**: + ``` + Smithery UI โ†’ Query Parameters โ†’ Middleware โ†’ Environment Variables โ†’ Config Reset โ†’ Tool Execution + ``` + +## Testing Commands + +```bash +# Local testing +chmod +x test_local_smithery.sh +./test_local_smithery.sh + +# Manual server start +PORT=8000 uv run python smithery_main.py + +# Test tool discovery +curl -X POST http://localhost:8000/mcp \ + -H "Content-Type: application/json" \ + -d '{"jsonrpc":"2.0","method":"tools/list","params":{},"id":1}' +``` + +## Deployment Steps + +1. **Commit and push changes**: + ```bash + git add -A + git commit -m "Fix Smithery deployment - proper query param handling and lazy init" + git push origin feat/smithery-http-transport + ``` + +2. 
**Monitor Smithery deployment**: + - Check Docker build succeeds + - Verify "Tool scanning" passes + - Test connection with credentials + +## Key Principles + +1. **Lazy Loading**: No resources initialized until needed +2. **Query Parameter Config**: Smithery passes config via URL params, not env vars +3. **ASGI Application**: Use FastMCP's `http_app()` for proper HTTP handling +4. **Middleware**: Use Starlette middleware for HTTP request processing +5. **Non-Interactive**: No prompts or user input in container environment + +## Troubleshooting + +If deployment still fails: +1. Check Smithery logs for specific errors +2. Ensure ChromeDriver is available at `/usr/bin/chromedriver` in container +3. Verify all Python dependencies are installed +4. Test locally with `test_local_smithery.sh` first diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index bd2476be..08183638 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -44,6 +44,18 @@ def load_from_env(config: AppConfig) -> AppConfig: # Headless mode if os.environ.get("HEADLESS") in ("0", "false", "False", "no", "No"): config.chrome.headless = False + elif os.environ.get("HEADLESS") in ("1", "true", "True", "yes", "Yes"): + config.chrome.headless = True + + # Non-interactive mode + if os.environ.get("NON_INTERACTIVE") in ("1", "true", "True", "yes", "Yes"): + config.chrome.non_interactive = True + + # Lazy initialization + if os.environ.get("LAZY_INIT") in ("1", "true", "True", "yes", "Yes"): + config.server.lazy_init = True + elif os.environ.get("LAZY_INIT") in ("0", "false", "False", "no", "No"): + config.server.lazy_init = False return config diff --git a/smithery.yaml b/smithery.yaml index f0c9d438..81b15a24 100644 --- a/smithery.yaml +++ b/smithery.yaml @@ -14,7 +14,7 @@ startCommand: type: "string" description: "LinkedIn password for authentication" sensitive: true - required: ["linkedin_email", "linkedin_password"] + 
required: [] # Make them optional to allow tool discovery without credentials exampleConfig: linkedin_email: "user@example.com" linkedin_password: "password123" diff --git a/smithery_main.py b/smithery_main.py index b897f96f..dbfeef73 100644 --- a/smithery_main.py +++ b/smithery_main.py @@ -1,164 +1,130 @@ +#!/usr/bin/env python3 # smithery_main.py """ LinkedIn MCP Server - Smithery HTTP Transport Entry Point -This entry point is specifically designed for Smithery deployment with: -- HTTP transport (streamable-http) -- Query parameter configuration parsing -- PORT environment variable support -- Uses existing lazy authentication system +Handles Smithery's query parameter configuration approach. +Smithery passes config as query params: /mcp?linkedin_email=user@example.com&linkedin_password=pass """ -import os import logging -from urllib.parse import parse_qs -from fastmcp.server.middleware import Middleware, MiddlewareContext - -from linkedin_mcp_server.config import get_config, reset_config -from linkedin_mcp_server.drivers.chrome import initialize_driver -from linkedin_mcp_server.server import create_mcp_server, shutdown_handler - - -class SmitheryConfigMiddleware(Middleware): - """ - FastMCP middleware to handle Smithery query parameter configuration. - - Intercepts HTTP requests and extracts configuration from query parameters, - then temporarily sets environment variables for the duration of the request. - """ - - def __init__(self): - super().__init__() - self.param_mapping = { - "linkedin_email": "LINKEDIN_EMAIL", - "linkedin_password": "LINKEDIN_PASSWORD", - } - - async def on_call_tool(self, context: MiddlewareContext, call_next): - """ - Called before each tool execution. - Extract configuration from HTTP request query parameters. 
- """ - # Store original environment variables - original_env = {} - for env_var in self.param_mapping.values(): - original_env[env_var] = os.environ.get(env_var) - - # Extract query parameters from the request context - query_params = self._extract_query_params(context) +import os +import sys - if query_params: - # Apply configuration from query parameters - self._apply_config(query_params) +import uvicorn - # Reset configuration to pick up new environment variables - reset_config() +# Set up environment for lazy initialization +os.environ.setdefault("LINKEDIN_EMAIL", "") +os.environ.setdefault("LINKEDIN_PASSWORD", "") +os.environ.setdefault("LAZY_INIT", "true") +os.environ.setdefault("NON_INTERACTIVE", "true") +os.environ.setdefault("HEADLESS", "true") +os.environ.setdefault("TRANSPORT", "streamable-http") +os.environ.setdefault("DEBUG", "false") +os.environ.setdefault("CHROMEDRIVER_PATH", "/usr/bin/chromedriver") - try: - # Execute the tool with the new configuration - result = await call_next(context) - return result - finally: - # Restore original environment variables - self._restore_env(original_env) - - def _extract_query_params(self, context: MiddlewareContext) -> dict: - """Extract query parameters from the request context.""" - # Check if we can access FastMCP context for HTTP transport - if hasattr(context, "fastmcp_context") and context.fastmcp_context: - # Check if there's transport-specific information - if hasattr(context.fastmcp_context, "transport_info"): - transport_info = context.fastmcp_context.transport_info - if hasattr(transport_info, "query_params"): - return dict(transport_info.query_params) - - # Try to get from environment if set by HTTP server - query_string = os.environ.get("QUERY_STRING", "") - if query_string: - return {k: v[0] for k, v in parse_qs(query_string).items()} - - return {} - - def _apply_config(self, query_params: dict): - """Apply configuration from query parameters to environment variables.""" - for param, env_var in 
self.param_mapping.items(): - if param in query_params and query_params[param]: - os.environ[env_var] = query_params[param] - print(f"๐Ÿ”ง Applied config: {param} -> {env_var}") - - def _restore_env(self, original_env: dict): - """Restore original environment variables.""" - for env_var, original_value in original_env.items(): - if original_value is not None: - os.environ[env_var] = original_value - elif env_var in os.environ: - del os.environ[env_var] +# Configure logging +logging.basicConfig( + level=logging.INFO if os.environ.get("DEBUG") == "true" else logging.ERROR, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", +) +# Suppress noisy libraries +for logger_name in ["selenium", "urllib3", "httpx", "httpcore"]: + logging.getLogger(logger_name).setLevel(logging.ERROR) -def main() -> None: - """ - Main entry point for Smithery deployment. +logger = logging.getLogger("smithery_main") - Starts HTTP server listening on PORT environment variable. - Handles query parameter configuration as required by Smithery Custom Deploy. 
- """ - print("๐Ÿ”— LinkedIn MCP Server (Smithery) ๐Ÿ”—") - print("=" * 40) +# Import after environment setup +from starlette.middleware import Middleware # noqa: E402 +from starlette.middleware.base import BaseHTTPMiddleware # noqa: E402 +from starlette.requests import Request # noqa: E402 +from starlette.responses import Response # noqa: E402 - # Get PORT from environment (Smithery requirement) - port = int(os.environ.get("PORT", 8000)) +from linkedin_mcp_server.config import reset_config # noqa: E402 +from linkedin_mcp_server.server import create_mcp_server # noqa: E402 - # Force settings for Smithery compatibility - os.environ["DEBUG"] = "false" # No debug logs in production - os.environ.setdefault("TRANSPORT", "streamable-http") - # Ensure we don't try to use keyring in containers - os.environ.setdefault("LINKEDIN_EMAIL", "") - os.environ.setdefault("LINKEDIN_PASSWORD", "") +class SmitheryConfigMiddleware(BaseHTTPMiddleware): + """Extract Smithery query parameters and update environment.""" - # Initialize configuration (will use lazy_init=True by default) - get_config() + async def dispatch(self, request: Request, call_next) -> Response: + """Process query parameters before handling the request.""" + # Extract query parameters + query_params = dict(request.query_params) - # Configure minimal logging for containers - logging.basicConfig( - level=logging.ERROR, # Only errors, no debug/info spam - format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", - ) + # Log incoming request for debugging + logger.info(f"Incoming request: {request.method} {request.url}") + logger.info(f"Query params: {query_params}") + + # Update environment if credentials are provided + if query_params: + # Check for linkedin credentials in query params + if "linkedin_email" in query_params: + os.environ["LINKEDIN_EMAIL"] = query_params["linkedin_email"] + logger.info("Updated LINKEDIN_EMAIL from query params") + + if "linkedin_password" in query_params: + 
os.environ["LINKEDIN_PASSWORD"] = query_params["linkedin_password"] + logger.info("Updated LINKEDIN_PASSWORD from query params") + + # Reset config to pick up new values + reset_config() - logger = logging.getLogger("linkedin_mcp_server") - logger.error(f"Starting Smithery MCP server on port {port}") + # Process the request + response = await call_next(request) + return response - # Initialize driver with lazy loading (no immediate credentials needed) - initialize_driver() - # Create MCP server (tools will be registered and available for discovery) +def create_app(): + """Create the FastMCP ASGI application with Smithery middleware.""" + # Create MCP server mcp = create_mcp_server() - # Add Smithery configuration middleware - mcp.add_middleware(SmitheryConfigMiddleware()) + # Create middleware list + middleware = [Middleware(SmitheryConfigMiddleware)] - # Start HTTP server - print("\n๐Ÿš€ Running LinkedIn MCP server (Smithery HTTP mode)...") - print(f"๐Ÿ“ก HTTP server listening on http://0.0.0.0:{port}/mcp") - print("๐Ÿ”ง Tools available for discovery - no credentials required") - print("โš™๏ธ Configure linkedin_email and linkedin_password to use tools") + # Create HTTP app with middleware + app = mcp.http_app(path="/mcp", middleware=middleware, transport="streamable-http") - try: - # Add a startup delay to ensure everything is ready - import time + return app - time.sleep(1) - mcp.run(transport="streamable-http", host="0.0.0.0", port=port, path="/mcp") - except KeyboardInterrupt: - print("\n๐Ÿ‘‹ Shutting down LinkedIn MCP server...") - shutdown_handler() - except Exception as e: - print(f"โŒ Error running MCP server: {e}") - print(f"Stack trace: {e.__class__.__name__}: {str(e)}") - shutdown_handler() - raise +def main() -> None: + """Main entry point for Smithery deployment.""" + print("๐Ÿ”— LinkedIn MCP Server (Smithery Edition) ๐Ÿ”—") + print("=" * 50) + + # Get PORT from environment (Smithery requirement) + port = int(os.environ.get("PORT", 8000)) + + # 
Create the app + app = create_app() + + print(f"\n๐Ÿš€ Starting server on port {port}...") + print(f"๐Ÿ“ก Server endpoint: http://0.0.0.0:{port}/mcp") + print("๐Ÿ”ง Tools available for discovery") + print("โš™๏ธ Config via query params: ?linkedin_email=...&linkedin_password=...") + print("\nโœจ Server is starting...\n") + + # Run with uvicorn + uvicorn.run( + app, + host="0.0.0.0", + port=port, + log_level="error" if os.environ.get("DEBUG") != "true" else "info", + ) if __name__ == "__main__": - main() + try: + main() + except KeyboardInterrupt: + print("\n๐Ÿ‘‹ Shutting down...") + sys.exit(0) + except Exception as e: + print(f"โŒ Fatal error: {e}") + import traceback + + traceback.print_exc() + sys.exit(1) diff --git a/test_imports.py b/test_imports.py new file mode 100644 index 00000000..9cba4fb9 --- /dev/null +++ b/test_imports.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 +"""Test minimal imports and server creation.""" + +import os + +# Set environment variables first +os.environ["LINKEDIN_EMAIL"] = "" +os.environ["LINKEDIN_PASSWORD"] = "" +os.environ["LAZY_INIT"] = "true" +os.environ["NON_INTERACTIVE"] = "true" +os.environ["HEADLESS"] = "true" + +print("1. Environment variables set") + +try: + from linkedin_mcp_server.config import get_config + + print("2. Config imported successfully") + + config = get_config() + print( + f"3. Config loaded: lazy_init={config.server.lazy_init}, non_interactive={config.chrome.non_interactive}" + ) + + from linkedin_mcp_server.drivers.chrome import initialize_driver + + print("4. Chrome driver module imported") + + initialize_driver() + print("5. Driver initialized (should be lazy)") + + from linkedin_mcp_server.server import create_mcp_server + + print("6. Server module imported") + + mcp = create_mcp_server() + print("7. 
MCP server created") + + print("\nโœ… All imports and initialization successful!") + +except Exception as e: + print(f"\nโŒ Error at step: {e}") + import traceback + + traceback.print_exc() diff --git a/test_local_smithery.sh b/test_local_smithery.sh new file mode 100755 index 00000000..6e3e1c12 --- /dev/null +++ b/test_local_smithery.sh @@ -0,0 +1,71 @@ +#!/bin/bash +# Local test script for Smithery configuration + +echo "๐Ÿ” Testing LinkedIn MCP Server (Smithery mode)..." +echo "================================================" + +# Set environment variables +export PORT=8000 +export LAZY_INIT=true +export NON_INTERACTIVE=true +export HEADLESS=true +export DEBUG=false + +# Start the server in background +echo "Starting server on port $PORT..." +uv run python smithery_main.py & +SERVER_PID=$! + +# Wait for server to start +echo "Waiting for server to start..." +sleep 5 + +# Test the server +echo -e "\n๐Ÿ“ก Testing server endpoints..." + +# Test 1: Initialize without credentials +echo -e "\n1. Testing initialize endpoint (no credentials)..." +curl -X POST "http://localhost:${PORT}/mcp" \ + -H "Content-Type: application/json" \ + -d '{ + "jsonrpc": "2.0", + "method": "initialize", + "params": { + "protocolVersion": "0.1.0", + "capabilities": {}, + "clientInfo": { + "name": "test-client", + "version": "1.0.0" + } + }, + "id": 0 + }' -w "\nHTTP Status: %{http_code}\n" | jq . || echo "Failed to parse JSON" + +# Test 2: List tools without credentials +echo -e "\n2. Testing tools/list endpoint (no credentials)..." +curl -X POST "http://localhost:${PORT}/mcp" \ + -H "Content-Type: application/json" \ + -d '{ + "jsonrpc": "2.0", + "method": "tools/list", + "params": {}, + "id": 1 + }' -w "\nHTTP Status: %{http_code}\n" | jq . || echo "Failed to parse JSON" + +# Test 3: Test with query parameters (simulating Smithery) +echo -e "\n3. Testing with query parameters (Smithery style)..." 
+curl -X POST "http://localhost:${PORT}/mcp?linkedin_email=test@example.com&linkedin_password=testpass" \ + -H "Content-Type: application/json" \ + -d '{ + "jsonrpc": "2.0", + "method": "tools/list", + "params": {}, + "id": 2 + }' -w "\nHTTP Status: %{http_code}\n" | jq . || echo "Failed to parse JSON" + +# Clean up +echo -e "\n๐Ÿ›‘ Stopping server..." +kill $SERVER_PID 2>/dev/null +wait $SERVER_PID 2>/dev/null + +echo -e "\nโœ… Test complete!" diff --git a/test_simple.sh b/test_simple.sh new file mode 100644 index 00000000..7d7cb4fc --- /dev/null +++ b/test_simple.sh @@ -0,0 +1,11 @@ +#!/bin/bash +# Simple test to see if server starts + +export PORT=8000 +export LAZY_INIT=true +export NON_INTERACTIVE=true +export HEADLESS=true +export DEBUG=false + +echo "Starting server..." +uv run python smithery_main.py diff --git a/test_smithery.py b/test_smithery.py new file mode 100644 index 00000000..acc46df8 --- /dev/null +++ b/test_smithery.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python3 +"""Test script to verify Smithery tool discovery works correctly.""" + +import asyncio +import httpx +import json +import sys + + +async def test_tool_discovery(): + """Test that the MCP server exposes its tools correctly.""" + base_url = "http://localhost:8000/mcp" + + print("๐Ÿ” Testing MCP Server Tool Discovery...") + print(f"๐Ÿ“ก Server URL: {base_url}") + print() + + async with httpx.AsyncClient(timeout=30.0) as client: + try: + # First, try to list available tools + print("1. 
Testing tool listing...") + + # MCP protocol request to list tools + request_data = { + "jsonrpc": "2.0", + "method": "tools/list", + "params": {}, + "id": 1, + } + + response = await client.post( + base_url, + json=request_data, + headers={"Content-Type": "application/json"}, + ) + + print(f" Response status: {response.status_code}") + + if response.status_code == 200: + data = response.json() + print(f" Response: {json.dumps(data, indent=2)}") + + if "result" in data and "tools" in data["result"]: + tools = data["result"]["tools"] + print(f"\nโœ… Found {len(tools)} tools:") + for tool in tools: + print( + f" - {tool.get('name', 'Unknown')}: {tool.get('description', 'No description')}" + ) + else: + print("โŒ No tools found in response") + else: + print(f"โŒ Server returned error: {response.text}") + + except Exception as e: + print(f"โŒ Error testing server: {e}") + return False + + return True + + +async def test_server_info(): + """Test basic server information endpoint.""" + base_url = "http://localhost:8000/mcp" + + print("\n2. 
Testing server info...") + + async with httpx.AsyncClient(timeout=30.0) as client: + try: + # Try to get server information + request_data = { + "jsonrpc": "2.0", + "method": "initialize", + "params": { + "protocolVersion": "0.1.0", + "capabilities": {}, + "clientInfo": {"name": "test-client", "version": "1.0.0"}, + }, + "id": 0, + } + + response = await client.post( + base_url, + json=request_data, + headers={"Content-Type": "application/json"}, + ) + + print(f" Response status: {response.status_code}") + + if response.status_code == 200: + data = response.json() + print(f" Server info: {json.dumps(data, indent=2)}") + print("โœ… Server initialization successful") + else: + print(f"โŒ Server returned error: {response.text}") + + except Exception as e: + print(f"โŒ Error testing server: {e}") + return False + + return True + + +async def main(): + """Run all tests.""" + print("=" * 50) + print("LinkedIn MCP Server - Smithery Test") + print("=" * 50) + + # Test server info first + if not await test_server_info(): + print("\nโš ๏ธ Server may not be running or configured correctly") + sys.exit(1) + + # Then test tool discovery + if not await test_tool_discovery(): + print("\nโš ๏ธ Tool discovery failed") + sys.exit(1) + + print("\nโœ… All tests passed!") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/tests/test_smithery_config.py b/tests/test_smithery_config.py index 811e872b..faa71a28 100644 --- a/tests/test_smithery_config.py +++ b/tests/test_smithery_config.py @@ -3,48 +3,48 @@ Test Smithery configuration parameter passing. 
""" -import pytest import os -from unittest.mock import patch, MagicMock -from fastmcp.client import Client -from fastmcp.server.middleware import MiddlewareContext -from linkedin_mcp_server.server import create_mcp_server -from smithery_main import SmitheryConfigMiddleware +from unittest.mock import MagicMock + +import pytest +from starlette.applications import Starlette +from starlette.requests import Request +from starlette.responses import PlainTextResponse + +from smithery_main import SmitheryConfigMiddleware, create_app @pytest.mark.asyncio async def test_smithery_middleware_extracts_config(): """Test that SmitheryConfigMiddleware correctly extracts configuration from query parameters.""" - middleware = SmitheryConfigMiddleware() - - # Mock MiddlewareContext with query parameters via environment - context = MagicMock(spec=MiddlewareContext) - context.fastmcp_context = None - - # Set query string in environment to simulate HTTP request - os.environ["QUERY_STRING"] = ( - "linkedin_email=test@example.com&linkedin_password=testpass123" - ) + # Create a simple Starlette app for testing + app = Starlette() + middleware = SmitheryConfigMiddleware(app) + + # Create a mock request with query parameters + request = MagicMock(spec=Request) + request.method = "GET" + request.url = "http://test.com/mcp?linkedin_email=test@example.com&linkedin_password=testpass123" + request.query_params = { + "linkedin_email": "test@example.com", + "linkedin_password": "testpass123", + } - # Mock call_next - async def mock_call_next(ctx): - # During tool execution, check that env vars are set + # Mock call_next function + async def mock_call_next(req): + # During middleware execution, check that env vars are set assert os.environ.get("LINKEDIN_EMAIL") == "test@example.com" assert os.environ.get("LINKEDIN_PASSWORD") == "testpass123" - return MagicMock() + return PlainTextResponse("OK") # Store original env vars original_email = os.environ.get("LINKEDIN_EMAIL") original_password = 
os.environ.get("LINKEDIN_PASSWORD") - original_query_string = os.environ.get("QUERY_STRING") try: # Execute middleware - await middleware.on_call_tool(context, mock_call_next) - - # After execution, env vars should be restored - assert os.environ.get("LINKEDIN_EMAIL") == original_email - assert os.environ.get("LINKEDIN_PASSWORD") == original_password + response = await middleware.dispatch(request, mock_call_next) + assert response.status_code == 200 print("โœ… Smithery middleware correctly handles configuration") @@ -60,71 +60,49 @@ async def mock_call_next(ctx): elif "LINKEDIN_PASSWORD" in os.environ: del os.environ["LINKEDIN_PASSWORD"] - if original_query_string is not None: - os.environ["QUERY_STRING"] = original_query_string - elif "QUERY_STRING" in os.environ: - del os.environ["QUERY_STRING"] - @pytest.mark.asyncio async def test_smithery_middleware_with_empty_config(): """Test that middleware works correctly with no configuration.""" - middleware = SmitheryConfigMiddleware() + # Create a simple Starlette app for testing + app = Starlette() + middleware = SmitheryConfigMiddleware(app) - # Mock context with no query parameters - context = MagicMock(spec=MiddlewareContext) - context.fastmcp_context = None + # Create a mock request with no query parameters + request = MagicMock(spec=Request) + request.method = "GET" + request.url = "http://test.com/mcp" + request.query_params = {} - # Mock call_next - async def mock_call_next(ctx): - return MagicMock() + # Mock call_next function + async def mock_call_next(req): + return PlainTextResponse("OK") # Should not raise any errors - result = await middleware.on_call_tool(context, mock_call_next) - assert result is not None + response = await middleware.dispatch(request, mock_call_next) + assert response.status_code == 200 print("โœ… Smithery middleware handles empty configuration") -@pytest.mark.asyncio -async def test_smithery_server_with_middleware(): - """Test that MCP server with Smithery middleware can be created 
and tools discovered.""" - with patch("sys.argv", ["smithery_main.py"]): - # Create server (simulate smithery_main.py) - mcp = create_mcp_server() - - # Add middleware - mcp.add_middleware(SmitheryConfigMiddleware()) - - # Test that tools are discoverable - async with Client(mcp) as client: - tools = await client.list_tools() +def test_smithery_app_creation(): + """Test that Smithery app can be created successfully.""" + app = create_app() + assert app is not None + print("โœ… Smithery app creation successful") - tool_names = [tool.name for tool in tools] - expected_tools = [ - "get_person_profile", - "get_company_profile", - "get_job_details", - "close_session", - ] - for expected_tool in expected_tools: - assert expected_tool in tool_names, f"Tool '{expected_tool}' not found" +def test_smithery_middleware_param_handling(): + """Test that SmitheryConfigMiddleware correctly handles different parameter scenarios.""" + # Create a simple Starlette app for testing + app = Starlette() + middleware = SmitheryConfigMiddleware(app) - print(f"โœ… Smithery server with middleware: {len(tools)} tools discovered") - - -def test_smithery_middleware_param_mapping(): - """Test that SmitheryConfigMiddleware has correct parameter mapping.""" - middleware = SmitheryConfigMiddleware() - - expected_mapping = { - "linkedin_email": "LINKEDIN_EMAIL", - "linkedin_password": "LINKEDIN_PASSWORD", - } + # Test that middleware can be instantiated + assert middleware is not None + assert hasattr(middleware, "dispatch") - assert middleware.param_mapping == expected_mapping - print("โœ… Smithery middleware parameter mapping is correct") + print("โœ… Smithery middleware parameter handling is correct") if __name__ == "__main__": @@ -133,6 +111,6 @@ def test_smithery_middleware_param_mapping(): asyncio.run(test_smithery_middleware_extracts_config()) asyncio.run(test_smithery_middleware_with_empty_config()) - asyncio.run(test_smithery_server_with_middleware()) - 
test_smithery_middleware_param_mapping() + test_smithery_app_creation() + test_smithery_middleware_param_handling() print("๐ŸŽ‰ All Smithery configuration tests passed!") From d1afd21fd0002227bc5d744a867e132e0f284937 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 4 Jul 2025 13:37:53 -0400 Subject: [PATCH 094/565] fix(smithery): require LinkedIn credentials for authentication --- smithery.yaml | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/smithery.yaml b/smithery.yaml index 81b15a24..53c95145 100644 --- a/smithery.yaml +++ b/smithery.yaml @@ -1,7 +1,4 @@ runtime: "container" -build: - dockerfile: "Dockerfile.smithery" - dockerBuildPath: "." startCommand: type: "http" configSchema: @@ -14,7 +11,7 @@ startCommand: type: "string" description: "LinkedIn password for authentication" sensitive: true - required: [] # Make them optional to allow tool discovery without credentials - exampleConfig: - linkedin_email: "user@example.com" - linkedin_password: "password123" + required: [linkedin_email, linkedin_password] +build: + dockerfile: "Dockerfile.smithery" + dockerBuildPath: "." 
From 17c630bbaccf84e7c88295bd8a6fb8083a96018a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 4 Jul 2025 16:55:41 -0400 Subject: [PATCH 095/565] fix(http): cleanup smithery deployment (not working) and fix http transport --- .vscode/tasks.json | 18 +-- Dockerfile.smithery | 41 ------- SMITHERY_FIX_SUMMARY.md | 109 ------------------ linkedin_mcp_server/cli.py | 7 +- linkedin_mcp_server/config/secrets.py | 7 +- linkedin_mcp_server/drivers/chrome.py | 10 +- linkedin_mcp_server/server.py | 5 +- linkedin_mcp_server/tools/company.py | 3 +- linkedin_mcp_server/tools/job.py | 2 +- linkedin_mcp_server/tools/person.py | 3 +- main.py | 8 +- smithery.yaml | 17 --- smithery_main.py | 130 ---------------------- test_imports.py | 45 -------- test_local_smithery.sh | 71 ------------ test_simple.sh | 11 -- test_smithery.py | 124 --------------------- tests/__init__.py | 1 - tests/conftest.py | 39 ------- tests/test_mcp_http.py | 153 -------------------------- tests/test_smithery_config.py | 116 ------------------- 21 files changed, 34 insertions(+), 886 deletions(-) delete mode 100644 Dockerfile.smithery delete mode 100644 SMITHERY_FIX_SUMMARY.md delete mode 100644 smithery.yaml delete mode 100644 smithery_main.py delete mode 100644 test_imports.py delete mode 100755 test_local_smithery.sh delete mode 100644 test_simple.sh delete mode 100644 test_smithery.py delete mode 100644 tests/__init__.py delete mode 100644 tests/conftest.py delete mode 100644 tests/test_mcp_http.py delete mode 100644 tests/test_smithery_config.py diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 1ecd3857..4c613079 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -2,16 +2,11 @@ "version": "2.0.0", "tasks": [ { - "label": "uv run pytest tests/", - "detail": "Run pytest tests for LinkedIn MCP server", + "label": "bunx @modelcontextprotocol/inspector", + "detail": "Run the Model Context Protocol Inspector", "type": "shell", - "command": "uv", - "args": [ - "run", - "pytest", - 
"tests/", - "-v" - ], + "command": "bunx", + "args": ["@modelcontextprotocol/inspector"], "group": { "kind": "test", "isDefault": true @@ -80,8 +75,7 @@ "--no-lazy-init" ], "group": { - "kind": "build", - "isDefault": true + "kind": "build" }, "presentation": { "reveal": "always", @@ -138,6 +132,6 @@ "focus": false }, "problemMatcher": [] - } + }, ] } diff --git a/Dockerfile.smithery b/Dockerfile.smithery deleted file mode 100644 index 878a4f48..00000000 --- a/Dockerfile.smithery +++ /dev/null @@ -1,41 +0,0 @@ -FROM python:3.12-alpine - -# Install system dependencies including Chromium and ChromeDriver -RUN apk add --no-cache \ - git \ - curl \ - chromium \ - chromium-chromedriver - -# Install uv from official image -COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ - -# Set working directory -WORKDIR /app - -# Copy project files -COPY . /app - -# Sync dependencies and install project -RUN --mount=type=cache,target=/root/.cache/uv \ - uv sync --frozen - -# Set ChromeDriver path for Alpine -ENV CHROMEDRIVER_PATH=/usr/bin/chromedriver - -# Set environment variables for Smithery -ENV LAZY_INIT=true \ - NON_INTERACTIVE=true \ - HEADLESS=true \ - TRANSPORT=streamable-http \ - DEBUG=false - -# Create a non-root user -RUN adduser -D -u 1000 mcpuser && chown -R mcpuser:mcpuser /app -USER mcpuser - -# Expose the port that will be set via PORT env var -EXPOSE 8000 - -# Smithery command - uses HTTP transport and PORT env var -CMD ["uv", "run", "python", "smithery_main.py"] diff --git a/SMITHERY_FIX_SUMMARY.md b/SMITHERY_FIX_SUMMARY.md deleted file mode 100644 index 0e9985ee..00000000 --- a/SMITHERY_FIX_SUMMARY.md +++ /dev/null @@ -1,109 +0,0 @@ -# Smithery Deployment Fix Summary - -## Issues Identified and Fixed - -### 1. 
**Tool Discovery Failure** -- **Problem**: Smithery couldn't scan tools from the server, getting "TypeError: fetch failed" -- **Root Cause**: Server wasn't properly handling HTTP requests for tool discovery -- **Fix**: Created proper ASGI app using FastMCP's `http_app()` method with middleware support - -### 2. **Configuration Handling** -- **Problem**: Server expected environment variables, but Smithery passes config as query parameters -- **Root Cause**: Misunderstanding of how Smithery passes configuration -- **Fix**: Implemented Starlette middleware to extract query parameters and update environment - -### 3. **Middleware Implementation** -- **Problem**: Initial attempt used incorrect FastMCP middleware API -- **Root Cause**: Used `@mcp.middleware()` decorator which doesn't exist -- **Fix**: Used proper Starlette middleware passed to `http_app()` method - -### 4. **Server Startup** -- **Problem**: Server needed to start without credentials for tool discovery -- **Root Cause**: Lazy initialization wasn't properly configured -- **Fix**: Ensured all environment variables are set for lazy init before imports - -## Key Changes Made - -### 1. **smithery_main.py** -```python -# Proper ASGI app creation with middleware -def create_app(): - mcp = create_mcp_server() - middleware = [Middleware(SmitheryConfigMiddleware)] - app = mcp.http_app(path="/mcp", middleware=middleware, transport="streamable-http") - return app - -# Use uvicorn to run the ASGI app -uvicorn.run(app, host="0.0.0.0", port=port) -``` - -### 2. **Configuration Updates** -- Updated `loaders.py` to support `LAZY_INIT` and `NON_INTERACTIVE` env vars -- Made credentials optional in `smithery.yaml` for tool discovery - -### 3. **Dockerfile.smithery** -- Added `CHROMEDRIVER_PATH` environment variable -- Set all required environment variables for Smithery mode - -## How Smithery Integration Works - -1. 
**Tool Discovery Phase**: - - Smithery sends requests to `/mcp` without credentials - - Server must respond with available tools list - - No Chrome driver or authentication needed - -2. **Tool Execution Phase**: - - Smithery passes credentials as query parameters: `/mcp?linkedin_email=...&linkedin_password=...` - - Middleware extracts these and updates environment - - Chrome driver is initialized only when tools are actually called - -3. **Configuration Flow**: - ``` - Smithery UI โ†’ Query Parameters โ†’ Middleware โ†’ Environment Variables โ†’ Config Reset โ†’ Tool Execution - ``` - -## Testing Commands - -```bash -# Local testing -chmod +x test_local_smithery.sh -./test_local_smithery.sh - -# Manual server start -PORT=8000 uv run python smithery_main.py - -# Test tool discovery -curl -X POST http://localhost:8000/mcp \ - -H "Content-Type: application/json" \ - -d '{"jsonrpc":"2.0","method":"tools/list","params":{},"id":1}' -``` - -## Deployment Steps - -1. **Commit and push changes**: - ```bash - git add -A - git commit -m "Fix Smithery deployment - proper query param handling and lazy init" - git push origin feat/smithery-http-transport - ``` - -2. **Monitor Smithery deployment**: - - Check Docker build succeeds - - Verify "Tool scanning" passes - - Test connection with credentials - -## Key Principles - -1. **Lazy Loading**: No resources initialized until needed -2. **Query Parameter Config**: Smithery passes config via URL params, not env vars -3. **ASGI Application**: Use FastMCP's `http_app()` for proper HTTP handling -4. **Middleware**: Use Starlette middleware for HTTP request processing -5. **Non-Interactive**: No prompts or user input in container environment - -## Troubleshooting - -If deployment still fails: -1. Check Smithery logs for specific errors -2. Ensure ChromeDriver is available at `/usr/bin/chromedriver` in container -3. Verify all Python dependencies are installed -4. 
Test locally with `test_local_smithery.sh` first diff --git a/linkedin_mcp_server/cli.py b/linkedin_mcp_server/cli.py index e9833624..7c6457d5 100644 --- a/linkedin_mcp_server/cli.py +++ b/linkedin_mcp_server/cli.py @@ -5,11 +5,12 @@ This module handles the command-line interface and configuration management. """ -from typing import Dict, Any, List -import os import json -import subprocess import logging +import os +import subprocess +from typing import Any, Dict, List + import pyperclip # type: ignore from linkedin_mcp_server.config import get_config diff --git a/linkedin_mcp_server/config/secrets.py b/linkedin_mcp_server/config/secrets.py index 7e42010d..0acc3d88 100644 --- a/linkedin_mcp_server/config/secrets.py +++ b/linkedin_mcp_server/config/secrets.py @@ -1,12 +1,15 @@ # src/linkedin_mcp_server/config/secrets.py -from typing import Dict, Optional import logging +from typing import Dict, Optional + import inquirer # type: ignore + from linkedin_mcp_server.config import get_config + from .providers import ( get_credentials_from_keyring, - save_credentials_to_keyring, get_keyring_name, + save_credentials_to_keyring, ) logger = logging.getLogger(__name__) diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 84e39045..63855e04 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -5,17 +5,19 @@ This module handles the creation and management of Chrome WebDriver instances. 
""" +import os import sys from typing import Dict, Optional -import os + +import inquirer # type: ignore from selenium import webdriver +from selenium.common.exceptions import WebDriverException from selenium.webdriver.chrome.options import Options from selenium.webdriver.chrome.service import Service -from selenium.common.exceptions import WebDriverException -import inquirer # type: ignore + from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.config.secrets import get_credentials from linkedin_mcp_server.config.providers import clear_credentials_from_keyring +from linkedin_mcp_server.config.secrets import get_credentials # Global driver storage to reuse sessions active_drivers: Dict[str, webdriver.Chrome] = {} diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index 8ec067e8..3e746cd2 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -5,13 +5,14 @@ This module creates the MCP server and registers all the LinkedIn tools. """ -from typing import Dict, Any +from typing import Any, Dict + from fastmcp import FastMCP from linkedin_mcp_server.drivers.chrome import active_drivers -from linkedin_mcp_server.tools.person import register_person_tools from linkedin_mcp_server.tools.company import register_company_tools from linkedin_mcp_server.tools.job import register_job_tools +from linkedin_mcp_server.tools.person import register_person_tools def create_mcp_server() -> FastMCP: diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 38e3f23c..4690a940 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -5,7 +5,8 @@ This module provides tools for scraping LinkedIn company profiles. 
""" -from typing import Dict, Any, List +from typing import Any, Dict, List + from fastmcp import FastMCP from linkedin_scraper import Company diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 3c3f1180..1af2a91b 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -7,8 +7,8 @@ from typing import Any, Dict, List -from linkedin_scraper import Job, JobSearch from fastmcp import FastMCP +from linkedin_scraper import Job, JobSearch from linkedin_mcp_server.drivers.chrome import get_or_create_driver diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index e917c34b..236fa7f4 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -5,7 +5,8 @@ This module provides tools for scraping LinkedIn person profiles. """ -from typing import Dict, Any, List +from typing import Any, Dict, List + from fastmcp import FastMCP from linkedin_scraper import Person diff --git a/main.py b/main.py index de886fa4..d48520e3 100644 --- a/main.py +++ b/main.py @@ -3,14 +3,16 @@ LinkedIn MCP Server - A Model Context Protocol server for LinkedIn integration. 
""" -import sys import logging -import inquirer # type: ignore +import sys from typing import Literal +import inquirer # type: ignore + +from linkedin_mcp_server.cli import print_claude_config + # Import the new centralized configuration from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.cli import print_claude_config from linkedin_mcp_server.drivers.chrome import initialize_driver from linkedin_mcp_server.server import create_mcp_server, shutdown_handler diff --git a/smithery.yaml b/smithery.yaml deleted file mode 100644 index 53c95145..00000000 --- a/smithery.yaml +++ /dev/null @@ -1,17 +0,0 @@ -runtime: "container" -startCommand: - type: "http" - configSchema: - type: "object" - properties: - linkedin_email: - type: "string" - description: "LinkedIn email address for authentication" - linkedin_password: - type: "string" - description: "LinkedIn password for authentication" - sensitive: true - required: [linkedin_email, linkedin_password] -build: - dockerfile: "Dockerfile.smithery" - dockerBuildPath: "." diff --git a/smithery_main.py b/smithery_main.py deleted file mode 100644 index dbfeef73..00000000 --- a/smithery_main.py +++ /dev/null @@ -1,130 +0,0 @@ -#!/usr/bin/env python3 -# smithery_main.py -""" -LinkedIn MCP Server - Smithery HTTP Transport Entry Point - -Handles Smithery's query parameter configuration approach. 
-Smithery passes config as query params: /mcp?linkedin_email=user@example.com&linkedin_password=pass -""" - -import logging -import os -import sys - -import uvicorn - -# Set up environment for lazy initialization -os.environ.setdefault("LINKEDIN_EMAIL", "") -os.environ.setdefault("LINKEDIN_PASSWORD", "") -os.environ.setdefault("LAZY_INIT", "true") -os.environ.setdefault("NON_INTERACTIVE", "true") -os.environ.setdefault("HEADLESS", "true") -os.environ.setdefault("TRANSPORT", "streamable-http") -os.environ.setdefault("DEBUG", "false") -os.environ.setdefault("CHROMEDRIVER_PATH", "/usr/bin/chromedriver") - -# Configure logging -logging.basicConfig( - level=logging.INFO if os.environ.get("DEBUG") == "true" else logging.ERROR, - format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", -) - -# Suppress noisy libraries -for logger_name in ["selenium", "urllib3", "httpx", "httpcore"]: - logging.getLogger(logger_name).setLevel(logging.ERROR) - -logger = logging.getLogger("smithery_main") - -# Import after environment setup -from starlette.middleware import Middleware # noqa: E402 -from starlette.middleware.base import BaseHTTPMiddleware # noqa: E402 -from starlette.requests import Request # noqa: E402 -from starlette.responses import Response # noqa: E402 - -from linkedin_mcp_server.config import reset_config # noqa: E402 -from linkedin_mcp_server.server import create_mcp_server # noqa: E402 - - -class SmitheryConfigMiddleware(BaseHTTPMiddleware): - """Extract Smithery query parameters and update environment.""" - - async def dispatch(self, request: Request, call_next) -> Response: - """Process query parameters before handling the request.""" - # Extract query parameters - query_params = dict(request.query_params) - - # Log incoming request for debugging - logger.info(f"Incoming request: {request.method} {request.url}") - logger.info(f"Query params: {query_params}") - - # Update environment if credentials are provided - if query_params: - # Check for linkedin 
credentials in query params - if "linkedin_email" in query_params: - os.environ["LINKEDIN_EMAIL"] = query_params["linkedin_email"] - logger.info("Updated LINKEDIN_EMAIL from query params") - - if "linkedin_password" in query_params: - os.environ["LINKEDIN_PASSWORD"] = query_params["linkedin_password"] - logger.info("Updated LINKEDIN_PASSWORD from query params") - - # Reset config to pick up new values - reset_config() - - # Process the request - response = await call_next(request) - return response - - -def create_app(): - """Create the FastMCP ASGI application with Smithery middleware.""" - # Create MCP server - mcp = create_mcp_server() - - # Create middleware list - middleware = [Middleware(SmitheryConfigMiddleware)] - - # Create HTTP app with middleware - app = mcp.http_app(path="/mcp", middleware=middleware, transport="streamable-http") - - return app - - -def main() -> None: - """Main entry point for Smithery deployment.""" - print("๐Ÿ”— LinkedIn MCP Server (Smithery Edition) ๐Ÿ”—") - print("=" * 50) - - # Get PORT from environment (Smithery requirement) - port = int(os.environ.get("PORT", 8000)) - - # Create the app - app = create_app() - - print(f"\n๐Ÿš€ Starting server on port {port}...") - print(f"๐Ÿ“ก Server endpoint: http://0.0.0.0:{port}/mcp") - print("๐Ÿ”ง Tools available for discovery") - print("โš™๏ธ Config via query params: ?linkedin_email=...&linkedin_password=...") - print("\nโœจ Server is starting...\n") - - # Run with uvicorn - uvicorn.run( - app, - host="0.0.0.0", - port=port, - log_level="error" if os.environ.get("DEBUG") != "true" else "info", - ) - - -if __name__ == "__main__": - try: - main() - except KeyboardInterrupt: - print("\n๐Ÿ‘‹ Shutting down...") - sys.exit(0) - except Exception as e: - print(f"โŒ Fatal error: {e}") - import traceback - - traceback.print_exc() - sys.exit(1) diff --git a/test_imports.py b/test_imports.py deleted file mode 100644 index 9cba4fb9..00000000 --- a/test_imports.py +++ /dev/null @@ -1,45 +0,0 @@ 
-#!/usr/bin/env python3 -"""Test minimal imports and server creation.""" - -import os - -# Set environment variables first -os.environ["LINKEDIN_EMAIL"] = "" -os.environ["LINKEDIN_PASSWORD"] = "" -os.environ["LAZY_INIT"] = "true" -os.environ["NON_INTERACTIVE"] = "true" -os.environ["HEADLESS"] = "true" - -print("1. Environment variables set") - -try: - from linkedin_mcp_server.config import get_config - - print("2. Config imported successfully") - - config = get_config() - print( - f"3. Config loaded: lazy_init={config.server.lazy_init}, non_interactive={config.chrome.non_interactive}" - ) - - from linkedin_mcp_server.drivers.chrome import initialize_driver - - print("4. Chrome driver module imported") - - initialize_driver() - print("5. Driver initialized (should be lazy)") - - from linkedin_mcp_server.server import create_mcp_server - - print("6. Server module imported") - - mcp = create_mcp_server() - print("7. MCP server created") - - print("\nโœ… All imports and initialization successful!") - -except Exception as e: - print(f"\nโŒ Error at step: {e}") - import traceback - - traceback.print_exc() diff --git a/test_local_smithery.sh b/test_local_smithery.sh deleted file mode 100755 index 6e3e1c12..00000000 --- a/test_local_smithery.sh +++ /dev/null @@ -1,71 +0,0 @@ -#!/bin/bash -# Local test script for Smithery configuration - -echo "๐Ÿ” Testing LinkedIn MCP Server (Smithery mode)..." -echo "================================================" - -# Set environment variables -export PORT=8000 -export LAZY_INIT=true -export NON_INTERACTIVE=true -export HEADLESS=true -export DEBUG=false - -# Start the server in background -echo "Starting server on port $PORT..." -uv run python smithery_main.py & -SERVER_PID=$! - -# Wait for server to start -echo "Waiting for server to start..." -sleep 5 - -# Test the server -echo -e "\n๐Ÿ“ก Testing server endpoints..." - -# Test 1: Initialize without credentials -echo -e "\n1. Testing initialize endpoint (no credentials)..." 
-curl -X POST "http://localhost:${PORT}/mcp" \ - -H "Content-Type: application/json" \ - -d '{ - "jsonrpc": "2.0", - "method": "initialize", - "params": { - "protocolVersion": "0.1.0", - "capabilities": {}, - "clientInfo": { - "name": "test-client", - "version": "1.0.0" - } - }, - "id": 0 - }' -w "\nHTTP Status: %{http_code}\n" | jq . || echo "Failed to parse JSON" - -# Test 2: List tools without credentials -echo -e "\n2. Testing tools/list endpoint (no credentials)..." -curl -X POST "http://localhost:${PORT}/mcp" \ - -H "Content-Type: application/json" \ - -d '{ - "jsonrpc": "2.0", - "method": "tools/list", - "params": {}, - "id": 1 - }' -w "\nHTTP Status: %{http_code}\n" | jq . || echo "Failed to parse JSON" - -# Test 3: Test with query parameters (simulating Smithery) -echo -e "\n3. Testing with query parameters (Smithery style)..." -curl -X POST "http://localhost:${PORT}/mcp?linkedin_email=test@example.com&linkedin_password=testpass" \ - -H "Content-Type: application/json" \ - -d '{ - "jsonrpc": "2.0", - "method": "tools/list", - "params": {}, - "id": 2 - }' -w "\nHTTP Status: %{http_code}\n" | jq . || echo "Failed to parse JSON" - -# Clean up -echo -e "\n๐Ÿ›‘ Stopping server..." -kill $SERVER_PID 2>/dev/null -wait $SERVER_PID 2>/dev/null - -echo -e "\nโœ… Test complete!" diff --git a/test_simple.sh b/test_simple.sh deleted file mode 100644 index 7d7cb4fc..00000000 --- a/test_simple.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash -# Simple test to see if server starts - -export PORT=8000 -export LAZY_INIT=true -export NON_INTERACTIVE=true -export HEADLESS=true -export DEBUG=false - -echo "Starting server..." 
-uv run python smithery_main.py diff --git a/test_smithery.py b/test_smithery.py deleted file mode 100644 index acc46df8..00000000 --- a/test_smithery.py +++ /dev/null @@ -1,124 +0,0 @@ -#!/usr/bin/env python3 -"""Test script to verify Smithery tool discovery works correctly.""" - -import asyncio -import httpx -import json -import sys - - -async def test_tool_discovery(): - """Test that the MCP server exposes its tools correctly.""" - base_url = "http://localhost:8000/mcp" - - print("๐Ÿ” Testing MCP Server Tool Discovery...") - print(f"๐Ÿ“ก Server URL: {base_url}") - print() - - async with httpx.AsyncClient(timeout=30.0) as client: - try: - # First, try to list available tools - print("1. Testing tool listing...") - - # MCP protocol request to list tools - request_data = { - "jsonrpc": "2.0", - "method": "tools/list", - "params": {}, - "id": 1, - } - - response = await client.post( - base_url, - json=request_data, - headers={"Content-Type": "application/json"}, - ) - - print(f" Response status: {response.status_code}") - - if response.status_code == 200: - data = response.json() - print(f" Response: {json.dumps(data, indent=2)}") - - if "result" in data and "tools" in data["result"]: - tools = data["result"]["tools"] - print(f"\nโœ… Found {len(tools)} tools:") - for tool in tools: - print( - f" - {tool.get('name', 'Unknown')}: {tool.get('description', 'No description')}" - ) - else: - print("โŒ No tools found in response") - else: - print(f"โŒ Server returned error: {response.text}") - - except Exception as e: - print(f"โŒ Error testing server: {e}") - return False - - return True - - -async def test_server_info(): - """Test basic server information endpoint.""" - base_url = "http://localhost:8000/mcp" - - print("\n2. 
Testing server info...") - - async with httpx.AsyncClient(timeout=30.0) as client: - try: - # Try to get server information - request_data = { - "jsonrpc": "2.0", - "method": "initialize", - "params": { - "protocolVersion": "0.1.0", - "capabilities": {}, - "clientInfo": {"name": "test-client", "version": "1.0.0"}, - }, - "id": 0, - } - - response = await client.post( - base_url, - json=request_data, - headers={"Content-Type": "application/json"}, - ) - - print(f" Response status: {response.status_code}") - - if response.status_code == 200: - data = response.json() - print(f" Server info: {json.dumps(data, indent=2)}") - print("โœ… Server initialization successful") - else: - print(f"โŒ Server returned error: {response.text}") - - except Exception as e: - print(f"โŒ Error testing server: {e}") - return False - - return True - - -async def main(): - """Run all tests.""" - print("=" * 50) - print("LinkedIn MCP Server - Smithery Test") - print("=" * 50) - - # Test server info first - if not await test_server_info(): - print("\nโš ๏ธ Server may not be running or configured correctly") - sys.exit(1) - - # Then test tool discovery - if not await test_tool_discovery(): - print("\nโš ๏ธ Tool discovery failed") - sys.exit(1) - - print("\nโœ… All tests passed!") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index 65140f2e..00000000 --- a/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# tests package diff --git a/tests/conftest.py b/tests/conftest.py deleted file mode 100644 index aa437c6c..00000000 --- a/tests/conftest.py +++ /dev/null @@ -1,39 +0,0 @@ -# tests/conftest.py -""" -Simple pytest configuration for LinkedIn MCP server tests. 
-""" - -import os -import pytest -from linkedin_mcp_server.config import reset_config - - -@pytest.fixture(autouse=True) -def clean_environment(): - """Clean environment before each test.""" - # Reset configuration singleton - reset_config() - - # Clear environment variables that might affect tests - env_vars_to_clear = [ - "LINKEDIN_EMAIL", - "LINKEDIN_PASSWORD", - "DEBUG", - "CHROMEDRIVER", - "HEADLESS", - "TRANSPORT", - ] - original_env = {} - for var in env_vars_to_clear: - original_env[var] = os.environ.get(var) - if var in os.environ: - del os.environ[var] - - yield - - # Restore environment variables - for var, value in original_env.items(): - if value is not None: - os.environ[var] = value - elif var in os.environ: - del os.environ[var] diff --git a/tests/test_mcp_http.py b/tests/test_mcp_http.py deleted file mode 100644 index 48a9f0fc..00000000 --- a/tests/test_mcp_http.py +++ /dev/null @@ -1,153 +0,0 @@ -# tests/test_mcp_http.py -""" -Test that the MCP server HTTP transport works and tools are accessible. 
-""" - -import pytest -import asyncio -from unittest.mock import patch -from fastmcp.client import Client -from linkedin_mcp_server.server import create_mcp_server - - -@pytest.mark.asyncio -async def test_mcp_server_tools_accessible(): - """Test that MCP server tools are accessible via in-memory client.""" - # Mock sys.argv to avoid pytest argument parsing conflicts - with patch("sys.argv", ["main.py"]): - # Create MCP server - mcp = create_mcp_server() - - # Connect client directly to server (in-memory) - async with Client(mcp) as client: - # Test that we can list tools - tools = await client.list_tools() - - # Verify expected LinkedIn tools are present - tool_names = [tool.name for tool in tools] - expected_tools = [ - "get_person_profile", - "get_company_profile", - "get_job_details", - "close_session", - ] - - for expected_tool in expected_tools: - assert expected_tool in tool_names, ( - f"Tool '{expected_tool}' not found in {tool_names}" - ) - - print(f"โœ… Found {len(tools)} tools: {tool_names}") - - -@pytest.mark.asyncio -async def test_tools_have_proper_schemas(): - """Test that tools have proper input schemas.""" - with patch("sys.argv", ["main.py"]): - mcp = create_mcp_server() - - async with Client(mcp) as client: - tools = await client.list_tools() - - # Check each tool has required properties - for tool in tools: - assert tool.name is not None - assert tool.description is not None - assert len(tool.description) > 0 - - if tool.name in [ - "get_person_profile", - "get_company_profile", - "get_job_details", - ]: - # These tools should have input schemas - assert tool.inputSchema is not None - assert "properties" in tool.inputSchema - - print(f"โœ… All {len(tools)} tools have proper schemas") - - -@pytest.mark.asyncio -async def test_close_session_tool_works(): - """Test that close_session tool can be called successfully.""" - with patch("sys.argv", ["main.py"]): - mcp = create_mcp_server() - - async with Client(mcp) as client: - # Call close_session 
tool (should work without credentials) - result = await client.call_tool("close_session") - - assert result.content is not None - assert len(result.content) > 0 - - response = result.content[0] - assert response.type == "text" - assert len(response.text) > 0 - - print(f"โœ… close_session tool response: {response.text[:100]}...") - - -@pytest.mark.asyncio -async def test_tools_fail_gracefully_without_credentials(): - """Test that LinkedIn tools fail gracefully when no credentials provided.""" - # Mock sys.argv to avoid pytest argument parsing conflicts - with patch("sys.argv", ["main.py"]): - # Mock the driver creation to avoid WebDriver initialization - with patch( - "linkedin_mcp_server.drivers.chrome.get_or_create_driver" - ) as mock_driver: - mock_driver.return_value = None # Simulate no driver available - - mcp = create_mcp_server() - - async with Client(mcp) as client: - # Try to call a LinkedIn tool without credentials - # This should either return an error message or raise an exception gracefully - try: - result = await client.call_tool( - "get_person_profile", - {"linkedin_url": "https://www.linkedin.com/in/test-user/"}, - ) - - # If no exception, check that result indicates missing credentials - assert result.content is not None - response = result.content[0] - - # Should mention credentials, driver, or login issues - error_keywords = [ - "credential", - "driver", - "login", - "error", - "failed", - ] - assert any( - keyword in response.text.lower() for keyword in error_keywords - ), f"Expected error message about credentials, got: {response.text}" - - print(f"โœ… Tool failed gracefully: {response.text[:100]}...") - - except Exception as e: - # Exception is also acceptable - means proper error handling - print(f"โœ… Tool raised exception (acceptable): {str(e)[:100]}...") - - -def test_mcp_server_creation(): - """Test that MCP server can be created successfully.""" - with patch("sys.argv", ["main.py"]): - mcp = create_mcp_server() - - assert mcp is not None 
- assert mcp.name == "linkedin_scraper" - - print("โœ… MCP server created successfully") - - -if __name__ == "__main__": - # Run tests manually if executed directly - asyncio.run(test_mcp_server_tools_accessible()) - asyncio.run(test_tools_have_proper_schemas()) - asyncio.run(test_close_session_tool_works()) - asyncio.run(test_tools_fail_gracefully_without_credentials()) - test_mcp_server_creation() - print("๐ŸŽ‰ All tests passed!") diff --git a/tests/test_smithery_config.py b/tests/test_smithery_config.py deleted file mode 100644 index faa71a28..00000000 --- a/tests/test_smithery_config.py +++ /dev/null @@ -1,116 +0,0 @@ -# tests/test_smithery_config.py -""" -Test Smithery configuration parameter passing. -""" - -import os -from unittest.mock import MagicMock - -import pytest -from starlette.applications import Starlette -from starlette.requests import Request -from starlette.responses import PlainTextResponse - -from smithery_main import SmitheryConfigMiddleware, create_app - - -@pytest.mark.asyncio -async def test_smithery_middleware_extracts_config(): - """Test that SmitheryConfigMiddleware correctly extracts configuration from query parameters.""" - # Create a simple Starlette app for testing - app = Starlette() - middleware = SmitheryConfigMiddleware(app) - - # Create a mock request with query parameters - request = MagicMock(spec=Request) - request.method = "GET" - request.url = "http://test.com/mcp?linkedin_email=test@example.com&linkedin_password=testpass123" - request.query_params = { - "linkedin_email": "test@example.com", - "linkedin_password": "testpass123", - } - - # Mock call_next function - async def mock_call_next(req): - # During middleware execution, check that env vars are set - assert os.environ.get("LINKEDIN_EMAIL") == "test@example.com" - assert os.environ.get("LINKEDIN_PASSWORD") == "testpass123" - return PlainTextResponse("OK") - - # Store original env vars - original_email = os.environ.get("LINKEDIN_EMAIL") - original_password = 
os.environ.get("LINKEDIN_PASSWORD") - - try: - # Execute middleware - response = await middleware.dispatch(request, mock_call_next) - assert response.status_code == 200 - - print("โœ… Smithery middleware correctly handles configuration") - - finally: - # Cleanup - if original_email is not None: - os.environ["LINKEDIN_EMAIL"] = original_email - elif "LINKEDIN_EMAIL" in os.environ: - del os.environ["LINKEDIN_EMAIL"] - - if original_password is not None: - os.environ["LINKEDIN_PASSWORD"] = original_password - elif "LINKEDIN_PASSWORD" in os.environ: - del os.environ["LINKEDIN_PASSWORD"] - - -@pytest.mark.asyncio -async def test_smithery_middleware_with_empty_config(): - """Test that middleware works correctly with no configuration.""" - # Create a simple Starlette app for testing - app = Starlette() - middleware = SmitheryConfigMiddleware(app) - - # Create a mock request with no query parameters - request = MagicMock(spec=Request) - request.method = "GET" - request.url = "http://test.com/mcp" - request.query_params = {} - - # Mock call_next function - async def mock_call_next(req): - return PlainTextResponse("OK") - - # Should not raise any errors - response = await middleware.dispatch(request, mock_call_next) - assert response.status_code == 200 - - print("โœ… Smithery middleware handles empty configuration") - - -def test_smithery_app_creation(): - """Test that Smithery app can be created successfully.""" - app = create_app() - assert app is not None - print("โœ… Smithery app creation successful") - - -def test_smithery_middleware_param_handling(): - """Test that SmitheryConfigMiddleware correctly handles different parameter scenarios.""" - # Create a simple Starlette app for testing - app = Starlette() - middleware = SmitheryConfigMiddleware(app) - - # Test that middleware can be instantiated - assert middleware is not None - assert hasattr(middleware, "dispatch") - - print("โœ… Smithery middleware parameter handling is correct") - - -if __name__ == "__main__": - # 
Run tests manually if executed directly - import asyncio - - asyncio.run(test_smithery_middleware_extracts_config()) - asyncio.run(test_smithery_middleware_with_empty_config()) - test_smithery_app_creation() - test_smithery_middleware_param_handling() - print("๐ŸŽ‰ All Smithery configuration tests passed!") From 5a35d648de44d46790ffc7d4350160c0c1a3158e Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Fri, 4 Jul 2025 17:09:19 -0400 Subject: [PATCH 096/565] Claude PR Assistant workflow --- .github/workflows/claude.yml | 59 ++++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 .github/workflows/claude.yml diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml new file mode 100644 index 00000000..58d0fa2e --- /dev/null +++ b/.github/workflows/claude.yml @@ -0,0 +1,59 @@ +name: Claude Code + +on: + issue_comment: + types: [created] + pull_request_review_comment: + types: [created] + issues: + types: [opened, assigned] + pull_request_review: + types: [submitted] + +jobs: + claude: + if: | + (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) || + (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) || + (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) || + (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude'))) + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: read + issues: read + id-token: write + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 1 + + - name: Run Claude Code + id: claude + uses: anthropics/claude-code-action@beta + with: + anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} + + # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4) + # model: 
"claude-opus-4-20250514" + + # Optional: Customize the trigger phrase (default: @claude) + # trigger_phrase: "/claude" + + # Optional: Trigger when specific user is assigned to an issue + # assignee_trigger: "claude-bot" + + # Optional: Allow Claude to run specific commands + # allowed_tools: "Bash(npm install),Bash(npm run build),Bash(npm run test:*),Bash(npm run lint:*)" + + # Optional: Add custom instructions for Claude to customize its behavior for your project + # custom_instructions: | + # Follow our coding standards + # Ensure all new code has tests + # Use TypeScript for new files + + # Optional: Custom environment variables for Claude + # claude_env: | + # NODE_ENV: test + From 85655ec25bf4340ac2d9c903ad084b59455682e7 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Fri, 4 Jul 2025 17:09:20 -0400 Subject: [PATCH 097/565] Claude Code Review workflow --- .github/workflows/claude-code-review.yml | 75 ++++++++++++++++++++++++ 1 file changed, 75 insertions(+) create mode 100644 .github/workflows/claude-code-review.yml diff --git a/.github/workflows/claude-code-review.yml b/.github/workflows/claude-code-review.yml new file mode 100644 index 00000000..ecd27d0a --- /dev/null +++ b/.github/workflows/claude-code-review.yml @@ -0,0 +1,75 @@ +name: Claude Code Review + +on: + pull_request: + types: [opened, synchronize] + # Optional: Only run on specific file changes + # paths: + # - "src/**/*.ts" + # - "src/**/*.tsx" + # - "src/**/*.js" + # - "src/**/*.jsx" + +jobs: + claude-review: + # Optional: Filter by PR author + # if: | + # github.event.pull_request.user.login == 'external-contributor' || + # github.event.pull_request.user.login == 'new-developer' || + # github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' + + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: read + issues: read + id-token: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + 
with: + fetch-depth: 1 + + - name: Run Claude Code Review + id: claude-review + uses: anthropics/claude-code-action@beta + with: + anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} + + # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4) + # model: "claude-opus-4-20250514" + + # Direct prompt for automated review (no @claude mention needed) + direct_prompt: | + Please review this pull request and provide feedback on: + - Code quality and best practices + - Potential bugs or issues + - Performance considerations + - Security concerns + - Test coverage + + Be constructive and helpful in your feedback. + + # Optional: Customize review based on file types + # direct_prompt: | + # Review this PR focusing on: + # - For TypeScript files: Type safety and proper interface usage + # - For API endpoints: Security, input validation, and error handling + # - For React components: Performance, accessibility, and best practices + # - For tests: Coverage, edge cases, and test quality + + # Optional: Different prompts for different authors + # direct_prompt: | + # ${{ github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' && + # 'Welcome! Please review this PR from a first-time contributor. Be encouraging and provide detailed explanations for any suggestions.' || + # 'Please provide a thorough code review focusing on our coding standards and best practices.' 
}} + + # Optional: Add specific tools for running tests or linting + # allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)" + + # Optional: Skip review for certain conditions + # if: | + # !contains(github.event.pull_request.title, '[skip-review]') && + # !contains(github.event.pull_request.title, '[WIP]') + From 0b8c73d688a32a66072324c7fb07ebb449adf85a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 4 Jul 2025 17:11:51 -0400 Subject: [PATCH 098/565] Revert "Merge pull request #17 from stickerdaniel/feat/http-transport" This reverts commit b4e800feb622d5742e970fdfc259065bca8d53a6, reversing changes made to fed12db1927ba7393eb5dcc621fa6459566db78d. --- .vscode/tasks.json | 52 +-- linkedin_mcp_server/cli.py | 7 +- linkedin_mcp_server/config/loaders.py | 46 +- linkedin_mcp_server/config/schema.py | 6 +- linkedin_mcp_server/config/secrets.py | 7 +- linkedin_mcp_server/drivers/chrome.py | 10 +- linkedin_mcp_server/server.py | 7 +- linkedin_mcp_server/tools/company.py | 5 +- linkedin_mcp_server/tools/job.py | 2 +- linkedin_mcp_server/tools/person.py | 5 +- main.py | 25 +- pyproject.toml | 5 +- smithery.yaml | 29 ++ uv.lock | 649 ++------------------------ 14 files changed, 111 insertions(+), 744 deletions(-) create mode 100644 smithery.yaml diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 4c613079..4cbf6ef6 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -1,23 +1,6 @@ { "version": "2.0.0", "tasks": [ - { - "label": "bunx @modelcontextprotocol/inspector", - "detail": "Run the Model Context Protocol Inspector", - "type": "shell", - "command": "bunx", - "args": ["@modelcontextprotocol/inspector"], - "group": { - "kind": "test", - "isDefault": true - }, - "presentation": { - "reveal": "always", - "panel": "new", - "focus": true - }, - "problemMatcher": [] - }, { "label": "uv run pre-commit run --all-files", "detail": "Run pre-commit hooks on all files", @@ -31,7 +14,7 @@ ], "group": { "kind": "test", - "isDefault": 
false + "isDefault": true }, "presentation": { "reveal": "never", @@ -74,38 +57,9 @@ "--no-headless", "--no-lazy-init" ], - "group": { - "kind": "build" - }, - "presentation": { - "reveal": "always", - "panel": "new", - "focus": true - }, - "problemMatcher": [] - }, - { - "label": "uv run main.py --transport streamable-http --no-setup", - "detail": "Start HTTP MCP server on localhost:8000/mcp", - "type": "shell", - "command": "uv", - "args": [ - "run", - "main.py", - "--transport", - "streamable-http", - "--host", - "127.0.0.1", - "--port", - "8000", - "--path", - "/mcp", - "--no-setup" - ], - "isBackground": true, "group": { "kind": "build", - "isDefault": false + "isDefault": true }, "presentation": { "reveal": "always", @@ -132,6 +86,6 @@ "focus": false }, "problemMatcher": [] - }, + } ] } diff --git a/linkedin_mcp_server/cli.py b/linkedin_mcp_server/cli.py index 7c6457d5..e9833624 100644 --- a/linkedin_mcp_server/cli.py +++ b/linkedin_mcp_server/cli.py @@ -5,12 +5,11 @@ This module handles the command-line interface and configuration management. 
""" -import json -import logging +from typing import Dict, Any, List import os +import json import subprocess -from typing import Any, Dict, List - +import logging import pyperclip # type: ignore from linkedin_mcp_server.config import get_config diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index 08183638..3871d4e9 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -44,18 +44,6 @@ def load_from_env(config: AppConfig) -> AppConfig: # Headless mode if os.environ.get("HEADLESS") in ("0", "false", "False", "no", "No"): config.chrome.headless = False - elif os.environ.get("HEADLESS") in ("1", "true", "True", "yes", "Yes"): - config.chrome.headless = True - - # Non-interactive mode - if os.environ.get("NON_INTERACTIVE") in ("1", "true", "True", "yes", "Yes"): - config.chrome.non_interactive = True - - # Lazy initialization - if os.environ.get("LAZY_INIT") in ("1", "true", "True", "yes", "Yes"): - config.server.lazy_init = True - elif os.environ.get("LAZY_INIT") in ("0", "false", "False", "no", "No"): - config.server.lazy_init = False return config @@ -92,30 +80,9 @@ def load_from_args(config: AppConfig) -> AppConfig: parser.add_argument( "--transport", - choices=["stdio", "streamable-http"], - default=None, - help="Specify the transport mode (stdio or streamable-http)", - ) - - parser.add_argument( - "--host", - type=str, - default=None, - help="HTTP server host (default: 127.0.0.1)", - ) - - parser.add_argument( - "--port", - type=int, - default=None, - help="HTTP server port (default: 8000)", - ) - - parser.add_argument( - "--path", - type=str, + choices=["stdio", "sse"], default=None, - help="HTTP server path (default: /mcp)", + help="Specify the transport mode (stdio or sse)", ) parser.add_argument( @@ -142,15 +109,6 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.transport: config.server.transport = args.transport - if args.host: - config.server.host = 
args.host - - if args.port: - config.server.port = args.port - - if args.path: - config.server.path = args.path - if args.chromedriver: config.chrome.chromedriver_path = args.chromedriver diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index 55d912f5..8d92585a 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -26,14 +26,10 @@ class LinkedInConfig: class ServerConfig: """MCP server configuration.""" - transport: Literal["stdio", "streamable-http"] = "stdio" + transport: Literal["stdio", "sse"] = "stdio" lazy_init: bool = True debug: bool = False setup: bool = True - # HTTP transport configuration - host: str = "127.0.0.1" - port: int = 8000 - path: str = "/mcp" @dataclass diff --git a/linkedin_mcp_server/config/secrets.py b/linkedin_mcp_server/config/secrets.py index 0acc3d88..7e42010d 100644 --- a/linkedin_mcp_server/config/secrets.py +++ b/linkedin_mcp_server/config/secrets.py @@ -1,15 +1,12 @@ # src/linkedin_mcp_server/config/secrets.py -import logging from typing import Dict, Optional - +import logging import inquirer # type: ignore - from linkedin_mcp_server.config import get_config - from .providers import ( get_credentials_from_keyring, - get_keyring_name, save_credentials_to_keyring, + get_keyring_name, ) logger = logging.getLogger(__name__) diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 63855e04..84e39045 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -5,19 +5,17 @@ This module handles the creation and management of Chrome WebDriver instances. 
""" -import os import sys from typing import Dict, Optional - -import inquirer # type: ignore +import os from selenium import webdriver -from selenium.common.exceptions import WebDriverException from selenium.webdriver.chrome.options import Options from selenium.webdriver.chrome.service import Service - +from selenium.common.exceptions import WebDriverException +import inquirer # type: ignore from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.config.providers import clear_credentials_from_keyring from linkedin_mcp_server.config.secrets import get_credentials +from linkedin_mcp_server.config.providers import clear_credentials_from_keyring # Global driver storage to reuse sessions active_drivers: Dict[str, webdriver.Chrome] = {} diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index 3e746cd2..8cb959a6 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -5,14 +5,13 @@ This module creates the MCP server and registers all the LinkedIn tools. """ -from typing import Any, Dict - -from fastmcp import FastMCP +from typing import Dict, Any +from mcp.server.fastmcp import FastMCP from linkedin_mcp_server.drivers.chrome import active_drivers +from linkedin_mcp_server.tools.person import register_person_tools from linkedin_mcp_server.tools.company import register_company_tools from linkedin_mcp_server.tools.job import register_job_tools -from linkedin_mcp_server.tools.person import register_person_tools def create_mcp_server() -> FastMCP: diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 4690a940..071797d2 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -5,9 +5,8 @@ This module provides tools for scraping LinkedIn company profiles. 
""" -from typing import Any, Dict, List - -from fastmcp import FastMCP +from typing import Dict, Any, List +from mcp.server.fastmcp import FastMCP from linkedin_scraper import Company from linkedin_mcp_server.drivers.chrome import get_or_create_driver diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 1af2a91b..88c3f027 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -7,8 +7,8 @@ from typing import Any, Dict, List -from fastmcp import FastMCP from linkedin_scraper import Job, JobSearch +from mcp.server.fastmcp import FastMCP from linkedin_mcp_server.drivers.chrome import get_or_create_driver diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 236fa7f4..70957c93 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -5,9 +5,8 @@ This module provides tools for scraping LinkedIn person profiles. """ -from typing import Any, Dict, List - -from fastmcp import FastMCP +from typing import Dict, Any, List +from mcp.server.fastmcp import FastMCP from linkedin_scraper import Person from linkedin_mcp_server.drivers.chrome import get_or_create_driver diff --git a/main.py b/main.py index d48520e3..351dc3ea 100644 --- a/main.py +++ b/main.py @@ -3,21 +3,19 @@ LinkedIn MCP Server - A Model Context Protocol server for LinkedIn integration. 
""" -import logging import sys -from typing import Literal - +import logging import inquirer # type: ignore - -from linkedin_mcp_server.cli import print_claude_config +from typing import Literal # Import the new centralized configuration from linkedin_mcp_server.config import get_config +from linkedin_mcp_server.cli import print_claude_config from linkedin_mcp_server.drivers.chrome import initialize_driver from linkedin_mcp_server.server import create_mcp_server, shutdown_handler -def choose_transport_interactive() -> Literal["stdio", "streamable-http"]: +def choose_transport_interactive() -> Literal["stdio", "sse"]: """Prompt user for transport mode using inquirer.""" questions = [ inquirer.List( @@ -25,7 +23,7 @@ def choose_transport_interactive() -> Literal["stdio", "streamable-http"]: message="Choose mcp transport mode", choices=[ ("stdio (Default CLI mode)", "stdio"), - ("streamable-http (HTTP server mode)", "streamable-http"), + ("sse (Server-Sent Events HTTP mode)", "sse"), ], default="stdio", ) @@ -69,18 +67,7 @@ def main() -> None: # Start server print(f"\n๐Ÿš€ Running LinkedIn MCP server ({transport.upper()} mode)...") - if transport == "streamable-http": - print( - f"๐Ÿ“ก HTTP server will be available at http://{config.server.host}:{config.server.port}{config.server.path}" - ) - mcp.run( - transport=transport, - host=config.server.host, - port=config.server.port, - path=config.server.path, - ) - else: - mcp.run(transport=transport) + mcp.run(transport=transport) def exit_gracefully(exit_code: int = 0) -> None: diff --git a/pyproject.toml b/pyproject.toml index 4e109bba..5e113ea1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,10 +5,11 @@ description = "MCP server for LinkedIn profile, company, and job scraping with C readme = "README.md" requires-python = ">=3.12" dependencies = [ - "fastmcp>=2.10.1", + "httpx>=0.28.1", "inquirer>=3.4.0", "keyring>=25.6.0", "linkedin-scraper", + "mcp[cli]>=1.6.0", "pyperclip>=1.9.0", ] @@ -20,10 +21,8 @@ 
linkedin-scraper = { git = "https://github.com/joeyism/linkedin_scraper.git" } [dependency-groups] dev = [ - "aiohttp>=3.12.13", "pre-commit>=4.2.0", "pytest>=8.3.5", - "pytest-asyncio>=1.0.0", "pytest-cov>=6.1.1", "ruff>=0.11.11", "ty>=0.0.1a12", diff --git a/smithery.yaml b/smithery.yaml new file mode 100644 index 00000000..548429bd --- /dev/null +++ b/smithery.yaml @@ -0,0 +1,29 @@ +# Smithery configuration file: https://smithery.ai/docs/build/project-config +version: 1 +start: + command: + - docker + - run + - -i + - --rm + - -e + - LINKEDIN_EMAIL=${LINKEDIN_EMAIL} + - -e + - LINKEDIN_PASSWORD=${LINKEDIN_PASSWORD} + - stickerdaniel/linkedin-mcp-server +configSchema: + # JSON Schema defining the configuration options for the MCP. + type: object + properties: + LINKEDIN_EMAIL: + type: string + description: Email for LinkedIn login + LINKEDIN_PASSWORD: + type: string + description: Password for LinkedIn login + required: + - LINKEDIN_EMAIL + - LINKEDIN_PASSWORD +exampleConfig: + LINKEDIN_EMAIL: example.user@example.com + LINKEDIN_PASSWORD: yourLinkedInPassword diff --git a/uv.lock b/uv.lock index 11a06d54..4e0e51b4 100644 --- a/uv.lock +++ b/uv.lock @@ -2,79 +2,6 @@ version = 1 revision = 2 requires-python = ">=3.12" -[[package]] -name = "aiohappyeyeballs" -version = "2.6.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, -] - -[[package]] -name = "aiohttp" 
-version = "3.12.13" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "aiohappyeyeballs" }, - { name = "aiosignal" }, - { name = "attrs" }, - { name = "frozenlist" }, - { name = "multidict" }, - { name = "propcache" }, - { name = "yarl" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/42/6e/ab88e7cb2a4058bed2f7870276454f85a7c56cd6da79349eb314fc7bbcaa/aiohttp-3.12.13.tar.gz", hash = "sha256:47e2da578528264a12e4e3dd8dd72a7289e5f812758fe086473fab037a10fcce", size = 7819160, upload-time = "2025-06-14T15:15:41.354Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/6a/ce40e329788013cd190b1d62bbabb2b6a9673ecb6d836298635b939562ef/aiohttp-3.12.13-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0aa580cf80558557285b49452151b9c69f2fa3ad94c5c9e76e684719a8791b73", size = 700491, upload-time = "2025-06-14T15:14:00.048Z" }, - { url = "https://files.pythonhosted.org/packages/28/d9/7150d5cf9163e05081f1c5c64a0cdf3c32d2f56e2ac95db2a28fe90eca69/aiohttp-3.12.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b103a7e414b57e6939cc4dece8e282cfb22043efd0c7298044f6594cf83ab347", size = 475104, upload-time = "2025-06-14T15:14:01.691Z" }, - { url = "https://files.pythonhosted.org/packages/f8/91/d42ba4aed039ce6e449b3e2db694328756c152a79804e64e3da5bc19dffc/aiohttp-3.12.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f64e748e9e741d2eccff9597d09fb3cd962210e5b5716047cbb646dc8fe06f", size = 467948, upload-time = "2025-06-14T15:14:03.561Z" }, - { url = "https://files.pythonhosted.org/packages/99/3b/06f0a632775946981d7c4e5a865cddb6e8dfdbaed2f56f9ade7bb4a1039b/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c955989bf4c696d2ededc6b0ccb85a73623ae6e112439398935362bacfaaf6", size = 1714742, upload-time = "2025-06-14T15:14:05.558Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/a6/2552eebad9ec5e3581a89256276009e6a974dc0793632796af144df8b740/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d640191016763fab76072c87d8854a19e8e65d7a6fcfcbf017926bdbbb30a7e5", size = 1697393, upload-time = "2025-06-14T15:14:07.194Z" }, - { url = "https://files.pythonhosted.org/packages/d8/9f/bd08fdde114b3fec7a021381b537b21920cdd2aa29ad48c5dffd8ee314f1/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dc507481266b410dede95dd9f26c8d6f5a14315372cc48a6e43eac652237d9b", size = 1752486, upload-time = "2025-06-14T15:14:08.808Z" }, - { url = "https://files.pythonhosted.org/packages/f7/e1/affdea8723aec5bd0959171b5490dccd9a91fcc505c8c26c9f1dca73474d/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8a94daa873465d518db073bd95d75f14302e0208a08e8c942b2f3f1c07288a75", size = 1798643, upload-time = "2025-06-14T15:14:10.767Z" }, - { url = "https://files.pythonhosted.org/packages/f3/9d/666d856cc3af3a62ae86393baa3074cc1d591a47d89dc3bf16f6eb2c8d32/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f52420cde4ce0bb9425a375d95577fe082cb5721ecb61da3049b55189e4e6", size = 1718082, upload-time = "2025-06-14T15:14:12.38Z" }, - { url = "https://files.pythonhosted.org/packages/f3/ce/3c185293843d17be063dada45efd2712bb6bf6370b37104b4eda908ffdbd/aiohttp-3.12.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f7df1f620ec40f1a7fbcb99ea17d7326ea6996715e78f71a1c9a021e31b96b8", size = 1633884, upload-time = "2025-06-14T15:14:14.415Z" }, - { url = "https://files.pythonhosted.org/packages/3a/5b/f3413f4b238113be35dfd6794e65029250d4b93caa0974ca572217745bdb/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3062d4ad53b36e17796dce1c0d6da0ad27a015c321e663657ba1cc7659cfc710", size = 1694943, 
upload-time = "2025-06-14T15:14:16.48Z" }, - { url = "https://files.pythonhosted.org/packages/82/c8/0e56e8bf12081faca85d14a6929ad5c1263c146149cd66caa7bc12255b6d/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:8605e22d2a86b8e51ffb5253d9045ea73683d92d47c0b1438e11a359bdb94462", size = 1716398, upload-time = "2025-06-14T15:14:18.589Z" }, - { url = "https://files.pythonhosted.org/packages/ea/f3/33192b4761f7f9b2f7f4281365d925d663629cfaea093a64b658b94fc8e1/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:54fbbe6beafc2820de71ece2198458a711e224e116efefa01b7969f3e2b3ddae", size = 1657051, upload-time = "2025-06-14T15:14:20.223Z" }, - { url = "https://files.pythonhosted.org/packages/5e/0b/26ddd91ca8f84c48452431cb4c5dd9523b13bc0c9766bda468e072ac9e29/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:050bd277dfc3768b606fd4eae79dd58ceda67d8b0b3c565656a89ae34525d15e", size = 1736611, upload-time = "2025-06-14T15:14:21.988Z" }, - { url = "https://files.pythonhosted.org/packages/c3/8d/e04569aae853302648e2c138a680a6a2f02e374c5b6711732b29f1e129cc/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2637a60910b58f50f22379b6797466c3aa6ae28a6ab6404e09175ce4955b4e6a", size = 1764586, upload-time = "2025-06-14T15:14:23.979Z" }, - { url = "https://files.pythonhosted.org/packages/ac/98/c193c1d1198571d988454e4ed75adc21c55af247a9fda08236602921c8c8/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e986067357550d1aaa21cfe9897fa19e680110551518a5a7cf44e6c5638cb8b5", size = 1724197, upload-time = "2025-06-14T15:14:25.692Z" }, - { url = "https://files.pythonhosted.org/packages/e7/9e/07bb8aa11eec762c6b1ff61575eeeb2657df11ab3d3abfa528d95f3e9337/aiohttp-3.12.13-cp312-cp312-win32.whl", hash = "sha256:ac941a80aeea2aaae2875c9500861a3ba356f9ff17b9cb2dbfb5cbf91baaf5bf", size = 421771, upload-time = "2025-06-14T15:14:27.364Z" }, - { url = 
"https://files.pythonhosted.org/packages/52/66/3ce877e56ec0813069cdc9607cd979575859c597b6fb9b4182c6d5f31886/aiohttp-3.12.13-cp312-cp312-win_amd64.whl", hash = "sha256:671f41e6146a749b6c81cb7fd07f5a8356d46febdaaaf07b0e774ff04830461e", size = 447869, upload-time = "2025-06-14T15:14:29.05Z" }, - { url = "https://files.pythonhosted.org/packages/11/0f/db19abdf2d86aa1deec3c1e0e5ea46a587b97c07a16516b6438428b3a3f8/aiohttp-3.12.13-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d4a18e61f271127465bdb0e8ff36e8f02ac4a32a80d8927aa52371e93cd87938", size = 694910, upload-time = "2025-06-14T15:14:30.604Z" }, - { url = "https://files.pythonhosted.org/packages/d5/81/0ab551e1b5d7f1339e2d6eb482456ccbe9025605b28eed2b1c0203aaaade/aiohttp-3.12.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:532542cb48691179455fab429cdb0d558b5e5290b033b87478f2aa6af5d20ace", size = 472566, upload-time = "2025-06-14T15:14:32.275Z" }, - { url = "https://files.pythonhosted.org/packages/34/3f/6b7d336663337672d29b1f82d1f252ec1a040fe2d548f709d3f90fa2218a/aiohttp-3.12.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d7eea18b52f23c050ae9db5d01f3d264ab08f09e7356d6f68e3f3ac2de9dfabb", size = 464856, upload-time = "2025-06-14T15:14:34.132Z" }, - { url = "https://files.pythonhosted.org/packages/26/7f/32ca0f170496aa2ab9b812630fac0c2372c531b797e1deb3deb4cea904bd/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad7c8e5c25f2a26842a7c239de3f7b6bfb92304593ef997c04ac49fb703ff4d7", size = 1703683, upload-time = "2025-06-14T15:14:36.034Z" }, - { url = "https://files.pythonhosted.org/packages/ec/53/d5513624b33a811c0abea8461e30a732294112318276ce3dbf047dbd9d8b/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6af355b483e3fe9d7336d84539fef460120c2f6e50e06c658fe2907c69262d6b", size = 1684946, upload-time = "2025-06-14T15:14:38Z" }, - { url = 
"https://files.pythonhosted.org/packages/37/72/4c237dd127827b0247dc138d3ebd49c2ded6114c6991bbe969058575f25f/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a95cf9f097498f35c88e3609f55bb47b28a5ef67f6888f4390b3d73e2bac6177", size = 1737017, upload-time = "2025-06-14T15:14:39.951Z" }, - { url = "https://files.pythonhosted.org/packages/0d/67/8a7eb3afa01e9d0acc26e1ef847c1a9111f8b42b82955fcd9faeb84edeb4/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8ed8c38a1c584fe99a475a8f60eefc0b682ea413a84c6ce769bb19a7ff1c5ef", size = 1786390, upload-time = "2025-06-14T15:14:42.151Z" }, - { url = "https://files.pythonhosted.org/packages/48/19/0377df97dd0176ad23cd8cad4fd4232cfeadcec6c1b7f036315305c98e3f/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0b9170d5d800126b5bc89d3053a2363406d6e327afb6afaeda2d19ee8bb103", size = 1708719, upload-time = "2025-06-14T15:14:44.039Z" }, - { url = "https://files.pythonhosted.org/packages/61/97/ade1982a5c642b45f3622255173e40c3eed289c169f89d00eeac29a89906/aiohttp-3.12.13-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:372feeace612ef8eb41f05ae014a92121a512bd5067db8f25101dd88a8db11da", size = 1622424, upload-time = "2025-06-14T15:14:45.945Z" }, - { url = "https://files.pythonhosted.org/packages/99/ab/00ad3eea004e1d07ccc406e44cfe2b8da5acb72f8c66aeeb11a096798868/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a946d3702f7965d81f7af7ea8fb03bb33fe53d311df48a46eeca17e9e0beed2d", size = 1675447, upload-time = "2025-06-14T15:14:47.911Z" }, - { url = "https://files.pythonhosted.org/packages/3f/fe/74e5ce8b2ccaba445fe0087abc201bfd7259431d92ae608f684fcac5d143/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a0c4725fae86555bbb1d4082129e21de7264f4ab14baf735278c974785cd2041", size = 1707110, upload-time = "2025-06-14T15:14:50.334Z" }, 
- { url = "https://files.pythonhosted.org/packages/ef/c4/39af17807f694f7a267bd8ab1fbacf16ad66740862192a6c8abac2bff813/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b28ea2f708234f0a5c44eb6c7d9eb63a148ce3252ba0140d050b091b6e842d1", size = 1649706, upload-time = "2025-06-14T15:14:52.378Z" }, - { url = "https://files.pythonhosted.org/packages/38/e8/f5a0a5f44f19f171d8477059aa5f28a158d7d57fe1a46c553e231f698435/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d4f5becd2a5791829f79608c6f3dc745388162376f310eb9c142c985f9441cc1", size = 1725839, upload-time = "2025-06-14T15:14:54.617Z" }, - { url = "https://files.pythonhosted.org/packages/fd/ac/81acc594c7f529ef4419d3866913f628cd4fa9cab17f7bf410a5c3c04c53/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:60f2ce6b944e97649051d5f5cc0f439360690b73909230e107fd45a359d3e911", size = 1759311, upload-time = "2025-06-14T15:14:56.597Z" }, - { url = "https://files.pythonhosted.org/packages/38/0d/aabe636bd25c6ab7b18825e5a97d40024da75152bec39aa6ac8b7a677630/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:69fc1909857401b67bf599c793f2183fbc4804717388b0b888f27f9929aa41f3", size = 1708202, upload-time = "2025-06-14T15:14:58.598Z" }, - { url = "https://files.pythonhosted.org/packages/1f/ab/561ef2d8a223261683fb95a6283ad0d36cb66c87503f3a7dde7afe208bb2/aiohttp-3.12.13-cp313-cp313-win32.whl", hash = "sha256:7d7e68787a2046b0e44ba5587aa723ce05d711e3a3665b6b7545328ac8e3c0dd", size = 420794, upload-time = "2025-06-14T15:15:00.939Z" }, - { url = "https://files.pythonhosted.org/packages/9d/47/b11d0089875a23bff0abd3edb5516bcd454db3fefab8604f5e4b07bd6210/aiohttp-3.12.13-cp313-cp313-win_amd64.whl", hash = "sha256:5a178390ca90419bfd41419a809688c368e63c86bd725e1186dd97f6b89c2706", size = 446735, upload-time = "2025-06-14T15:15:02.858Z" }, -] - -[[package]] -name = "aiosignal" -version = "1.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = 
"frozenlist" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, -] - [[package]] name = "annotated-types" version = "0.7.0" @@ -116,18 +43,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, ] -[[package]] -name = "authlib" -version = "1.6.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cryptography" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a2/9d/b1e08d36899c12c8b894a44a5583ee157789f26fc4b176f8e4b6217b56e1/authlib-1.6.0.tar.gz", hash = "sha256:4367d32031b7af175ad3a323d571dc7257b7099d55978087ceae4a0d88cd3210", size = 158371, upload-time = "2025-05-23T00:21:45.011Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/84/29/587c189bbab1ccc8c86a03a5d0e13873df916380ef1be461ebe6acebf48d/authlib-1.6.0-py2.py3-none-any.whl", hash = "sha256:91685589498f79e8655e8a8947431ad6288831d643f11c55c2143ffcc738048d", size = 239981, upload-time = "2025-05-23T00:21:43.075Z" }, -] - [[package]] name = "blessed" version = "1.20.0" @@ -160,8 +75,6 @@ dependencies = [ ] sdist = { url = 
"https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" }, - { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" }, { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, @@ -171,8 +84,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, - { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" }, { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, { url = 
"https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, @@ -300,7 +211,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/53/d6/1411ab4d6108ab167d06254c5be517681f1e331f90edf1379895bcb87020/cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053", size = 711096, upload-time = "2025-05-02T19:36:04.667Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/53/c776d80e9d26441bb3868457909b4e74dd9ccabd182e10b2b0ae7a07e265/cryptography-44.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88", size = 6670281, upload-time = "2025-05-02T19:34:50.665Z" }, { url = "https://files.pythonhosted.org/packages/6a/06/af2cf8d56ef87c77319e9086601bef621bedf40f6f59069e1b6d1ec498c5/cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137", size = 3959305, upload-time = "2025-05-02T19:34:53.042Z" }, { url = "https://files.pythonhosted.org/packages/ae/01/80de3bec64627207d030f47bf3536889efee8913cd363e78ca9a09b13c8e/cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c", size = 4171040, upload-time = "2025-05-02T19:34:54.675Z" }, { url 
= "https://files.pythonhosted.org/packages/bd/48/bb16b7541d207a19d9ae8b541c70037a05e473ddc72ccb1386524d4f023c/cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76", size = 3963411, upload-time = "2025-05-02T19:34:56.61Z" }, @@ -310,9 +220,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b6/f7/7cb5488c682ca59a02a32ec5f975074084db4c983f849d47b7b67cc8697a/cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d", size = 4196173, upload-time = "2025-05-02T19:35:05.018Z" }, { url = "https://files.pythonhosted.org/packages/d2/0b/2f789a8403ae089b0b121f8f54f4a3e5228df756e2146efdf4a09a3d5083/cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904", size = 4087713, upload-time = "2025-05-02T19:35:07.187Z" }, { url = "https://files.pythonhosted.org/packages/1d/aa/330c13655f1af398fc154089295cf259252f0ba5df93b4bc9d9c7d7f843e/cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44", size = 4299064, upload-time = "2025-05-02T19:35:08.879Z" }, - { url = "https://files.pythonhosted.org/packages/10/a8/8c540a421b44fd267a7d58a1fd5f072a552d72204a3f08194f98889de76d/cryptography-44.0.3-cp37-abi3-win32.whl", hash = "sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d", size = 2773887, upload-time = "2025-05-02T19:35:10.41Z" }, - { url = "https://files.pythonhosted.org/packages/b9/0d/c4b1657c39ead18d76bbd122da86bd95bdc4095413460d09544000a17d56/cryptography-44.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d", size = 3209737, upload-time = "2025-05-02T19:35:12.12Z" }, - { url = 
"https://files.pythonhosted.org/packages/34/a3/ad08e0bcc34ad436013458d7528e83ac29910943cea42ad7dd4141a27bbb/cryptography-44.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f", size = 6673501, upload-time = "2025-05-02T19:35:13.775Z" }, { url = "https://files.pythonhosted.org/packages/b1/f0/7491d44bba8d28b464a5bc8cc709f25a51e3eac54c0a4444cf2473a57c37/cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759", size = 3960307, upload-time = "2025-05-02T19:35:15.917Z" }, { url = "https://files.pythonhosted.org/packages/f7/c8/e5c5d0e1364d3346a5747cdcd7ecbb23ca87e6dea4f942a44e88be349f06/cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645", size = 4170876, upload-time = "2025-05-02T19:35:18.138Z" }, { url = "https://files.pythonhosted.org/packages/73/96/025cb26fc351d8c7d3a1c44e20cf9a01e9f7cf740353c9c7a17072e4b264/cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2", size = 3964127, upload-time = "2025-05-02T19:35:19.864Z" }, @@ -322,8 +229,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/35/6e/dca39d553075980ccb631955c47b93d87d27f3596da8d48b1ae81463d915/cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f", size = 4197398, upload-time = "2025-05-02T19:35:27.678Z" }, { url = "https://files.pythonhosted.org/packages/9b/9d/d1f2fe681eabc682067c66a74addd46c887ebacf39038ba01f8860338d3d/cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5", size = 4087900, upload-time = "2025-05-02T19:35:29.312Z" }, { url = 
"https://files.pythonhosted.org/packages/c4/f5/3599e48c5464580b73b236aafb20973b953cd2e7b44c7c2533de1d888446/cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b", size = 4301067, upload-time = "2025-05-02T19:35:31.547Z" }, - { url = "https://files.pythonhosted.org/packages/a7/6c/d2c48c8137eb39d0c193274db5c04a75dab20d2f7c3f81a7dcc3a8897701/cryptography-44.0.3-cp39-abi3-win32.whl", hash = "sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028", size = 2775467, upload-time = "2025-05-02T19:35:33.805Z" }, - { url = "https://files.pythonhosted.org/packages/c9/ad/51f212198681ea7b0deaaf8846ee10af99fba4e894f67b353524eab2bbe5/cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334", size = 3210375, upload-time = "2025-05-02T19:35:35.369Z" }, ] [[package]] @@ -335,15 +240,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973, upload-time = "2024-10-09T18:35:44.272Z" }, ] -[[package]] -name = "dnspython" -version = "2.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, -] - [[package]] name = "editor" version = 
"1.6.6" @@ -357,51 +253,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1b/c2/4bc8cd09b14e28ce3f406a8b05761bed0d785d1ca8c2a5c6684d884c66a2/editor-1.6.6-py3-none-any.whl", hash = "sha256:e818e6913f26c2a81eadef503a2741d7cca7f235d20e217274a009ecd5a74abf", size = 4017, upload-time = "2024-01-25T10:44:58.66Z" }, ] -[[package]] -name = "email-validator" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "dnspython" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967, upload-time = "2024-06-20T11:30:30.034Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521, upload-time = "2024-06-20T11:30:28.248Z" }, -] - -[[package]] -name = "exceptiongroup" -version = "1.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, -] - -[[package]] -name = "fastmcp" -version = "2.10.1" -source = { registry 
= "https://pypi.org/simple" } -dependencies = [ - { name = "authlib" }, - { name = "exceptiongroup" }, - { name = "httpx" }, - { name = "mcp" }, - { name = "openapi-pydantic" }, - { name = "pydantic", extra = ["email"] }, - { name = "python-dotenv" }, - { name = "rich" }, - { name = "typer" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/33/1f/0031ea07bcad9f9b38d3500772d2749ca2b16335b92bd012f1d2f86a853e/fastmcp-2.10.1.tar.gz", hash = "sha256:450c72e523926a2203c7eecdb4a8b0507506667bc8736b8b7bb44f6312424649", size = 2730387, upload-time = "2025-07-02T04:57:24.981Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/29/a2/52ef74287ec5fe0e5a0ffedde7d0809da5ec3ac85f4e3f2ed5587b39471a/fastmcp-2.10.1-py3-none-any.whl", hash = "sha256:17d0acea04eeb3464c9eca42b6774fb06b38b72cface9af6a7482b3aa561db13", size = 182108, upload-time = "2025-07-02T04:57:23.529Z" }, -] - [[package]] name = "filelock" version = "3.18.0" @@ -411,66 +262,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, ] -[[package]] -name = "frozenlist" -version = "1.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, - { 
url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, - { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = "2025-06-09T23:00:44.793Z" }, - { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, upload-time = "2025-06-09T23:00:46.125Z" }, - { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, - { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, - { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, - { url 
= "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, upload-time = "2025-06-09T23:00:52.855Z" }, - { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, - { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, - { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, - { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, - { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, - { 
url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, - { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = "2025-06-09T23:01:05.095Z" }, - { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, upload-time = "2025-06-09T23:01:06.54Z" }, - { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" }, - { url = "https://files.pythonhosted.org/packages/24/90/6b2cebdabdbd50367273c20ff6b57a3dfa89bd0762de02c3a1eb42cb6462/frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee", size = 79791, upload-time = "2025-06-09T23:01:09.368Z" }, - { url = "https://files.pythonhosted.org/packages/83/2e/5b70b6a3325363293fe5fc3ae74cdcbc3e996c2a11dde2fd9f1fb0776d19/frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d", size = 47165, upload-time = "2025-06-09T23:01:10.653Z" }, - { url = 
"https://files.pythonhosted.org/packages/f4/25/a0895c99270ca6966110f4ad98e87e5662eab416a17e7fd53c364bf8b954/frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43", size = 45881, upload-time = "2025-06-09T23:01:12.296Z" }, - { url = "https://files.pythonhosted.org/packages/19/7c/71bb0bbe0832793c601fff68cd0cf6143753d0c667f9aec93d3c323f4b55/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d", size = 232409, upload-time = "2025-06-09T23:01:13.641Z" }, - { url = "https://files.pythonhosted.org/packages/c0/45/ed2798718910fe6eb3ba574082aaceff4528e6323f9a8570be0f7028d8e9/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee", size = 225132, upload-time = "2025-06-09T23:01:15.264Z" }, - { url = "https://files.pythonhosted.org/packages/ba/e2/8417ae0f8eacb1d071d4950f32f229aa6bf68ab69aab797b72a07ea68d4f/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb", size = 237638, upload-time = "2025-06-09T23:01:16.752Z" }, - { url = "https://files.pythonhosted.org/packages/f8/b7/2ace5450ce85f2af05a871b8c8719b341294775a0a6c5585d5e6170f2ce7/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f", size = 233539, upload-time = "2025-06-09T23:01:18.202Z" }, - { url = "https://files.pythonhosted.org/packages/46/b9/6989292c5539553dba63f3c83dc4598186ab2888f67c0dc1d917e6887db6/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60", size = 215646, 
upload-time = "2025-06-09T23:01:19.649Z" }, - { url = "https://files.pythonhosted.org/packages/72/31/bc8c5c99c7818293458fe745dab4fd5730ff49697ccc82b554eb69f16a24/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00", size = 232233, upload-time = "2025-06-09T23:01:21.175Z" }, - { url = "https://files.pythonhosted.org/packages/59/52/460db4d7ba0811b9ccb85af996019f5d70831f2f5f255f7cc61f86199795/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b", size = 227996, upload-time = "2025-06-09T23:01:23.098Z" }, - { url = "https://files.pythonhosted.org/packages/ba/c9/f4b39e904c03927b7ecf891804fd3b4df3db29b9e487c6418e37988d6e9d/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c", size = 242280, upload-time = "2025-06-09T23:01:24.808Z" }, - { url = "https://files.pythonhosted.org/packages/b8/33/3f8d6ced42f162d743e3517781566b8481322be321b486d9d262adf70bfb/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949", size = 217717, upload-time = "2025-06-09T23:01:26.28Z" }, - { url = "https://files.pythonhosted.org/packages/3e/e8/ad683e75da6ccef50d0ab0c2b2324b32f84fc88ceee778ed79b8e2d2fe2e/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca", size = 236644, upload-time = "2025-06-09T23:01:27.887Z" }, - { url = "https://files.pythonhosted.org/packages/b2/14/8d19ccdd3799310722195a72ac94ddc677541fb4bef4091d8e7775752360/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b", size = 238879, upload-time = "2025-06-09T23:01:29.524Z" }, - { 
url = "https://files.pythonhosted.org/packages/ce/13/c12bf657494c2fd1079a48b2db49fa4196325909249a52d8f09bc9123fd7/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e", size = 232502, upload-time = "2025-06-09T23:01:31.287Z" }, - { url = "https://files.pythonhosted.org/packages/d7/8b/e7f9dfde869825489382bc0d512c15e96d3964180c9499efcec72e85db7e/frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1", size = 39169, upload-time = "2025-06-09T23:01:35.503Z" }, - { url = "https://files.pythonhosted.org/packages/35/89/a487a98d94205d85745080a37860ff5744b9820a2c9acbcdd9440bfddf98/frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba", size = 43219, upload-time = "2025-06-09T23:01:36.784Z" }, - { url = "https://files.pythonhosted.org/packages/56/d5/5c4cf2319a49eddd9dd7145e66c4866bdc6f3dbc67ca3d59685149c11e0d/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d", size = 84345, upload-time = "2025-06-09T23:01:38.295Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/ec2c1e1dc16b85bc9d526009961953df9cec8481b6886debb36ec9107799/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d", size = 48880, upload-time = "2025-06-09T23:01:39.887Z" }, - { url = "https://files.pythonhosted.org/packages/69/86/f9596807b03de126e11e7d42ac91e3d0b19a6599c714a1989a4e85eeefc4/frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b", size = 48498, upload-time = "2025-06-09T23:01:41.318Z" }, - { url = 
"https://files.pythonhosted.org/packages/5e/cb/df6de220f5036001005f2d726b789b2c0b65f2363b104bbc16f5be8084f8/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146", size = 292296, upload-time = "2025-06-09T23:01:42.685Z" }, - { url = "https://files.pythonhosted.org/packages/83/1f/de84c642f17c8f851a2905cee2dae401e5e0daca9b5ef121e120e19aa825/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74", size = 273103, upload-time = "2025-06-09T23:01:44.166Z" }, - { url = "https://files.pythonhosted.org/packages/88/3c/c840bfa474ba3fa13c772b93070893c6e9d5c0350885760376cbe3b6c1b3/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1", size = 292869, upload-time = "2025-06-09T23:01:45.681Z" }, - { url = "https://files.pythonhosted.org/packages/a6/1c/3efa6e7d5a39a1d5ef0abeb51c48fb657765794a46cf124e5aca2c7a592c/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1", size = 291467, upload-time = "2025-06-09T23:01:47.234Z" }, - { url = "https://files.pythonhosted.org/packages/4f/00/d5c5e09d4922c395e2f2f6b79b9a20dab4b67daaf78ab92e7729341f61f6/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384", size = 266028, upload-time = "2025-06-09T23:01:48.819Z" }, - { url = "https://files.pythonhosted.org/packages/4e/27/72765be905619dfde25a7f33813ac0341eb6b076abede17a2e3fbfade0cb/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb", size = 284294, upload-time = "2025-06-09T23:01:50.394Z" }, - { url = "https://files.pythonhosted.org/packages/88/67/c94103a23001b17808eb7dd1200c156bb69fb68e63fcf0693dde4cd6228c/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c", size = 281898, upload-time = "2025-06-09T23:01:52.234Z" }, - { url = "https://files.pythonhosted.org/packages/42/34/a3e2c00c00f9e2a9db5653bca3fec306349e71aff14ae45ecc6d0951dd24/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65", size = 290465, upload-time = "2025-06-09T23:01:53.788Z" }, - { url = "https://files.pythonhosted.org/packages/bb/73/f89b7fbce8b0b0c095d82b008afd0590f71ccb3dee6eee41791cf8cd25fd/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3", size = 266385, upload-time = "2025-06-09T23:01:55.769Z" }, - { url = "https://files.pythonhosted.org/packages/cd/45/e365fdb554159462ca12df54bc59bfa7a9a273ecc21e99e72e597564d1ae/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657", size = 288771, upload-time = "2025-06-09T23:01:57.4Z" }, - { url = "https://files.pythonhosted.org/packages/00/11/47b6117002a0e904f004d70ec5194fe9144f117c33c851e3d51c765962d0/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104", size = 288206, upload-time = "2025-06-09T23:01:58.936Z" }, - { url = "https://files.pythonhosted.org/packages/40/37/5f9f3c3fd7f7746082ec67bcdc204db72dad081f4f83a503d33220a92973/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf", size = 282620, upload-time = 
"2025-06-09T23:02:00.493Z" }, - { url = "https://files.pythonhosted.org/packages/0b/31/8fbc5af2d183bff20f21aa743b4088eac4445d2bb1cdece449ae80e4e2d1/frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81", size = 43059, upload-time = "2025-06-09T23:02:02.072Z" }, - { url = "https://files.pythonhosted.org/packages/bb/ed/41956f52105b8dbc26e457c5705340c67c8cc2b79f394b79bffc09d0e938/frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e", size = 47516, upload-time = "2025-06-09T23:02:03.779Z" }, - { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, -] - [[package]] name = "h11" version = "0.14.0" @@ -612,33 +403,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/27/e3/0e0014d6ab159d48189e92044ace13b1e1fe9aa3024ba9f4e8cf172aa7c2/jinxed-1.3.0-py2.py3-none-any.whl", hash = "sha256:b993189f39dc2d7504d802152671535b06d380b26d78070559551cbf92df4fc5", size = 33085, upload-time = "2024-07-31T22:39:17.426Z" }, ] -[[package]] -name = "jsonschema" -version = "4.24.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "jsonschema-specifications" }, - { name = "referencing" }, - { name = "rpds-py" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/bf/d3/1cf5326b923a53515d8f3a2cd442e6d7e94fcc444716e879ea70a0ce3177/jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196", size = 353480, upload-time = "2025-05-26T18:48:10.459Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a2/3d/023389198f69c722d039351050738d6755376c8fd343e91dc493ea485905/jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d", size = 88709, upload-time = "2025-05-26T18:48:08.417Z" }, -] - -[[package]] -name = "jsonschema-specifications" -version = "2025.4.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "referencing" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513, upload-time = "2025-04-23T12:34:07.418Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" }, -] - [[package]] name = "keyring" version = "25.6.0" @@ -661,19 +425,18 @@ name = "linkedin-mcp-server" version = "1.0.5" source = { virtual = "." 
} dependencies = [ - { name = "fastmcp" }, + { name = "httpx" }, { name = "inquirer" }, { name = "keyring" }, { name = "linkedin-scraper" }, + { name = "mcp", extra = ["cli"] }, { name = "pyperclip" }, ] [package.dev-dependencies] dev = [ - { name = "aiohttp" }, { name = "pre-commit" }, { name = "pytest" }, - { name = "pytest-asyncio" }, { name = "pytest-cov" }, { name = "ruff" }, { name = "ty" }, @@ -681,19 +444,18 @@ dev = [ [package.metadata] requires-dist = [ - { name = "fastmcp", specifier = ">=2.10.1" }, + { name = "httpx", specifier = ">=0.28.1" }, { name = "inquirer", specifier = ">=3.4.0" }, { name = "keyring", specifier = ">=25.6.0" }, { name = "linkedin-scraper", git = "https://github.com/joeyism/linkedin_scraper.git" }, + { name = "mcp", extras = ["cli"], specifier = ">=1.6.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, ] [package.metadata.requires-dev] dev = [ - { name = "aiohttp", specifier = ">=3.12.13" }, { name = "pre-commit", specifier = ">=4.2.0" }, { name = "pytest", specifier = ">=8.3.5" }, - { name = "pytest-asyncio", specifier = ">=1.0.0" }, { name = "pytest-cov", specifier = ">=6.1.1" }, { name = "ruff", specifier = ">=0.11.11" }, { name = "ty", specifier = ">=0.0.1a12" }, @@ -765,23 +527,27 @@ wheels = [ [[package]] name = "mcp" -version = "1.10.1" +version = "1.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "httpx" }, { name = "httpx-sse" }, - { name = "jsonschema" }, { name = "pydantic" }, { name = "pydantic-settings" }, - { name = "python-multipart" }, { name = "sse-starlette" }, { name = "starlette" }, - { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, + { name = "uvicorn" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/68/63045305f29ff680a9cd5be360c755270109e6b76f696ea6824547ddbc30/mcp-1.10.1.tar.gz", hash = "sha256:aaa0957d8307feeff180da2d9d359f2b801f35c0c67f1882136239055ef034c2", size = 392969, upload-time = "2025-06-27T12:03:08.982Z" } +sdist 
= { url = "https://files.pythonhosted.org/packages/95/d2/f587cb965a56e992634bebc8611c5b579af912b74e04eb9164bd49527d21/mcp-1.6.0.tar.gz", hash = "sha256:d9324876de2c5637369f43161cd71eebfd803df5a95e46225cab8d280e366723", size = 200031, upload-time = "2025-03-27T16:46:32.336Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/3f/435a5b3d10ae242a9d6c2b33175551173c3c61fe637dc893be05c4ed0aaf/mcp-1.10.1-py3-none-any.whl", hash = "sha256:4d08301aefe906dce0fa482289db55ce1db831e3e67212e65b5e23ad8454b3c5", size = 150878, upload-time = "2025-06-27T12:03:07.328Z" }, + { url = "https://files.pythonhosted.org/packages/10/30/20a7f33b0b884a9d14dd3aa94ff1ac9da1479fe2ad66dd9e2736075d2506/mcp-1.6.0-py3-none-any.whl", hash = "sha256:7bd24c6ea042dbec44c754f100984d186620d8b841ec30f1b19eda9b93a634d0", size = 76077, upload-time = "2025-03-27T16:46:29.919Z" }, +] + +[package.optional-dependencies] +cli = [ + { name = "python-dotenv" }, + { name = "typer" }, ] [[package]] @@ -802,69 +568,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2b/9f/7ba6f94fc1e9ac3d2b853fdff3035fb2fa5afbed898c4a72b8a020610594/more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e", size = 65278, upload-time = "2025-04-22T14:17:40.49Z" }, ] -[[package]] -name = "multidict" -version = "6.6.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3d/2c/5dad12e82fbdf7470f29bff2171484bf07cb3b16ada60a6589af8f376440/multidict-6.6.3.tar.gz", hash = "sha256:798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc", size = 101006, upload-time = "2025-06-30T15:53:46.929Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/a0/6b57988ea102da0623ea814160ed78d45a2645e4bbb499c2896d12833a70/multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6", size = 76514, upload-time = 
"2025-06-30T15:51:48.728Z" }, - { url = "https://files.pythonhosted.org/packages/07/7a/d1e92665b0850c6c0508f101f9cf0410c1afa24973e1115fe9c6a185ebf7/multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f", size = 45394, upload-time = "2025-06-30T15:51:49.986Z" }, - { url = "https://files.pythonhosted.org/packages/52/6f/dd104490e01be6ef8bf9573705d8572f8c2d2c561f06e3826b081d9e6591/multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55", size = 43590, upload-time = "2025-06-30T15:51:51.331Z" }, - { url = "https://files.pythonhosted.org/packages/44/fe/06e0e01b1b0611e6581b7fd5a85b43dacc08b6cea3034f902f383b0873e5/multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b", size = 237292, upload-time = "2025-06-30T15:51:52.584Z" }, - { url = "https://files.pythonhosted.org/packages/ce/71/4f0e558fb77696b89c233c1ee2d92f3e1d5459070a0e89153c9e9e804186/multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888", size = 258385, upload-time = "2025-06-30T15:51:53.913Z" }, - { url = "https://files.pythonhosted.org/packages/e3/25/cca0e68228addad24903801ed1ab42e21307a1b4b6dd2cf63da5d3ae082a/multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d", size = 242328, upload-time = "2025-06-30T15:51:55.672Z" }, - { url = "https://files.pythonhosted.org/packages/6e/a3/46f2d420d86bbcb8fe660b26a10a219871a0fbf4d43cb846a4031533f3e0/multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680", size = 268057, upload-time = "2025-06-30T15:51:57.037Z" }, - { url = "https://files.pythonhosted.org/packages/9e/73/1c743542fe00794a2ec7466abd3f312ccb8fad8dff9f36d42e18fb1ec33e/multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a", size = 269341, upload-time = "2025-06-30T15:51:59.111Z" }, - { url = "https://files.pythonhosted.org/packages/a4/11/6ec9dcbe2264b92778eeb85407d1df18812248bf3506a5a1754bc035db0c/multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961", size = 256081, upload-time = "2025-06-30T15:52:00.533Z" }, - { url = "https://files.pythonhosted.org/packages/9b/2b/631b1e2afeb5f1696846d747d36cda075bfdc0bc7245d6ba5c319278d6c4/multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65", size = 253581, upload-time = "2025-06-30T15:52:02.43Z" }, - { url = "https://files.pythonhosted.org/packages/bf/0e/7e3b93f79efeb6111d3bf9a1a69e555ba1d07ad1c11bceb56b7310d0d7ee/multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643", size = 250750, upload-time = "2025-06-30T15:52:04.26Z" }, - { url = "https://files.pythonhosted.org/packages/ad/9e/086846c1d6601948e7de556ee464a2d4c85e33883e749f46b9547d7b0704/multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063", size = 251548, upload-time = "2025-06-30T15:52:06.002Z" }, - { url = "https://files.pythonhosted.org/packages/8c/7b/86ec260118e522f1a31550e87b23542294880c97cfbf6fb18cc67b044c66/multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3", size = 262718, upload-time = "2025-06-30T15:52:07.707Z" }, - { url = "https://files.pythonhosted.org/packages/8c/bd/22ce8f47abb0be04692c9fc4638508b8340987b18691aa7775d927b73f72/multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75", size = 259603, upload-time = "2025-06-30T15:52:09.58Z" }, - { url = "https://files.pythonhosted.org/packages/07/9c/91b7ac1691be95cd1f4a26e36a74b97cda6aa9820632d31aab4410f46ebd/multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10", size = 251351, upload-time = "2025-06-30T15:52:10.947Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl", hash = "sha256:73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5", size = 41860, upload-time = "2025-06-30T15:52:12.334Z" }, - { url = "https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17", size = 45982, upload-time = "2025-06-30T15:52:13.6Z" }, - { url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b", size = 43210, upload-time = "2025-06-30T15:52:14.893Z" }, - { url = "https://files.pythonhosted.org/packages/52/1d/0bebcbbb4f000751fbd09957257903d6e002943fc668d841a4cf2fb7f872/multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:540d3c06d48507357a7d57721e5094b4f7093399a0106c211f33540fdc374d55", size = 75843, upload-time = "2025-06-30T15:52:16.155Z" }, - { url = 
"https://files.pythonhosted.org/packages/07/8f/cbe241b0434cfe257f65c2b1bcf9e8d5fb52bc708c5061fb29b0fed22bdf/multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9c19cea2a690f04247d43f366d03e4eb110a0dc4cd1bbeee4d445435428ed35b", size = 45053, upload-time = "2025-06-30T15:52:17.429Z" }, - { url = "https://files.pythonhosted.org/packages/32/d2/0b3b23f9dbad5b270b22a3ac3ea73ed0a50ef2d9a390447061178ed6bdb8/multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7af039820cfd00effec86bda5d8debef711a3e86a1d3772e85bea0f243a4bd65", size = 43273, upload-time = "2025-06-30T15:52:19.346Z" }, - { url = "https://files.pythonhosted.org/packages/fd/fe/6eb68927e823999e3683bc49678eb20374ba9615097d085298fd5b386564/multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:500b84f51654fdc3944e936f2922114349bf8fdcac77c3092b03449f0e5bc2b3", size = 237124, upload-time = "2025-06-30T15:52:20.773Z" }, - { url = "https://files.pythonhosted.org/packages/e7/ab/320d8507e7726c460cb77117848b3834ea0d59e769f36fdae495f7669929/multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3fc723ab8a5c5ed6c50418e9bfcd8e6dceba6c271cee6728a10a4ed8561520c", size = 256892, upload-time = "2025-06-30T15:52:22.242Z" }, - { url = "https://files.pythonhosted.org/packages/76/60/38ee422db515ac69834e60142a1a69111ac96026e76e8e9aa347fd2e4591/multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:94c47ea3ade005b5976789baaed66d4de4480d0a0bf31cef6edaa41c1e7b56a6", size = 240547, upload-time = "2025-06-30T15:52:23.736Z" }, - { url = "https://files.pythonhosted.org/packages/27/fb/905224fde2dff042b030c27ad95a7ae744325cf54b890b443d30a789b80e/multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dbc7cf464cc6d67e83e136c9f55726da3a30176f020a36ead246eceed87f1cd8", 
size = 266223, upload-time = "2025-06-30T15:52:25.185Z" }, - { url = "https://files.pythonhosted.org/packages/76/35/dc38ab361051beae08d1a53965e3e1a418752fc5be4d3fb983c5582d8784/multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:900eb9f9da25ada070f8ee4a23f884e0ee66fe4e1a38c3af644256a508ad81ca", size = 267262, upload-time = "2025-06-30T15:52:26.969Z" }, - { url = "https://files.pythonhosted.org/packages/1f/a3/0a485b7f36e422421b17e2bbb5a81c1af10eac1d4476f2ff92927c730479/multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c6df517cf177da5d47ab15407143a89cd1a23f8b335f3a28d57e8b0a3dbb884", size = 254345, upload-time = "2025-06-30T15:52:28.467Z" }, - { url = "https://files.pythonhosted.org/packages/b4/59/bcdd52c1dab7c0e0d75ff19cac751fbd5f850d1fc39172ce809a74aa9ea4/multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ef421045f13879e21c994b36e728d8e7d126c91a64b9185810ab51d474f27e7", size = 252248, upload-time = "2025-06-30T15:52:29.938Z" }, - { url = "https://files.pythonhosted.org/packages/bb/a4/2d96aaa6eae8067ce108d4acee6f45ced5728beda55c0f02ae1072c730d1/multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:6c1e61bb4f80895c081790b6b09fa49e13566df8fbff817da3f85b3a8192e36b", size = 250115, upload-time = "2025-06-30T15:52:31.416Z" }, - { url = "https://files.pythonhosted.org/packages/25/d2/ed9f847fa5c7d0677d4f02ea2c163d5e48573de3f57bacf5670e43a5ffaa/multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e5e8523bb12d7623cd8300dbd91b9e439a46a028cd078ca695eb66ba31adee3c", size = 249649, upload-time = "2025-06-30T15:52:32.996Z" }, - { url = "https://files.pythonhosted.org/packages/1f/af/9155850372563fc550803d3f25373308aa70f59b52cff25854086ecb4a79/multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ef58340cc896219e4e653dade08fea5c55c6df41bcc68122e3be3e9d873d9a7b", size = 261203, upload-time = 
"2025-06-30T15:52:34.521Z" }, - { url = "https://files.pythonhosted.org/packages/36/2f/c6a728f699896252cf309769089568a33c6439626648843f78743660709d/multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fc9dc435ec8699e7b602b94fe0cd4703e69273a01cbc34409af29e7820f777f1", size = 258051, upload-time = "2025-06-30T15:52:35.999Z" }, - { url = "https://files.pythonhosted.org/packages/d0/60/689880776d6b18fa2b70f6cc74ff87dd6c6b9b47bd9cf74c16fecfaa6ad9/multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e864486ef4ab07db5e9cb997bad2b681514158d6954dd1958dfb163b83d53e6", size = 249601, upload-time = "2025-06-30T15:52:37.473Z" }, - { url = "https://files.pythonhosted.org/packages/75/5e/325b11f2222a549019cf2ef879c1f81f94a0d40ace3ef55cf529915ba6cc/multidict-6.6.3-cp313-cp313-win32.whl", hash = "sha256:5633a82fba8e841bc5c5c06b16e21529573cd654f67fd833650a215520a6210e", size = 41683, upload-time = "2025-06-30T15:52:38.927Z" }, - { url = "https://files.pythonhosted.org/packages/b1/ad/cf46e73f5d6e3c775cabd2a05976547f3f18b39bee06260369a42501f053/multidict-6.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:e93089c1570a4ad54c3714a12c2cef549dc9d58e97bcded193d928649cab78e9", size = 45811, upload-time = "2025-06-30T15:52:40.207Z" }, - { url = "https://files.pythonhosted.org/packages/c5/c9/2e3fe950db28fb7c62e1a5f46e1e38759b072e2089209bc033c2798bb5ec/multidict-6.6.3-cp313-cp313-win_arm64.whl", hash = "sha256:c60b401f192e79caec61f166da9c924e9f8bc65548d4246842df91651e83d600", size = 43056, upload-time = "2025-06-30T15:52:41.575Z" }, - { url = "https://files.pythonhosted.org/packages/3a/58/aaf8114cf34966e084a8cc9517771288adb53465188843d5a19862cb6dc3/multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:02fd8f32d403a6ff13864b0851f1f523d4c988051eea0471d4f1fd8010f11134", size = 82811, upload-time = "2025-06-30T15:52:43.281Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/af/5402e7b58a1f5b987a07ad98f2501fdba2a4f4b4c30cf114e3ce8db64c87/multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f3aa090106b1543f3f87b2041eef3c156c8da2aed90c63a2fbed62d875c49c37", size = 48304, upload-time = "2025-06-30T15:52:45.026Z" }, - { url = "https://files.pythonhosted.org/packages/39/65/ab3c8cafe21adb45b24a50266fd747147dec7847425bc2a0f6934b3ae9ce/multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e924fb978615a5e33ff644cc42e6aa241effcf4f3322c09d4f8cebde95aff5f8", size = 46775, upload-time = "2025-06-30T15:52:46.459Z" }, - { url = "https://files.pythonhosted.org/packages/49/ba/9fcc1b332f67cc0c0c8079e263bfab6660f87fe4e28a35921771ff3eea0d/multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:b9fe5a0e57c6dbd0e2ce81ca66272282c32cd11d31658ee9553849d91289e1c1", size = 229773, upload-time = "2025-06-30T15:52:47.88Z" }, - { url = "https://files.pythonhosted.org/packages/a4/14/0145a251f555f7c754ce2dcbcd012939bbd1f34f066fa5d28a50e722a054/multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b24576f208793ebae00280c59927c3b7c2a3b1655e443a25f753c4611bc1c373", size = 250083, upload-time = "2025-06-30T15:52:49.366Z" }, - { url = "https://files.pythonhosted.org/packages/9e/d4/d5c0bd2bbb173b586c249a151a26d2fb3ec7d53c96e42091c9fef4e1f10c/multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:135631cb6c58eac37d7ac0df380294fecdc026b28837fa07c02e459c7fb9c54e", size = 228980, upload-time = "2025-06-30T15:52:50.903Z" }, - { url = "https://files.pythonhosted.org/packages/21/32/c9a2d8444a50ec48c4733ccc67254100c10e1c8ae8e40c7a2d2183b59b97/multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:274d416b0df887aef98f19f21578653982cfb8a05b4e187d4a17103322eeaf8f", size = 257776, upload-time = "2025-06-30T15:52:52.764Z" }, - { url = "https://files.pythonhosted.org/packages/68/d0/14fa1699f4ef629eae08ad6201c6b476098f5efb051b296f4c26be7a9fdf/multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e252017a817fad7ce05cafbe5711ed40faeb580e63b16755a3a24e66fa1d87c0", size = 256882, upload-time = "2025-06-30T15:52:54.596Z" }, - { url = "https://files.pythonhosted.org/packages/da/88/84a27570fbe303c65607d517a5f147cd2fc046c2d1da02b84b17b9bdc2aa/multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4cc8d848cd4fe1cdee28c13ea79ab0ed37fc2e89dd77bac86a2e7959a8c3bc", size = 247816, upload-time = "2025-06-30T15:52:56.175Z" }, - { url = "https://files.pythonhosted.org/packages/1c/60/dca352a0c999ce96a5d8b8ee0b2b9f729dcad2e0b0c195f8286269a2074c/multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9e236a7094b9c4c1b7585f6b9cca34b9d833cf079f7e4c49e6a4a6ec9bfdc68f", size = 245341, upload-time = "2025-06-30T15:52:57.752Z" }, - { url = "https://files.pythonhosted.org/packages/50/ef/433fa3ed06028f03946f3993223dada70fb700f763f70c00079533c34578/multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e0cb0ab69915c55627c933f0b555a943d98ba71b4d1c57bc0d0a66e2567c7471", size = 235854, upload-time = "2025-06-30T15:52:59.74Z" }, - { url = "https://files.pythonhosted.org/packages/1b/1f/487612ab56fbe35715320905215a57fede20de7db40a261759690dc80471/multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:81ef2f64593aba09c5212a3d0f8c906a0d38d710a011f2f42759704d4557d3f2", size = 243432, upload-time = "2025-06-30T15:53:01.602Z" }, - { url = "https://files.pythonhosted.org/packages/da/6f/ce8b79de16cd885c6f9052c96a3671373d00c59b3ee635ea93e6e81b8ccf/multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = 
"sha256:b9cbc60010de3562545fa198bfc6d3825df430ea96d2cc509c39bd71e2e7d648", size = 252731, upload-time = "2025-06-30T15:53:03.517Z" }, - { url = "https://files.pythonhosted.org/packages/bb/fe/a2514a6aba78e5abefa1624ca85ae18f542d95ac5cde2e3815a9fbf369aa/multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:70d974eaaa37211390cd02ef93b7e938de564bbffa866f0b08d07e5e65da783d", size = 247086, upload-time = "2025-06-30T15:53:05.48Z" }, - { url = "https://files.pythonhosted.org/packages/8c/22/b788718d63bb3cce752d107a57c85fcd1a212c6c778628567c9713f9345a/multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3713303e4a6663c6d01d648a68f2848701001f3390a030edaaf3fc949c90bf7c", size = 243338, upload-time = "2025-06-30T15:53:07.522Z" }, - { url = "https://files.pythonhosted.org/packages/22/d6/fdb3d0670819f2228f3f7d9af613d5e652c15d170c83e5f1c94fbc55a25b/multidict-6.6.3-cp313-cp313t-win32.whl", hash = "sha256:639ecc9fe7cd73f2495f62c213e964843826f44505a3e5d82805aa85cac6f89e", size = 47812, upload-time = "2025-06-30T15:53:09.263Z" }, - { url = "https://files.pythonhosted.org/packages/b6/d6/a9d2c808f2c489ad199723197419207ecbfbc1776f6e155e1ecea9c883aa/multidict-6.6.3-cp313-cp313t-win_amd64.whl", hash = "sha256:9f97e181f344a0ef3881b573d31de8542cc0dbc559ec68c8f8b5ce2c2e91646d", size = 53011, upload-time = "2025-06-30T15:53:11.038Z" }, - { url = "https://files.pythonhosted.org/packages/f2/40/b68001cba8188dd267590a111f9661b6256debc327137667e832bf5d66e8/multidict-6.6.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ce8b7693da41a3c4fde5871c738a81490cea5496c671d74374c8ab889e1834fb", size = 45254, upload-time = "2025-06-30T15:53:12.421Z" }, - { url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", size = 12313, upload-time = "2025-06-30T15:53:45.437Z" }, -] - [[package]] name = "nodeenv" 
version = "1.9.1" @@ -874,18 +577,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, ] -[[package]] -name = "openapi-pydantic" -version = "0.5.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pydantic" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/02/2e/58d83848dd1a79cb92ed8e63f6ba901ca282c5f09d04af9423ec26c56fd7/openapi_pydantic-0.5.1.tar.gz", hash = "sha256:ff6835af6bde7a459fb93eb93bb92b8749b754fc6e51b2f1590a19dc3005ee0d", size = 60892, upload-time = "2025-01-08T19:29:27.083Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/12/cf/03675d8bd8ecbf4445504d8071adab19f5f993676795708e36402ab38263/openapi_pydantic-0.5.1-py3-none-any.whl", hash = "sha256:a3a09ef4586f5bd760a8df7f43028b60cafb6d9f61de2acba9574766255ab146", size = 96381, upload-time = "2025-01-08T19:29:25.275Z" }, -] - [[package]] name = "outcome" version = "1.3.0.post0" @@ -941,63 +632,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload-time = "2025-03-18T21:35:19.343Z" }, ] -[[package]] -name = "propcache" -version = "0.3.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, - { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, - { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, - { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, - { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, - { url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, - { url = 
"https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, - { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, - { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, - { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, - { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, - { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, - { url = 
"https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, - { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, - { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, - { url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286, upload-time = "2025-06-09T22:54:54.369Z" }, - { url = "https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425, upload-time = "2025-06-09T22:54:55.642Z" }, - { url = 
"https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846, upload-time = "2025-06-09T22:54:57.246Z" }, - { url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871, upload-time = "2025-06-09T22:54:58.975Z" }, - { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 215720, upload-time = "2025-06-09T22:55:00.471Z" }, - { url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203, upload-time = "2025-06-09T22:55:01.834Z" }, - { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365, upload-time = "2025-06-09T22:55:03.199Z" }, - { url = "https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016, upload-time = 
"2025-06-09T22:55:04.518Z" }, - { url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596, upload-time = "2025-06-09T22:55:05.942Z" }, - { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977, upload-time = "2025-06-09T22:55:07.792Z" }, - { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220, upload-time = "2025-06-09T22:55:09.173Z" }, - { url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642, upload-time = "2025-06-09T22:55:10.62Z" }, - { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789, upload-time = "2025-06-09T22:55:12.029Z" }, - { url = "https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880, upload-time = "2025-06-09T22:55:13.45Z" }, - { url = 
"https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220, upload-time = "2025-06-09T22:55:15.284Z" }, - { url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678, upload-time = "2025-06-09T22:55:16.445Z" }, - { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560, upload-time = "2025-06-09T22:55:17.598Z" }, - { url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676, upload-time = "2025-06-09T22:55:18.922Z" }, - { url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701, upload-time = "2025-06-09T22:55:20.106Z" }, - { url = "https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934, upload-time = "2025-06-09T22:55:21.5Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316, upload-time = "2025-06-09T22:55:22.918Z" }, - { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619, upload-time = "2025-06-09T22:55:24.651Z" }, - { url = "https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896, upload-time = "2025-06-09T22:55:26.049Z" }, - { url = "https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111, upload-time = "2025-06-09T22:55:27.381Z" }, - { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 268334, upload-time = "2025-06-09T22:55:28.747Z" }, - { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026, upload-time = "2025-06-09T22:55:30.184Z" }, 
- { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724, upload-time = "2025-06-09T22:55:31.646Z" }, - { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868, upload-time = "2025-06-09T22:55:33.209Z" }, - { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322, upload-time = "2025-06-09T22:55:35.065Z" }, - { url = "https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778, upload-time = "2025-06-09T22:55:36.45Z" }, - { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175, upload-time = "2025-06-09T22:55:38.436Z" }, - { url = "https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857, upload-time = "2025-06-09T22:55:39.687Z" }, - { url = 
"https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, -] - [[package]] name = "pycparser" version = "2.22" @@ -1009,7 +643,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.7" +version = "2.11.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1017,56 +651,51 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/10/2e/ca897f093ee6c5f3b0bee123ee4465c50e75431c3d5b6a3b44a47134e891/pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3", size = 785513, upload-time = "2025-04-08T13:27:06.399Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, -] - -[package.optional-dependencies] -email = [ - { name = "email-validator" }, + { url = "https://files.pythonhosted.org/packages/b0/1d/407b29780a289868ed696d1616f4aad49d6388e5a77f567dcd2629dcd7b8/pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f", size = 443591, upload-time = "2025-04-08T13:27:03.789Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.33.1" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 
2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = 
"2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = 
"https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = 
"2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/17/19/ed6a078a5287aea7922de6841ef4c06157931622c89c2a47940837b5eecd/pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df", size = 434395, upload-time = "2025-04-02T09:49:41.8Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/ce/3cb22b07c29938f97ff5f5bb27521f95e2ebec399b882392deb68d6c440e/pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8", size = 2026640, upload-time = "2025-04-02T09:47:25.394Z" }, + { url = "https://files.pythonhosted.org/packages/19/78/f381d643b12378fee782a72126ec5d793081ef03791c28a0fd542a5bee64/pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498", size = 1852649, upload-time = "2025-04-02T09:47:27.417Z" }, + { url = "https://files.pythonhosted.org/packages/9d/2b/98a37b80b15aac9eb2c6cfc6dbd35e5058a352891c5cce3a8472d77665a6/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939", size = 1892472, upload-time = "2025-04-02T09:47:29.006Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/d4/3c59514e0f55a161004792b9ff3039da52448f43f5834f905abef9db6e4a/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d", size = 1977509, upload-time = "2025-04-02T09:47:33.464Z" }, + { url = "https://files.pythonhosted.org/packages/a9/b6/c2c7946ef70576f79a25db59a576bce088bdc5952d1b93c9789b091df716/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e", size = 2128702, upload-time = "2025-04-02T09:47:34.812Z" }, + { url = "https://files.pythonhosted.org/packages/88/fe/65a880f81e3f2a974312b61f82a03d85528f89a010ce21ad92f109d94deb/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3", size = 2679428, upload-time = "2025-04-02T09:47:37.315Z" }, + { url = "https://files.pythonhosted.org/packages/6f/ff/4459e4146afd0462fb483bb98aa2436d69c484737feaceba1341615fb0ac/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d", size = 2008753, upload-time = "2025-04-02T09:47:39.013Z" }, + { url = "https://files.pythonhosted.org/packages/7c/76/1c42e384e8d78452ededac8b583fe2550c84abfef83a0552e0e7478ccbc3/pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b", size = 2114849, upload-time = "2025-04-02T09:47:40.427Z" }, + { url = "https://files.pythonhosted.org/packages/00/72/7d0cf05095c15f7ffe0eb78914b166d591c0eed72f294da68378da205101/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39", size = 2069541, upload-time = 
"2025-04-02T09:47:42.01Z" }, + { url = "https://files.pythonhosted.org/packages/b3/69/94a514066bb7d8be499aa764926937409d2389c09be0b5107a970286ef81/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a", size = 2239225, upload-time = "2025-04-02T09:47:43.425Z" }, + { url = "https://files.pythonhosted.org/packages/84/b0/e390071eadb44b41f4f54c3cef64d8bf5f9612c92686c9299eaa09e267e2/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db", size = 2248373, upload-time = "2025-04-02T09:47:44.979Z" }, + { url = "https://files.pythonhosted.org/packages/d6/b2/288b3579ffc07e92af66e2f1a11be3b056fe1214aab314748461f21a31c3/pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda", size = 1907034, upload-time = "2025-04-02T09:47:46.843Z" }, + { url = "https://files.pythonhosted.org/packages/02/28/58442ad1c22b5b6742b992ba9518420235adced665513868f99a1c2638a5/pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4", size = 1956848, upload-time = "2025-04-02T09:47:48.404Z" }, + { url = "https://files.pythonhosted.org/packages/a1/eb/f54809b51c7e2a1d9f439f158b8dd94359321abcc98767e16fc48ae5a77e/pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea", size = 1903986, upload-time = "2025-04-02T09:47:49.839Z" }, + { url = "https://files.pythonhosted.org/packages/7a/24/eed3466a4308d79155f1cdd5c7432c80ddcc4530ba8623b79d5ced021641/pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a", size = 2033551, upload-time = "2025-04-02T09:47:51.648Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/14/df54b1a0bc9b6ded9b758b73139d2c11b4e8eb43e8ab9c5847c0a2913ada/pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266", size = 1852785, upload-time = "2025-04-02T09:47:53.149Z" }, + { url = "https://files.pythonhosted.org/packages/fa/96/e275f15ff3d34bb04b0125d9bc8848bf69f25d784d92a63676112451bfb9/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3", size = 1897758, upload-time = "2025-04-02T09:47:55.006Z" }, + { url = "https://files.pythonhosted.org/packages/b7/d8/96bc536e975b69e3a924b507d2a19aedbf50b24e08c80fb00e35f9baaed8/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a", size = 1986109, upload-time = "2025-04-02T09:47:56.532Z" }, + { url = "https://files.pythonhosted.org/packages/90/72/ab58e43ce7e900b88cb571ed057b2fcd0e95b708a2e0bed475b10130393e/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516", size = 2129159, upload-time = "2025-04-02T09:47:58.088Z" }, + { url = "https://files.pythonhosted.org/packages/dc/3f/52d85781406886c6870ac995ec0ba7ccc028b530b0798c9080531b409fdb/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764", size = 2680222, upload-time = "2025-04-02T09:47:59.591Z" }, + { url = "https://files.pythonhosted.org/packages/f4/56/6e2ef42f363a0eec0fd92f74a91e0ac48cd2e49b695aac1509ad81eee86a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d", size = 2006980, upload-time = 
"2025-04-02T09:48:01.397Z" }, + { url = "https://files.pythonhosted.org/packages/4c/c0/604536c4379cc78359f9ee0aa319f4aedf6b652ec2854953f5a14fc38c5a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4", size = 2120840, upload-time = "2025-04-02T09:48:03.056Z" }, + { url = "https://files.pythonhosted.org/packages/1f/46/9eb764814f508f0edfb291a0f75d10854d78113fa13900ce13729aaec3ae/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde", size = 2072518, upload-time = "2025-04-02T09:48:04.662Z" }, + { url = "https://files.pythonhosted.org/packages/42/e3/fb6b2a732b82d1666fa6bf53e3627867ea3131c5f39f98ce92141e3e3dc1/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e", size = 2248025, upload-time = "2025-04-02T09:48:06.226Z" }, + { url = "https://files.pythonhosted.org/packages/5c/9d/fbe8fe9d1aa4dac88723f10a921bc7418bd3378a567cb5e21193a3c48b43/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd", size = 2254991, upload-time = "2025-04-02T09:48:08.114Z" }, + { url = "https://files.pythonhosted.org/packages/aa/99/07e2237b8a66438d9b26482332cda99a9acccb58d284af7bc7c946a42fd3/pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f", size = 1915262, upload-time = "2025-04-02T09:48:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f4/e457a7849beeed1e5defbcf5051c6f7b3c91a0624dd31543a64fc9adcf52/pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40", size = 1956626, upload-time = "2025-04-02T09:48:11.288Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/d0/e8d567a7cff7b04e017ae164d98011f1e1894269fe8e90ea187a3cbfb562/pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523", size = 1909590, upload-time = "2025-04-02T09:48:12.861Z" }, + { url = "https://files.pythonhosted.org/packages/ef/fd/24ea4302d7a527d672c5be06e17df16aabfb4e9fdc6e0b345c21580f3d2a/pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d", size = 1812963, upload-time = "2025-04-02T09:48:14.553Z" }, + { url = "https://files.pythonhosted.org/packages/5f/95/4fbc2ecdeb5c1c53f1175a32d870250194eb2fdf6291b795ab08c8646d5d/pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c", size = 1986896, upload-time = "2025-04-02T09:48:16.222Z" }, + { url = "https://files.pythonhosted.org/packages/71/ae/fe31e7f4a62431222d8f65a3bd02e3fa7e6026d154a00818e6d30520ea77/pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18", size = 1931810, upload-time = "2025-04-02T09:48:17.97Z" }, ] [[package]] @@ -1121,18 +750,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634, upload-time = "2025-03-02T12:54:52.069Z" }, ] -[[package]] -name = "pytest-asyncio" -version = "1.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pytest" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d0/d4/14f53324cb1a6381bef29d698987625d80052bb33932d8e7cbf9b337b17c/pytest_asyncio-1.0.0.tar.gz", hash = "sha256:d15463d13f4456e1ead2594520216b225a16f781e144f8fdf6c5bb4667c48b3f", 
size = 46960, upload-time = "2025-05-26T04:54:40.484Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/30/05/ce271016e351fddc8399e546f6e23761967ee09c8c568bbfbecb0c150171/pytest_asyncio-1.0.0-py3-none-any.whl", hash = "sha256:4f024da9f1ef945e680dc68610b52550e36590a67fd31bb3b4943979a1f90ef3", size = 15976, upload-time = "2025-05-26T04:54:39.035Z" }, -] - [[package]] name = "pytest-cov" version = "6.1.1" @@ -1155,15 +772,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload-time = "2025-03-25T10:14:55.034Z" }, ] -[[package]] -name = "python-multipart" -version = "0.0.20" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, -] - [[package]] name = "pywin32-ctypes" version = "0.2.3" @@ -1208,20 +816,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a9/10/e4b1e0e5b6b6745c8098c275b69bc9d73e9542d5c7da4f137542b499ed44/readchar-4.2.1-py3-none-any.whl", hash = "sha256:a769305cd3994bb5fa2764aa4073452dc105a4ec39068ffe6efd3c20c60acc77", size = 9350, upload-time = "2024-11-04T18:28:02.859Z" }, ] -[[package]] -name = "referencing" -version = "0.36.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" 
}, - { name = "rpds-py" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, -] - [[package]] name = "requests" version = "2.32.3" @@ -1250,82 +844,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, ] -[[package]] -name = "rpds-py" -version = "0.26.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a5/aa/4456d84bbb54adc6a916fb10c9b374f78ac840337644e4a5eda229c81275/rpds_py-0.26.0.tar.gz", hash = "sha256:20dae58a859b0906f0685642e591056f1e787f3a8b39c8e8749a45dc7d26bdb0", size = 27385, upload-time = "2025-07-01T15:57:13.958Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ea/86/90eb87c6f87085868bd077c7a9938006eb1ce19ed4d06944a90d3560fce2/rpds_py-0.26.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:894514d47e012e794f1350f076c427d2347ebf82f9b958d554d12819849a369d", size = 363933, upload-time = "2025-07-01T15:54:15.734Z" }, - { url = "https://files.pythonhosted.org/packages/63/78/4469f24d34636242c924626082b9586f064ada0b5dbb1e9d096ee7a8e0c6/rpds_py-0.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:fc921b96fa95a097add244da36a1d9e4f3039160d1d30f1b35837bf108c21136", size = 350447, upload-time = "2025-07-01T15:54:16.922Z" }, - { url = "https://files.pythonhosted.org/packages/ad/91/c448ed45efdfdade82348d5e7995e15612754826ea640afc20915119734f/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1157659470aa42a75448b6e943c895be8c70531c43cb78b9ba990778955582", size = 384711, upload-time = "2025-07-01T15:54:18.101Z" }, - { url = "https://files.pythonhosted.org/packages/ec/43/e5c86fef4be7f49828bdd4ecc8931f0287b1152c0bb0163049b3218740e7/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:521ccf56f45bb3a791182dc6b88ae5f8fa079dd705ee42138c76deb1238e554e", size = 400865, upload-time = "2025-07-01T15:54:19.295Z" }, - { url = "https://files.pythonhosted.org/packages/55/34/e00f726a4d44f22d5c5fe2e5ddd3ac3d7fd3f74a175607781fbdd06fe375/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9def736773fd56b305c0eef698be5192c77bfa30d55a0e5885f80126c4831a15", size = 517763, upload-time = "2025-07-01T15:54:20.858Z" }, - { url = "https://files.pythonhosted.org/packages/52/1c/52dc20c31b147af724b16104500fba13e60123ea0334beba7b40e33354b4/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdad4ea3b4513b475e027be79e5a0ceac8ee1c113a1a11e5edc3c30c29f964d8", size = 406651, upload-time = "2025-07-01T15:54:22.508Z" }, - { url = "https://files.pythonhosted.org/packages/2e/77/87d7bfabfc4e821caa35481a2ff6ae0b73e6a391bb6b343db2c91c2b9844/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82b165b07f416bdccf5c84546a484cc8f15137ca38325403864bfdf2b5b72f6a", size = 386079, upload-time = "2025-07-01T15:54:23.987Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d4/7f2200c2d3ee145b65b3cddc4310d51f7da6a26634f3ac87125fd789152a/rpds_py-0.26.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", 
hash = "sha256:d04cab0a54b9dba4d278fe955a1390da3cf71f57feb78ddc7cb67cbe0bd30323", size = 421379, upload-time = "2025-07-01T15:54:25.073Z" }, - { url = "https://files.pythonhosted.org/packages/ae/13/9fdd428b9c820869924ab62236b8688b122baa22d23efdd1c566938a39ba/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:79061ba1a11b6a12743a2b0f72a46aa2758613d454aa6ba4f5a265cc48850158", size = 562033, upload-time = "2025-07-01T15:54:26.225Z" }, - { url = "https://files.pythonhosted.org/packages/f3/e1/b69686c3bcbe775abac3a4c1c30a164a2076d28df7926041f6c0eb5e8d28/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f405c93675d8d4c5ac87364bb38d06c988e11028a64b52a47158a355079661f3", size = 591639, upload-time = "2025-07-01T15:54:27.424Z" }, - { url = "https://files.pythonhosted.org/packages/5c/c9/1e3d8c8863c84a90197ac577bbc3d796a92502124c27092413426f670990/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dafd4c44b74aa4bed4b250f1aed165b8ef5de743bcca3b88fc9619b6087093d2", size = 557105, upload-time = "2025-07-01T15:54:29.93Z" }, - { url = "https://files.pythonhosted.org/packages/9f/c5/90c569649057622959f6dcc40f7b516539608a414dfd54b8d77e3b201ac0/rpds_py-0.26.0-cp312-cp312-win32.whl", hash = "sha256:3da5852aad63fa0c6f836f3359647870e21ea96cf433eb393ffa45263a170d44", size = 223272, upload-time = "2025-07-01T15:54:31.128Z" }, - { url = "https://files.pythonhosted.org/packages/7d/16/19f5d9f2a556cfed454eebe4d354c38d51c20f3db69e7b4ce6cff904905d/rpds_py-0.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf47cfdabc2194a669dcf7a8dbba62e37a04c5041d2125fae0233b720da6f05c", size = 234995, upload-time = "2025-07-01T15:54:32.195Z" }, - { url = "https://files.pythonhosted.org/packages/83/f0/7935e40b529c0e752dfaa7880224771b51175fce08b41ab4a92eb2fbdc7f/rpds_py-0.26.0-cp312-cp312-win_arm64.whl", hash = "sha256:20ab1ae4fa534f73647aad289003f1104092890849e0266271351922ed5574f8", size = 223198, upload-time = "2025-07-01T15:54:33.271Z" }, - { url = 
"https://files.pythonhosted.org/packages/6a/67/bb62d0109493b12b1c6ab00de7a5566aa84c0e44217c2d94bee1bd370da9/rpds_py-0.26.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:696764a5be111b036256c0b18cd29783fab22154690fc698062fc1b0084b511d", size = 363917, upload-time = "2025-07-01T15:54:34.755Z" }, - { url = "https://files.pythonhosted.org/packages/4b/f3/34e6ae1925a5706c0f002a8d2d7f172373b855768149796af87bd65dcdb9/rpds_py-0.26.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e6c15d2080a63aaed876e228efe4f814bc7889c63b1e112ad46fdc8b368b9e1", size = 350073, upload-time = "2025-07-01T15:54:36.292Z" }, - { url = "https://files.pythonhosted.org/packages/75/83/1953a9d4f4e4de7fd0533733e041c28135f3c21485faaef56a8aadbd96b5/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390e3170babf42462739a93321e657444f0862c6d722a291accc46f9d21ed04e", size = 384214, upload-time = "2025-07-01T15:54:37.469Z" }, - { url = "https://files.pythonhosted.org/packages/48/0e/983ed1b792b3322ea1d065e67f4b230f3b96025f5ce3878cc40af09b7533/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7da84c2c74c0f5bc97d853d9e17bb83e2dcafcff0dc48286916001cc114379a1", size = 400113, upload-time = "2025-07-01T15:54:38.954Z" }, - { url = "https://files.pythonhosted.org/packages/69/7f/36c0925fff6f660a80be259c5b4f5e53a16851f946eb080351d057698528/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c5fe114a6dd480a510b6d3661d09d67d1622c4bf20660a474507aaee7eeeee9", size = 515189, upload-time = "2025-07-01T15:54:40.57Z" }, - { url = "https://files.pythonhosted.org/packages/13/45/cbf07fc03ba7a9b54662c9badb58294ecfb24f828b9732970bd1a431ed5c/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3100b3090269f3a7ea727b06a6080d4eb7439dca4c0e91a07c5d133bb1727ea7", size = 406998, upload-time = "2025-07-01T15:54:43.025Z" }, - { url = 
"https://files.pythonhosted.org/packages/6c/b0/8fa5e36e58657997873fd6a1cf621285ca822ca75b4b3434ead047daa307/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c03c9b0c64afd0320ae57de4c982801271c0c211aa2d37f3003ff5feb75bb04", size = 385903, upload-time = "2025-07-01T15:54:44.752Z" }, - { url = "https://files.pythonhosted.org/packages/4b/f7/b25437772f9f57d7a9fbd73ed86d0dcd76b4c7c6998348c070d90f23e315/rpds_py-0.26.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5963b72ccd199ade6ee493723d18a3f21ba7d5b957017607f815788cef50eaf1", size = 419785, upload-time = "2025-07-01T15:54:46.043Z" }, - { url = "https://files.pythonhosted.org/packages/a7/6b/63ffa55743dfcb4baf2e9e77a0b11f7f97ed96a54558fcb5717a4b2cd732/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da4e873860ad5bab3291438525cae80169daecbfafe5657f7f5fb4d6b3f96b9", size = 561329, upload-time = "2025-07-01T15:54:47.64Z" }, - { url = "https://files.pythonhosted.org/packages/2f/07/1f4f5e2886c480a2346b1e6759c00278b8a69e697ae952d82ae2e6ee5db0/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5afaddaa8e8c7f1f7b4c5c725c0070b6eed0228f705b90a1732a48e84350f4e9", size = 590875, upload-time = "2025-07-01T15:54:48.9Z" }, - { url = "https://files.pythonhosted.org/packages/cc/bc/e6639f1b91c3a55f8c41b47d73e6307051b6e246254a827ede730624c0f8/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4916dc96489616a6f9667e7526af8fa693c0fdb4f3acb0e5d9f4400eb06a47ba", size = 556636, upload-time = "2025-07-01T15:54:50.619Z" }, - { url = "https://files.pythonhosted.org/packages/05/4c/b3917c45566f9f9a209d38d9b54a1833f2bb1032a3e04c66f75726f28876/rpds_py-0.26.0-cp313-cp313-win32.whl", hash = "sha256:2a343f91b17097c546b93f7999976fd6c9d5900617aa848c81d794e062ab302b", size = 222663, upload-time = "2025-07-01T15:54:52.023Z" }, - { url = 
"https://files.pythonhosted.org/packages/e0/0b/0851bdd6025775aaa2365bb8de0697ee2558184c800bfef8d7aef5ccde58/rpds_py-0.26.0-cp313-cp313-win_amd64.whl", hash = "sha256:0a0b60701f2300c81b2ac88a5fb893ccfa408e1c4a555a77f908a2596eb875a5", size = 234428, upload-time = "2025-07-01T15:54:53.692Z" }, - { url = "https://files.pythonhosted.org/packages/ed/e8/a47c64ed53149c75fb581e14a237b7b7cd18217e969c30d474d335105622/rpds_py-0.26.0-cp313-cp313-win_arm64.whl", hash = "sha256:257d011919f133a4746958257f2c75238e3ff54255acd5e3e11f3ff41fd14256", size = 222571, upload-time = "2025-07-01T15:54:54.822Z" }, - { url = "https://files.pythonhosted.org/packages/89/bf/3d970ba2e2bcd17d2912cb42874107390f72873e38e79267224110de5e61/rpds_py-0.26.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:529c8156d7506fba5740e05da8795688f87119cce330c244519cf706a4a3d618", size = 360475, upload-time = "2025-07-01T15:54:56.228Z" }, - { url = "https://files.pythonhosted.org/packages/82/9f/283e7e2979fc4ec2d8ecee506d5a3675fce5ed9b4b7cb387ea5d37c2f18d/rpds_py-0.26.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f53ec51f9d24e9638a40cabb95078ade8c99251945dad8d57bf4aabe86ecee35", size = 346692, upload-time = "2025-07-01T15:54:58.561Z" }, - { url = "https://files.pythonhosted.org/packages/e3/03/7e50423c04d78daf391da3cc4330bdb97042fc192a58b186f2d5deb7befd/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab504c4d654e4a29558eaa5bb8cea5fdc1703ea60a8099ffd9c758472cf913f", size = 379415, upload-time = "2025-07-01T15:54:59.751Z" }, - { url = "https://files.pythonhosted.org/packages/57/00/d11ee60d4d3b16808432417951c63df803afb0e0fc672b5e8d07e9edaaae/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd0641abca296bc1a00183fe44f7fced8807ed49d501f188faa642d0e4975b83", size = 391783, upload-time = "2025-07-01T15:55:00.898Z" }, - { url = 
"https://files.pythonhosted.org/packages/08/b3/1069c394d9c0d6d23c5b522e1f6546b65793a22950f6e0210adcc6f97c3e/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b312fecc1d017b5327afa81d4da1480f51c68810963a7336d92203dbb3d4f1", size = 512844, upload-time = "2025-07-01T15:55:02.201Z" }, - { url = "https://files.pythonhosted.org/packages/08/3b/c4fbf0926800ed70b2c245ceca99c49f066456755f5d6eb8863c2c51e6d0/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c741107203954f6fc34d3066d213d0a0c40f7bb5aafd698fb39888af277c70d8", size = 402105, upload-time = "2025-07-01T15:55:03.698Z" }, - { url = "https://files.pythonhosted.org/packages/1c/b0/db69b52ca07413e568dae9dc674627a22297abb144c4d6022c6d78f1e5cc/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3e55a7db08dc9a6ed5fb7103019d2c1a38a349ac41901f9f66d7f95750942f", size = 383440, upload-time = "2025-07-01T15:55:05.398Z" }, - { url = "https://files.pythonhosted.org/packages/4c/e1/c65255ad5b63903e56b3bb3ff9dcc3f4f5c3badde5d08c741ee03903e951/rpds_py-0.26.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e851920caab2dbcae311fd28f4313c6953993893eb5c1bb367ec69d9a39e7ed", size = 412759, upload-time = "2025-07-01T15:55:08.316Z" }, - { url = "https://files.pythonhosted.org/packages/e4/22/bb731077872377a93c6e93b8a9487d0406c70208985831034ccdeed39c8e/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dfbf280da5f876d0b00c81f26bedce274e72a678c28845453885a9b3c22ae632", size = 556032, upload-time = "2025-07-01T15:55:09.52Z" }, - { url = "https://files.pythonhosted.org/packages/e0/8b/393322ce7bac5c4530fb96fc79cc9ea2f83e968ff5f6e873f905c493e1c4/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1cc81d14ddfa53d7f3906694d35d54d9d3f850ef8e4e99ee68bc0d1e5fed9a9c", size = 585416, upload-time = "2025-07-01T15:55:11.216Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/ae/769dc372211835bf759319a7aae70525c6eb523e3371842c65b7ef41c9c6/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dca83c498b4650a91efcf7b88d669b170256bf8017a5db6f3e06c2bf031f57e0", size = 554049, upload-time = "2025-07-01T15:55:13.004Z" }, - { url = "https://files.pythonhosted.org/packages/6b/f9/4c43f9cc203d6ba44ce3146246cdc38619d92c7bd7bad4946a3491bd5b70/rpds_py-0.26.0-cp313-cp313t-win32.whl", hash = "sha256:4d11382bcaf12f80b51d790dee295c56a159633a8e81e6323b16e55d81ae37e9", size = 218428, upload-time = "2025-07-01T15:55:14.486Z" }, - { url = "https://files.pythonhosted.org/packages/7e/8b/9286b7e822036a4a977f2f1e851c7345c20528dbd56b687bb67ed68a8ede/rpds_py-0.26.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff110acded3c22c033e637dd8896e411c7d3a11289b2edf041f86663dbc791e9", size = 231524, upload-time = "2025-07-01T15:55:15.745Z" }, - { url = "https://files.pythonhosted.org/packages/55/07/029b7c45db910c74e182de626dfdae0ad489a949d84a468465cd0ca36355/rpds_py-0.26.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:da619979df60a940cd434084355c514c25cf8eb4cf9a508510682f6c851a4f7a", size = 364292, upload-time = "2025-07-01T15:55:17.001Z" }, - { url = "https://files.pythonhosted.org/packages/13/d1/9b3d3f986216b4d1f584878dca15ce4797aaf5d372d738974ba737bf68d6/rpds_py-0.26.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ea89a2458a1a75f87caabefe789c87539ea4e43b40f18cff526052e35bbb4fdf", size = 350334, upload-time = "2025-07-01T15:55:18.922Z" }, - { url = "https://files.pythonhosted.org/packages/18/98/16d5e7bc9ec715fa9668731d0cf97f6b032724e61696e2db3d47aeb89214/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feac1045b3327a45944e7dcbeb57530339f6b17baff154df51ef8b0da34c8c12", size = 384875, upload-time = "2025-07-01T15:55:20.399Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/13/aa5e2b1ec5ab0e86a5c464d53514c0467bec6ba2507027d35fc81818358e/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b818a592bd69bfe437ee8368603d4a2d928c34cffcdf77c2e761a759ffd17d20", size = 399993, upload-time = "2025-07-01T15:55:21.729Z" }, - { url = "https://files.pythonhosted.org/packages/17/03/8021810b0e97923abdbab6474c8b77c69bcb4b2c58330777df9ff69dc559/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a8b0dd8648709b62d9372fc00a57466f5fdeefed666afe3fea5a6c9539a0331", size = 516683, upload-time = "2025-07-01T15:55:22.918Z" }, - { url = "https://files.pythonhosted.org/packages/dc/b1/da8e61c87c2f3d836954239fdbbfb477bb7b54d74974d8f6fcb34342d166/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d3498ad0df07d81112aa6ec6c95a7e7b1ae00929fb73e7ebee0f3faaeabad2f", size = 408825, upload-time = "2025-07-01T15:55:24.207Z" }, - { url = "https://files.pythonhosted.org/packages/38/bc/1fc173edaaa0e52c94b02a655db20697cb5fa954ad5a8e15a2c784c5cbdd/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24a4146ccb15be237fdef10f331c568e1b0e505f8c8c9ed5d67759dac58ac246", size = 387292, upload-time = "2025-07-01T15:55:25.554Z" }, - { url = "https://files.pythonhosted.org/packages/7c/eb/3a9bb4bd90867d21916f253caf4f0d0be7098671b6715ad1cead9fe7bab9/rpds_py-0.26.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9a63785467b2d73635957d32a4f6e73d5e4df497a16a6392fa066b753e87387", size = 420435, upload-time = "2025-07-01T15:55:27.798Z" }, - { url = "https://files.pythonhosted.org/packages/cd/16/e066dcdb56f5632713445271a3f8d3d0b426d51ae9c0cca387799df58b02/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:de4ed93a8c91debfd5a047be327b7cc8b0cc6afe32a716bbbc4aedca9e2a83af", size = 562410, upload-time = "2025-07-01T15:55:29.057Z" }, - { url = 
"https://files.pythonhosted.org/packages/60/22/ddbdec7eb82a0dc2e455be44c97c71c232983e21349836ce9f272e8a3c29/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:caf51943715b12af827696ec395bfa68f090a4c1a1d2509eb4e2cb69abbbdb33", size = 590724, upload-time = "2025-07-01T15:55:30.719Z" }, - { url = "https://files.pythonhosted.org/packages/2c/b4/95744085e65b7187d83f2fcb0bef70716a1ea0a9e5d8f7f39a86e5d83424/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4a59e5bc386de021f56337f757301b337d7ab58baa40174fb150accd480bc953", size = 558285, upload-time = "2025-07-01T15:55:31.981Z" }, - { url = "https://files.pythonhosted.org/packages/37/37/6309a75e464d1da2559446f9c811aa4d16343cebe3dbb73701e63f760caa/rpds_py-0.26.0-cp314-cp314-win32.whl", hash = "sha256:92c8db839367ef16a662478f0a2fe13e15f2227da3c1430a782ad0f6ee009ec9", size = 223459, upload-time = "2025-07-01T15:55:33.312Z" }, - { url = "https://files.pythonhosted.org/packages/d9/6f/8e9c11214c46098b1d1391b7e02b70bb689ab963db3b19540cba17315291/rpds_py-0.26.0-cp314-cp314-win_amd64.whl", hash = "sha256:b0afb8cdd034150d4d9f53926226ed27ad15b7f465e93d7468caaf5eafae0d37", size = 236083, upload-time = "2025-07-01T15:55:34.933Z" }, - { url = "https://files.pythonhosted.org/packages/47/af/9c4638994dd623d51c39892edd9d08e8be8220a4b7e874fa02c2d6e91955/rpds_py-0.26.0-cp314-cp314-win_arm64.whl", hash = "sha256:ca3f059f4ba485d90c8dc75cb5ca897e15325e4e609812ce57f896607c1c0867", size = 223291, upload-time = "2025-07-01T15:55:36.202Z" }, - { url = "https://files.pythonhosted.org/packages/4d/db/669a241144460474aab03e254326b32c42def83eb23458a10d163cb9b5ce/rpds_py-0.26.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:5afea17ab3a126006dc2f293b14ffc7ef3c85336cf451564a0515ed7648033da", size = 361445, upload-time = "2025-07-01T15:55:37.483Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2d/133f61cc5807c6c2fd086a46df0eb8f63a23f5df8306ff9f6d0fd168fecc/rpds_py-0.26.0-cp314-cp314t-macosx_11_0_arm64.whl", 
hash = "sha256:69f0c0a3df7fd3a7eec50a00396104bb9a843ea6d45fcc31c2d5243446ffd7a7", size = 347206, upload-time = "2025-07-01T15:55:38.828Z" }, - { url = "https://files.pythonhosted.org/packages/05/bf/0e8fb4c05f70273469eecf82f6ccf37248558526a45321644826555db31b/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:801a71f70f9813e82d2513c9a96532551fce1e278ec0c64610992c49c04c2dad", size = 380330, upload-time = "2025-07-01T15:55:40.175Z" }, - { url = "https://files.pythonhosted.org/packages/d4/a8/060d24185d8b24d3923322f8d0ede16df4ade226a74e747b8c7c978e3dd3/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df52098cde6d5e02fa75c1f6244f07971773adb4a26625edd5c18fee906fa84d", size = 392254, upload-time = "2025-07-01T15:55:42.015Z" }, - { url = "https://files.pythonhosted.org/packages/b9/7b/7c2e8a9ee3e6bc0bae26bf29f5219955ca2fbb761dca996a83f5d2f773fe/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bc596b30f86dc6f0929499c9e574601679d0341a0108c25b9b358a042f51bca", size = 516094, upload-time = "2025-07-01T15:55:43.603Z" }, - { url = "https://files.pythonhosted.org/packages/75/d6/f61cafbed8ba1499b9af9f1777a2a199cd888f74a96133d8833ce5eaa9c5/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dfbe56b299cf5875b68eb6f0ebaadc9cac520a1989cac0db0765abfb3709c19", size = 402889, upload-time = "2025-07-01T15:55:45.275Z" }, - { url = "https://files.pythonhosted.org/packages/92/19/c8ac0a8a8df2dd30cdec27f69298a5c13e9029500d6d76718130f5e5be10/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac64f4b2bdb4ea622175c9ab7cf09444e412e22c0e02e906978b3b488af5fde8", size = 384301, upload-time = "2025-07-01T15:55:47.098Z" }, - { url = 
"https://files.pythonhosted.org/packages/41/e1/6b1859898bc292a9ce5776016c7312b672da00e25cec74d7beced1027286/rpds_py-0.26.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:181ef9b6bbf9845a264f9aa45c31836e9f3c1f13be565d0d010e964c661d1e2b", size = 412891, upload-time = "2025-07-01T15:55:48.412Z" }, - { url = "https://files.pythonhosted.org/packages/ef/b9/ceb39af29913c07966a61367b3c08b4f71fad841e32c6b59a129d5974698/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:49028aa684c144ea502a8e847d23aed5e4c2ef7cadfa7d5eaafcb40864844b7a", size = 557044, upload-time = "2025-07-01T15:55:49.816Z" }, - { url = "https://files.pythonhosted.org/packages/2f/27/35637b98380731a521f8ec4f3fd94e477964f04f6b2f8f7af8a2d889a4af/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e5d524d68a474a9688336045bbf76cb0def88549c1b2ad9dbfec1fb7cfbe9170", size = 585774, upload-time = "2025-07-01T15:55:51.192Z" }, - { url = "https://files.pythonhosted.org/packages/52/d9/3f0f105420fecd18551b678c9a6ce60bd23986098b252a56d35781b3e7e9/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c1851f429b822831bd2edcbe0cfd12ee9ea77868f8d3daf267b189371671c80e", size = 554886, upload-time = "2025-07-01T15:55:52.541Z" }, - { url = "https://files.pythonhosted.org/packages/6b/c5/347c056a90dc8dd9bc240a08c527315008e1b5042e7a4cf4ac027be9d38a/rpds_py-0.26.0-cp314-cp314t-win32.whl", hash = "sha256:7bdb17009696214c3b66bb3590c6d62e14ac5935e53e929bcdbc5a495987a84f", size = 219027, upload-time = "2025-07-01T15:55:53.874Z" }, - { url = "https://files.pythonhosted.org/packages/75/04/5302cea1aa26d886d34cadbf2dc77d90d7737e576c0065f357b96dc7a1a6/rpds_py-0.26.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f14440b9573a6f76b4ee4770c13f0b5921f71dde3b6fcb8dabbefd13b7fe05d7", size = 232821, upload-time = "2025-07-01T15:55:55.167Z" }, -] - [[package]] name = "ruff" version = "0.11.11" @@ -1625,68 +1143,3 @@ sdist = { url = 
"https://files.pythonhosted.org/packages/72/b2/e3edc608823348e62 wheels = [ { url = "https://files.pythonhosted.org/packages/33/6b/0dc75b64a764ea1cb8e4c32d1fb273c147304d4e5483cd58be482dc62e45/xmod-1.8.1-py3-none-any.whl", hash = "sha256:a24e9458a4853489042522bdca9e50ee2eac5ab75c809a91150a8a7f40670d48", size = 4610, upload-time = "2024-01-04T18:03:16.078Z" }, ] - -[[package]] -name = "yarl" -version = "1.20.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "idna" }, - { name = "multidict" }, - { name = "propcache" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428, upload-time = "2025-06-10T00:46:09.923Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", size = 133667, upload-time = "2025-06-10T00:43:44.369Z" }, - { url = "https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", size = 91025, upload-time = "2025-06-10T00:43:46.295Z" }, - { url = "https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", size = 89709, upload-time = "2025-06-10T00:43:48.22Z" }, - { url = "https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", size = 352287, upload-time = "2025-06-10T00:43:49.924Z" }, - { url = "https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", size = 345429, upload-time = "2025-06-10T00:43:51.7Z" }, - { url = "https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", size = 365429, upload-time = "2025-06-10T00:43:53.494Z" }, - { url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", size = 363862, upload-time = "2025-06-10T00:43:55.766Z" }, - { url = "https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", size = 355616, upload-time = "2025-06-10T00:43:58.056Z" }, - { url = "https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", size = 339954, upload-time = "2025-06-10T00:43:59.773Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", size = 365575, upload-time = "2025-06-10T00:44:02.051Z" }, - { url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", size = 365061, upload-time = "2025-06-10T00:44:04.196Z" }, - { url = "https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", size = 364142, upload-time = "2025-06-10T00:44:06.527Z" }, - { url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", size = 381894, upload-time = "2025-06-10T00:44:08.379Z" }, - { url = "https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", size = 383378, upload-time = "2025-06-10T00:44:10.51Z" }, - { url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", size = 374069, upload-time = "2025-06-10T00:44:12.834Z" }, - { url = 
"https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", size = 81249, upload-time = "2025-06-10T00:44:14.731Z" }, - { url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710, upload-time = "2025-06-10T00:44:16.716Z" }, - { url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811, upload-time = "2025-06-10T00:44:18.933Z" }, - { url = "https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078, upload-time = "2025-06-10T00:44:20.635Z" }, - { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748, upload-time = "2025-06-10T00:44:22.34Z" }, - { url = "https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595, upload-time = "2025-06-10T00:44:24.314Z" }, - { url = 
"https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616, upload-time = "2025-06-10T00:44:26.167Z" }, - { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324, upload-time = "2025-06-10T00:44:27.915Z" }, - { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676, upload-time = "2025-06-10T00:44:30.041Z" }, - { url = "https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614, upload-time = "2025-06-10T00:44:32.171Z" }, - { url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766, upload-time = "2025-06-10T00:44:34.494Z" }, - { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615, upload-time = 
"2025-06-10T00:44:36.856Z" }, - { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982, upload-time = "2025-06-10T00:44:39.141Z" }, - { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792, upload-time = "2025-06-10T00:44:40.934Z" }, - { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049, upload-time = "2025-06-10T00:44:42.854Z" }, - { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774, upload-time = "2025-06-10T00:44:45.275Z" }, - { url = "https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252, upload-time = "2025-06-10T00:44:47.31Z" }, - { url = "https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198, upload-time = "2025-06-10T00:44:49.164Z" }, - { url = 
"https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346, upload-time = "2025-06-10T00:44:51.182Z" }, - { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826, upload-time = "2025-06-10T00:44:52.883Z" }, - { url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217, upload-time = "2025-06-10T00:44:54.658Z" }, - { url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700, upload-time = "2025-06-10T00:44:56.784Z" }, - { url = "https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644, upload-time = "2025-06-10T00:44:59.071Z" }, - { url = "https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452, upload-time = "2025-06-10T00:45:01.605Z" }, - { url = 
"https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", size = 346378, upload-time = "2025-06-10T00:45:03.946Z" }, - { url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261, upload-time = "2025-06-10T00:45:05.992Z" }, - { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987, upload-time = "2025-06-10T00:45:08.227Z" }, - { url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361, upload-time = "2025-06-10T00:45:10.11Z" }, - { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460, upload-time = "2025-06-10T00:45:12.055Z" }, - { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486, upload-time = "2025-06-10T00:45:13.995Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219, upload-time = "2025-06-10T00:45:16.479Z" }, - { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693, upload-time = "2025-06-10T00:45:18.399Z" }, - { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803, upload-time = "2025-06-10T00:45:20.677Z" }, - { url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709, upload-time = "2025-06-10T00:45:23.221Z" }, - { url = "https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591, upload-time = "2025-06-10T00:45:25.793Z" }, - { url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003, upload-time = "2025-06-10T00:45:27.752Z" }, - { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = 
"sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, -] From 2735de0a647d2d9f676453cc7a586da90aa75742 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 4 Jul 2025 17:13:18 -0400 Subject: [PATCH 099/565] chore(workflows): clean up whitespace in YAML files --- .github/workflows/claude-code-review.yml | 21 ++++++++++----------- .github/workflows/claude.yml | 13 ++++++------- 2 files changed, 16 insertions(+), 18 deletions(-) diff --git a/.github/workflows/claude-code-review.yml b/.github/workflows/claude-code-review.yml index ecd27d0a..e9ebc818 100644 --- a/.github/workflows/claude-code-review.yml +++ b/.github/workflows/claude-code-review.yml @@ -17,14 +17,14 @@ jobs: # github.event.pull_request.user.login == 'external-contributor' || # github.event.pull_request.user.login == 'new-developer' || # github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' - + runs-on: ubuntu-latest permissions: contents: read pull-requests: read issues: read id-token: write - + steps: - name: Checkout repository uses: actions/checkout@v4 @@ -36,10 +36,10 @@ jobs: uses: anthropics/claude-code-action@beta with: anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} - + # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4) # model: "claude-opus-4-20250514" - + # Direct prompt for automated review (no @claude mention needed) direct_prompt: | Please review this pull request and provide feedback on: @@ -48,9 +48,9 @@ jobs: - Performance considerations - Security concerns - Test coverage - + Be constructive and helpful in your feedback. 
- + # Optional: Customize review based on file types # direct_prompt: | # Review this PR focusing on: @@ -58,18 +58,17 @@ jobs: # - For API endpoints: Security, input validation, and error handling # - For React components: Performance, accessibility, and best practices # - For tests: Coverage, edge cases, and test quality - + # Optional: Different prompts for different authors # direct_prompt: | - # ${{ github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' && + # ${{ github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' && # 'Welcome! Please review this PR from a first-time contributor. Be encouraging and provide detailed explanations for any suggestions.' || # 'Please provide a thorough code review focusing on our coding standards and best practices.' }} - + # Optional: Add specific tools for running tests or linting # allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)" - + # Optional: Skip review for certain conditions # if: | # !contains(github.event.pull_request.title, '[skip-review]') && # !contains(github.event.pull_request.title, '[WIP]') - diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index 58d0fa2e..8658b58d 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -34,26 +34,25 @@ jobs: uses: anthropics/claude-code-action@beta with: anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} - + # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4) # model: "claude-opus-4-20250514" - + # Optional: Customize the trigger phrase (default: @claude) # trigger_phrase: "/claude" - + # Optional: Trigger when specific user is assigned to an issue # assignee_trigger: "claude-bot" - + # Optional: Allow Claude to run specific commands # allowed_tools: "Bash(npm install),Bash(npm run build),Bash(npm run test:*),Bash(npm run lint:*)" - + # Optional: Add custom instructions for Claude to customize its behavior for your project # 
custom_instructions: | # Follow our coding standards # Ensure all new code has tests # Use TypeScript for new files - + # Optional: Custom environment variables for Claude # claude_env: | # NODE_ENV: test - From b69a504ca78ac6fa1016dc46cab6b1dfedd8e5d6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 4 Jul 2025 17:20:14 -0400 Subject: [PATCH 100/565] Reapply "Merge pull request #17 from stickerdaniel/feat/http-transport" This reverts commit 0b8c73d688a32a66072324c7fb07ebb449adf85a. --- .vscode/tasks.json | 52 ++- linkedin_mcp_server/cli.py | 7 +- linkedin_mcp_server/config/loaders.py | 46 +- linkedin_mcp_server/config/schema.py | 6 +- linkedin_mcp_server/config/secrets.py | 7 +- linkedin_mcp_server/drivers/chrome.py | 10 +- linkedin_mcp_server/server.py | 7 +- linkedin_mcp_server/tools/company.py | 5 +- linkedin_mcp_server/tools/job.py | 2 +- linkedin_mcp_server/tools/person.py | 5 +- main.py | 25 +- pyproject.toml | 5 +- smithery.yaml | 29 -- uv.lock | 649 ++++++++++++++++++++++++-- 14 files changed, 744 insertions(+), 111 deletions(-) delete mode 100644 smithery.yaml diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 4cbf6ef6..4c613079 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -1,6 +1,23 @@ { "version": "2.0.0", "tasks": [ + { + "label": "bunx @modelcontextprotocol/inspector", + "detail": "Run the Model Context Protocol Inspector", + "type": "shell", + "command": "bunx", + "args": ["@modelcontextprotocol/inspector"], + "group": { + "kind": "test", + "isDefault": true + }, + "presentation": { + "reveal": "always", + "panel": "new", + "focus": true + }, + "problemMatcher": [] + }, { "label": "uv run pre-commit run --all-files", "detail": "Run pre-commit hooks on all files", @@ -14,7 +31,7 @@ ], "group": { "kind": "test", - "isDefault": true + "isDefault": false }, "presentation": { "reveal": "never", @@ -57,9 +74,38 @@ "--no-headless", "--no-lazy-init" ], + "group": { + "kind": "build" + }, + "presentation": { + "reveal": "always", 
+ "panel": "new", + "focus": true + }, + "problemMatcher": [] + }, + { + "label": "uv run main.py --transport streamable-http --no-setup", + "detail": "Start HTTP MCP server on localhost:8000/mcp", + "type": "shell", + "command": "uv", + "args": [ + "run", + "main.py", + "--transport", + "streamable-http", + "--host", + "127.0.0.1", + "--port", + "8000", + "--path", + "/mcp", + "--no-setup" + ], + "isBackground": true, "group": { "kind": "build", - "isDefault": true + "isDefault": false }, "presentation": { "reveal": "always", @@ -86,6 +132,6 @@ "focus": false }, "problemMatcher": [] - } + }, ] } diff --git a/linkedin_mcp_server/cli.py b/linkedin_mcp_server/cli.py index e9833624..7c6457d5 100644 --- a/linkedin_mcp_server/cli.py +++ b/linkedin_mcp_server/cli.py @@ -5,11 +5,12 @@ This module handles the command-line interface and configuration management. """ -from typing import Dict, Any, List -import os import json -import subprocess import logging +import os +import subprocess +from typing import Any, Dict, List + import pyperclip # type: ignore from linkedin_mcp_server.config import get_config diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index 3871d4e9..08183638 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -44,6 +44,18 @@ def load_from_env(config: AppConfig) -> AppConfig: # Headless mode if os.environ.get("HEADLESS") in ("0", "false", "False", "no", "No"): config.chrome.headless = False + elif os.environ.get("HEADLESS") in ("1", "true", "True", "yes", "Yes"): + config.chrome.headless = True + + # Non-interactive mode + if os.environ.get("NON_INTERACTIVE") in ("1", "true", "True", "yes", "Yes"): + config.chrome.non_interactive = True + + # Lazy initialization + if os.environ.get("LAZY_INIT") in ("1", "true", "True", "yes", "Yes"): + config.server.lazy_init = True + elif os.environ.get("LAZY_INIT") in ("0", "false", "False", "no", "No"): + config.server.lazy_init = 
False return config @@ -80,9 +92,30 @@ def load_from_args(config: AppConfig) -> AppConfig: parser.add_argument( "--transport", - choices=["stdio", "sse"], + choices=["stdio", "streamable-http"], + default=None, + help="Specify the transport mode (stdio or streamable-http)", + ) + + parser.add_argument( + "--host", + type=str, + default=None, + help="HTTP server host (default: 127.0.0.1)", + ) + + parser.add_argument( + "--port", + type=int, + default=None, + help="HTTP server port (default: 8000)", + ) + + parser.add_argument( + "--path", + type=str, default=None, - help="Specify the transport mode (stdio or sse)", + help="HTTP server path (default: /mcp)", ) parser.add_argument( @@ -109,6 +142,15 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.transport: config.server.transport = args.transport + if args.host: + config.server.host = args.host + + if args.port: + config.server.port = args.port + + if args.path: + config.server.path = args.path + if args.chromedriver: config.chrome.chromedriver_path = args.chromedriver diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index 8d92585a..55d912f5 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -26,10 +26,14 @@ class LinkedInConfig: class ServerConfig: """MCP server configuration.""" - transport: Literal["stdio", "sse"] = "stdio" + transport: Literal["stdio", "streamable-http"] = "stdio" lazy_init: bool = True debug: bool = False setup: bool = True + # HTTP transport configuration + host: str = "127.0.0.1" + port: int = 8000 + path: str = "/mcp" @dataclass diff --git a/linkedin_mcp_server/config/secrets.py b/linkedin_mcp_server/config/secrets.py index 7e42010d..0acc3d88 100644 --- a/linkedin_mcp_server/config/secrets.py +++ b/linkedin_mcp_server/config/secrets.py @@ -1,12 +1,15 @@ # src/linkedin_mcp_server/config/secrets.py -from typing import Dict, Optional import logging +from typing import Dict, Optional + import 
inquirer # type: ignore + from linkedin_mcp_server.config import get_config + from .providers import ( get_credentials_from_keyring, - save_credentials_to_keyring, get_keyring_name, + save_credentials_to_keyring, ) logger = logging.getLogger(__name__) diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 84e39045..63855e04 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -5,17 +5,19 @@ This module handles the creation and management of Chrome WebDriver instances. """ +import os import sys from typing import Dict, Optional -import os + +import inquirer # type: ignore from selenium import webdriver +from selenium.common.exceptions import WebDriverException from selenium.webdriver.chrome.options import Options from selenium.webdriver.chrome.service import Service -from selenium.common.exceptions import WebDriverException -import inquirer # type: ignore + from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.config.secrets import get_credentials from linkedin_mcp_server.config.providers import clear_credentials_from_keyring +from linkedin_mcp_server.config.secrets import get_credentials # Global driver storage to reuse sessions active_drivers: Dict[str, webdriver.Chrome] = {} diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index 8cb959a6..3e746cd2 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -5,13 +5,14 @@ This module creates the MCP server and registers all the LinkedIn tools. 
""" -from typing import Dict, Any -from mcp.server.fastmcp import FastMCP +from typing import Any, Dict + +from fastmcp import FastMCP from linkedin_mcp_server.drivers.chrome import active_drivers -from linkedin_mcp_server.tools.person import register_person_tools from linkedin_mcp_server.tools.company import register_company_tools from linkedin_mcp_server.tools.job import register_job_tools +from linkedin_mcp_server.tools.person import register_person_tools def create_mcp_server() -> FastMCP: diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 071797d2..4690a940 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -5,8 +5,9 @@ This module provides tools for scraping LinkedIn company profiles. """ -from typing import Dict, Any, List -from mcp.server.fastmcp import FastMCP +from typing import Any, Dict, List + +from fastmcp import FastMCP from linkedin_scraper import Company from linkedin_mcp_server.drivers.chrome import get_or_create_driver diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 88c3f027..1af2a91b 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -7,8 +7,8 @@ from typing import Any, Dict, List +from fastmcp import FastMCP from linkedin_scraper import Job, JobSearch -from mcp.server.fastmcp import FastMCP from linkedin_mcp_server.drivers.chrome import get_or_create_driver diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 70957c93..236fa7f4 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -5,8 +5,9 @@ This module provides tools for scraping LinkedIn person profiles. 
""" -from typing import Dict, Any, List -from mcp.server.fastmcp import FastMCP +from typing import Any, Dict, List + +from fastmcp import FastMCP from linkedin_scraper import Person from linkedin_mcp_server.drivers.chrome import get_or_create_driver diff --git a/main.py b/main.py index 351dc3ea..d48520e3 100644 --- a/main.py +++ b/main.py @@ -3,19 +3,21 @@ LinkedIn MCP Server - A Model Context Protocol server for LinkedIn integration. """ -import sys import logging -import inquirer # type: ignore +import sys from typing import Literal +import inquirer # type: ignore + +from linkedin_mcp_server.cli import print_claude_config + # Import the new centralized configuration from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.cli import print_claude_config from linkedin_mcp_server.drivers.chrome import initialize_driver from linkedin_mcp_server.server import create_mcp_server, shutdown_handler -def choose_transport_interactive() -> Literal["stdio", "sse"]: +def choose_transport_interactive() -> Literal["stdio", "streamable-http"]: """Prompt user for transport mode using inquirer.""" questions = [ inquirer.List( @@ -23,7 +25,7 @@ def choose_transport_interactive() -> Literal["stdio", "sse"]: message="Choose mcp transport mode", choices=[ ("stdio (Default CLI mode)", "stdio"), - ("sse (Server-Sent Events HTTP mode)", "sse"), + ("streamable-http (HTTP server mode)", "streamable-http"), ], default="stdio", ) @@ -67,7 +69,18 @@ def main() -> None: # Start server print(f"\n๐Ÿš€ Running LinkedIn MCP server ({transport.upper()} mode)...") - mcp.run(transport=transport) + if transport == "streamable-http": + print( + f"๐Ÿ“ก HTTP server will be available at http://{config.server.host}:{config.server.port}{config.server.path}" + ) + mcp.run( + transport=transport, + host=config.server.host, + port=config.server.port, + path=config.server.path, + ) + else: + mcp.run(transport=transport) def exit_gracefully(exit_code: int = 0) -> None: diff --git 
a/pyproject.toml b/pyproject.toml index 5e113ea1..4e109bba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,11 +5,10 @@ description = "MCP server for LinkedIn profile, company, and job scraping with C readme = "README.md" requires-python = ">=3.12" dependencies = [ - "httpx>=0.28.1", + "fastmcp>=2.10.1", "inquirer>=3.4.0", "keyring>=25.6.0", "linkedin-scraper", - "mcp[cli]>=1.6.0", "pyperclip>=1.9.0", ] @@ -21,8 +20,10 @@ linkedin-scraper = { git = "https://github.com/joeyism/linkedin_scraper.git" } [dependency-groups] dev = [ + "aiohttp>=3.12.13", "pre-commit>=4.2.0", "pytest>=8.3.5", + "pytest-asyncio>=1.0.0", "pytest-cov>=6.1.1", "ruff>=0.11.11", "ty>=0.0.1a12", diff --git a/smithery.yaml b/smithery.yaml deleted file mode 100644 index 548429bd..00000000 --- a/smithery.yaml +++ /dev/null @@ -1,29 +0,0 @@ -# Smithery configuration file: https://smithery.ai/docs/build/project-config -version: 1 -start: - command: - - docker - - run - - -i - - --rm - - -e - - LINKEDIN_EMAIL=${LINKEDIN_EMAIL} - - -e - - LINKEDIN_PASSWORD=${LINKEDIN_PASSWORD} - - stickerdaniel/linkedin-mcp-server -configSchema: - # JSON Schema defining the configuration options for the MCP. 
- type: object - properties: - LINKEDIN_EMAIL: - type: string - description: Email for LinkedIn login - LINKEDIN_PASSWORD: - type: string - description: Password for LinkedIn login - required: - - LINKEDIN_EMAIL - - LINKEDIN_PASSWORD -exampleConfig: - LINKEDIN_EMAIL: example.user@example.com - LINKEDIN_PASSWORD: yourLinkedInPassword diff --git a/uv.lock b/uv.lock index 4e0e51b4..11a06d54 100644 --- a/uv.lock +++ b/uv.lock @@ -2,6 +2,79 @@ version = 1 revision = 2 requires-python = ">=3.12" +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.12.13" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/6e/ab88e7cb2a4058bed2f7870276454f85a7c56cd6da79349eb314fc7bbcaa/aiohttp-3.12.13.tar.gz", hash = "sha256:47e2da578528264a12e4e3dd8dd72a7289e5f812758fe086473fab037a10fcce", size = 7819160, upload-time = "2025-06-14T15:15:41.354Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b4/6a/ce40e329788013cd190b1d62bbabb2b6a9673ecb6d836298635b939562ef/aiohttp-3.12.13-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0aa580cf80558557285b49452151b9c69f2fa3ad94c5c9e76e684719a8791b73", size = 700491, upload-time = "2025-06-14T15:14:00.048Z" }, + { url = "https://files.pythonhosted.org/packages/28/d9/7150d5cf9163e05081f1c5c64a0cdf3c32d2f56e2ac95db2a28fe90eca69/aiohttp-3.12.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b103a7e414b57e6939cc4dece8e282cfb22043efd0c7298044f6594cf83ab347", size = 475104, upload-time = "2025-06-14T15:14:01.691Z" }, + { url = "https://files.pythonhosted.org/packages/f8/91/d42ba4aed039ce6e449b3e2db694328756c152a79804e64e3da5bc19dffc/aiohttp-3.12.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f64e748e9e741d2eccff9597d09fb3cd962210e5b5716047cbb646dc8fe06f", size = 467948, upload-time = "2025-06-14T15:14:03.561Z" }, + { url = "https://files.pythonhosted.org/packages/99/3b/06f0a632775946981d7c4e5a865cddb6e8dfdbaed2f56f9ade7bb4a1039b/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c955989bf4c696d2ededc6b0ccb85a73623ae6e112439398935362bacfaaf6", size = 1714742, upload-time = "2025-06-14T15:14:05.558Z" }, + { url = "https://files.pythonhosted.org/packages/92/a6/2552eebad9ec5e3581a89256276009e6a974dc0793632796af144df8b740/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d640191016763fab76072c87d8854a19e8e65d7a6fcfcbf017926bdbbb30a7e5", size = 1697393, upload-time = "2025-06-14T15:14:07.194Z" }, + { url = "https://files.pythonhosted.org/packages/d8/9f/bd08fdde114b3fec7a021381b537b21920cdd2aa29ad48c5dffd8ee314f1/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dc507481266b410dede95dd9f26c8d6f5a14315372cc48a6e43eac652237d9b", size = 1752486, upload-time = "2025-06-14T15:14:08.808Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/e1/affdea8723aec5bd0959171b5490dccd9a91fcc505c8c26c9f1dca73474d/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8a94daa873465d518db073bd95d75f14302e0208a08e8c942b2f3f1c07288a75", size = 1798643, upload-time = "2025-06-14T15:14:10.767Z" }, + { url = "https://files.pythonhosted.org/packages/f3/9d/666d856cc3af3a62ae86393baa3074cc1d591a47d89dc3bf16f6eb2c8d32/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f52420cde4ce0bb9425a375d95577fe082cb5721ecb61da3049b55189e4e6", size = 1718082, upload-time = "2025-06-14T15:14:12.38Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ce/3c185293843d17be063dada45efd2712bb6bf6370b37104b4eda908ffdbd/aiohttp-3.12.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f7df1f620ec40f1a7fbcb99ea17d7326ea6996715e78f71a1c9a021e31b96b8", size = 1633884, upload-time = "2025-06-14T15:14:14.415Z" }, + { url = "https://files.pythonhosted.org/packages/3a/5b/f3413f4b238113be35dfd6794e65029250d4b93caa0974ca572217745bdb/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3062d4ad53b36e17796dce1c0d6da0ad27a015c321e663657ba1cc7659cfc710", size = 1694943, upload-time = "2025-06-14T15:14:16.48Z" }, + { url = "https://files.pythonhosted.org/packages/82/c8/0e56e8bf12081faca85d14a6929ad5c1263c146149cd66caa7bc12255b6d/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:8605e22d2a86b8e51ffb5253d9045ea73683d92d47c0b1438e11a359bdb94462", size = 1716398, upload-time = "2025-06-14T15:14:18.589Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f3/33192b4761f7f9b2f7f4281365d925d663629cfaea093a64b658b94fc8e1/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:54fbbe6beafc2820de71ece2198458a711e224e116efefa01b7969f3e2b3ddae", size = 1657051, upload-time = "2025-06-14T15:14:20.223Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/0b/26ddd91ca8f84c48452431cb4c5dd9523b13bc0c9766bda468e072ac9e29/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:050bd277dfc3768b606fd4eae79dd58ceda67d8b0b3c565656a89ae34525d15e", size = 1736611, upload-time = "2025-06-14T15:14:21.988Z" }, + { url = "https://files.pythonhosted.org/packages/c3/8d/e04569aae853302648e2c138a680a6a2f02e374c5b6711732b29f1e129cc/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2637a60910b58f50f22379b6797466c3aa6ae28a6ab6404e09175ce4955b4e6a", size = 1764586, upload-time = "2025-06-14T15:14:23.979Z" }, + { url = "https://files.pythonhosted.org/packages/ac/98/c193c1d1198571d988454e4ed75adc21c55af247a9fda08236602921c8c8/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e986067357550d1aaa21cfe9897fa19e680110551518a5a7cf44e6c5638cb8b5", size = 1724197, upload-time = "2025-06-14T15:14:25.692Z" }, + { url = "https://files.pythonhosted.org/packages/e7/9e/07bb8aa11eec762c6b1ff61575eeeb2657df11ab3d3abfa528d95f3e9337/aiohttp-3.12.13-cp312-cp312-win32.whl", hash = "sha256:ac941a80aeea2aaae2875c9500861a3ba356f9ff17b9cb2dbfb5cbf91baaf5bf", size = 421771, upload-time = "2025-06-14T15:14:27.364Z" }, + { url = "https://files.pythonhosted.org/packages/52/66/3ce877e56ec0813069cdc9607cd979575859c597b6fb9b4182c6d5f31886/aiohttp-3.12.13-cp312-cp312-win_amd64.whl", hash = "sha256:671f41e6146a749b6c81cb7fd07f5a8356d46febdaaaf07b0e774ff04830461e", size = 447869, upload-time = "2025-06-14T15:14:29.05Z" }, + { url = "https://files.pythonhosted.org/packages/11/0f/db19abdf2d86aa1deec3c1e0e5ea46a587b97c07a16516b6438428b3a3f8/aiohttp-3.12.13-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d4a18e61f271127465bdb0e8ff36e8f02ac4a32a80d8927aa52371e93cd87938", size = 694910, upload-time = "2025-06-14T15:14:30.604Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/81/0ab551e1b5d7f1339e2d6eb482456ccbe9025605b28eed2b1c0203aaaade/aiohttp-3.12.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:532542cb48691179455fab429cdb0d558b5e5290b033b87478f2aa6af5d20ace", size = 472566, upload-time = "2025-06-14T15:14:32.275Z" }, + { url = "https://files.pythonhosted.org/packages/34/3f/6b7d336663337672d29b1f82d1f252ec1a040fe2d548f709d3f90fa2218a/aiohttp-3.12.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d7eea18b52f23c050ae9db5d01f3d264ab08f09e7356d6f68e3f3ac2de9dfabb", size = 464856, upload-time = "2025-06-14T15:14:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/26/7f/32ca0f170496aa2ab9b812630fac0c2372c531b797e1deb3deb4cea904bd/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad7c8e5c25f2a26842a7c239de3f7b6bfb92304593ef997c04ac49fb703ff4d7", size = 1703683, upload-time = "2025-06-14T15:14:36.034Z" }, + { url = "https://files.pythonhosted.org/packages/ec/53/d5513624b33a811c0abea8461e30a732294112318276ce3dbf047dbd9d8b/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6af355b483e3fe9d7336d84539fef460120c2f6e50e06c658fe2907c69262d6b", size = 1684946, upload-time = "2025-06-14T15:14:38Z" }, + { url = "https://files.pythonhosted.org/packages/37/72/4c237dd127827b0247dc138d3ebd49c2ded6114c6991bbe969058575f25f/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a95cf9f097498f35c88e3609f55bb47b28a5ef67f6888f4390b3d73e2bac6177", size = 1737017, upload-time = "2025-06-14T15:14:39.951Z" }, + { url = "https://files.pythonhosted.org/packages/0d/67/8a7eb3afa01e9d0acc26e1ef847c1a9111f8b42b82955fcd9faeb84edeb4/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8ed8c38a1c584fe99a475a8f60eefc0b682ea413a84c6ce769bb19a7ff1c5ef", size = 1786390, upload-time = "2025-06-14T15:14:42.151Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/19/0377df97dd0176ad23cd8cad4fd4232cfeadcec6c1b7f036315305c98e3f/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0b9170d5d800126b5bc89d3053a2363406d6e327afb6afaeda2d19ee8bb103", size = 1708719, upload-time = "2025-06-14T15:14:44.039Z" }, + { url = "https://files.pythonhosted.org/packages/61/97/ade1982a5c642b45f3622255173e40c3eed289c169f89d00eeac29a89906/aiohttp-3.12.13-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:372feeace612ef8eb41f05ae014a92121a512bd5067db8f25101dd88a8db11da", size = 1622424, upload-time = "2025-06-14T15:14:45.945Z" }, + { url = "https://files.pythonhosted.org/packages/99/ab/00ad3eea004e1d07ccc406e44cfe2b8da5acb72f8c66aeeb11a096798868/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a946d3702f7965d81f7af7ea8fb03bb33fe53d311df48a46eeca17e9e0beed2d", size = 1675447, upload-time = "2025-06-14T15:14:47.911Z" }, + { url = "https://files.pythonhosted.org/packages/3f/fe/74e5ce8b2ccaba445fe0087abc201bfd7259431d92ae608f684fcac5d143/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a0c4725fae86555bbb1d4082129e21de7264f4ab14baf735278c974785cd2041", size = 1707110, upload-time = "2025-06-14T15:14:50.334Z" }, + { url = "https://files.pythonhosted.org/packages/ef/c4/39af17807f694f7a267bd8ab1fbacf16ad66740862192a6c8abac2bff813/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b28ea2f708234f0a5c44eb6c7d9eb63a148ce3252ba0140d050b091b6e842d1", size = 1649706, upload-time = "2025-06-14T15:14:52.378Z" }, + { url = "https://files.pythonhosted.org/packages/38/e8/f5a0a5f44f19f171d8477059aa5f28a158d7d57fe1a46c553e231f698435/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d4f5becd2a5791829f79608c6f3dc745388162376f310eb9c142c985f9441cc1", size = 1725839, upload-time = "2025-06-14T15:14:54.617Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/ac/81acc594c7f529ef4419d3866913f628cd4fa9cab17f7bf410a5c3c04c53/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:60f2ce6b944e97649051d5f5cc0f439360690b73909230e107fd45a359d3e911", size = 1759311, upload-time = "2025-06-14T15:14:56.597Z" }, + { url = "https://files.pythonhosted.org/packages/38/0d/aabe636bd25c6ab7b18825e5a97d40024da75152bec39aa6ac8b7a677630/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:69fc1909857401b67bf599c793f2183fbc4804717388b0b888f27f9929aa41f3", size = 1708202, upload-time = "2025-06-14T15:14:58.598Z" }, + { url = "https://files.pythonhosted.org/packages/1f/ab/561ef2d8a223261683fb95a6283ad0d36cb66c87503f3a7dde7afe208bb2/aiohttp-3.12.13-cp313-cp313-win32.whl", hash = "sha256:7d7e68787a2046b0e44ba5587aa723ce05d711e3a3665b6b7545328ac8e3c0dd", size = 420794, upload-time = "2025-06-14T15:15:00.939Z" }, + { url = "https://files.pythonhosted.org/packages/9d/47/b11d0089875a23bff0abd3edb5516bcd454db3fefab8604f5e4b07bd6210/aiohttp-3.12.13-cp313-cp313-win_amd64.whl", hash = "sha256:5a178390ca90419bfd41419a809688c368e63c86bd725e1186dd97f6b89c2706", size = 446735, upload-time = "2025-06-14T15:15:02.858Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 
7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + [[package]] name = "annotated-types" version = "0.7.0" @@ -43,6 +116,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, ] +[[package]] +name = "authlib" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a2/9d/b1e08d36899c12c8b894a44a5583ee157789f26fc4b176f8e4b6217b56e1/authlib-1.6.0.tar.gz", hash = "sha256:4367d32031b7af175ad3a323d571dc7257b7099d55978087ceae4a0d88cd3210", size = 158371, upload-time = "2025-05-23T00:21:45.011Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/29/587c189bbab1ccc8c86a03a5d0e13873df916380ef1be461ebe6acebf48d/authlib-1.6.0-py2.py3-none-any.whl", hash = "sha256:91685589498f79e8655e8a8947431ad6288831d643f11c55c2143ffcc738048d", size = 239981, upload-time = "2025-05-23T00:21:43.075Z" }, +] + [[package]] name = "blessed" version = "1.20.0" @@ -75,6 +160,8 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" }, { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, @@ -84,6 +171,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, { url = 
"https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" }, { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, @@ -211,6 +300,7 @@ dependencies = [ ] sdist = { url = 
"https://files.pythonhosted.org/packages/53/d6/1411ab4d6108ab167d06254c5be517681f1e331f90edf1379895bcb87020/cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053", size = 711096, upload-time = "2025-05-02T19:36:04.667Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/08/53/c776d80e9d26441bb3868457909b4e74dd9ccabd182e10b2b0ae7a07e265/cryptography-44.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88", size = 6670281, upload-time = "2025-05-02T19:34:50.665Z" }, { url = "https://files.pythonhosted.org/packages/6a/06/af2cf8d56ef87c77319e9086601bef621bedf40f6f59069e1b6d1ec498c5/cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137", size = 3959305, upload-time = "2025-05-02T19:34:53.042Z" }, { url = "https://files.pythonhosted.org/packages/ae/01/80de3bec64627207d030f47bf3536889efee8913cd363e78ca9a09b13c8e/cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c", size = 4171040, upload-time = "2025-05-02T19:34:54.675Z" }, { url = "https://files.pythonhosted.org/packages/bd/48/bb16b7541d207a19d9ae8b541c70037a05e473ddc72ccb1386524d4f023c/cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76", size = 3963411, upload-time = "2025-05-02T19:34:56.61Z" }, @@ -220,6 +310,9 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b6/f7/7cb5488c682ca59a02a32ec5f975074084db4c983f849d47b7b67cc8697a/cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d", size = 4196173, upload-time = "2025-05-02T19:35:05.018Z" }, { url = 
"https://files.pythonhosted.org/packages/d2/0b/2f789a8403ae089b0b121f8f54f4a3e5228df756e2146efdf4a09a3d5083/cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904", size = 4087713, upload-time = "2025-05-02T19:35:07.187Z" }, { url = "https://files.pythonhosted.org/packages/1d/aa/330c13655f1af398fc154089295cf259252f0ba5df93b4bc9d9c7d7f843e/cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44", size = 4299064, upload-time = "2025-05-02T19:35:08.879Z" }, + { url = "https://files.pythonhosted.org/packages/10/a8/8c540a421b44fd267a7d58a1fd5f072a552d72204a3f08194f98889de76d/cryptography-44.0.3-cp37-abi3-win32.whl", hash = "sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d", size = 2773887, upload-time = "2025-05-02T19:35:10.41Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0d/c4b1657c39ead18d76bbd122da86bd95bdc4095413460d09544000a17d56/cryptography-44.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d", size = 3209737, upload-time = "2025-05-02T19:35:12.12Z" }, + { url = "https://files.pythonhosted.org/packages/34/a3/ad08e0bcc34ad436013458d7528e83ac29910943cea42ad7dd4141a27bbb/cryptography-44.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f", size = 6673501, upload-time = "2025-05-02T19:35:13.775Z" }, { url = "https://files.pythonhosted.org/packages/b1/f0/7491d44bba8d28b464a5bc8cc709f25a51e3eac54c0a4444cf2473a57c37/cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759", size = 3960307, upload-time = "2025-05-02T19:35:15.917Z" }, { url = 
"https://files.pythonhosted.org/packages/f7/c8/e5c5d0e1364d3346a5747cdcd7ecbb23ca87e6dea4f942a44e88be349f06/cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645", size = 4170876, upload-time = "2025-05-02T19:35:18.138Z" }, { url = "https://files.pythonhosted.org/packages/73/96/025cb26fc351d8c7d3a1c44e20cf9a01e9f7cf740353c9c7a17072e4b264/cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2", size = 3964127, upload-time = "2025-05-02T19:35:19.864Z" }, @@ -229,6 +322,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/35/6e/dca39d553075980ccb631955c47b93d87d27f3596da8d48b1ae81463d915/cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f", size = 4197398, upload-time = "2025-05-02T19:35:27.678Z" }, { url = "https://files.pythonhosted.org/packages/9b/9d/d1f2fe681eabc682067c66a74addd46c887ebacf39038ba01f8860338d3d/cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5", size = 4087900, upload-time = "2025-05-02T19:35:29.312Z" }, { url = "https://files.pythonhosted.org/packages/c4/f5/3599e48c5464580b73b236aafb20973b953cd2e7b44c7c2533de1d888446/cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b", size = 4301067, upload-time = "2025-05-02T19:35:31.547Z" }, + { url = "https://files.pythonhosted.org/packages/a7/6c/d2c48c8137eb39d0c193274db5c04a75dab20d2f7c3f81a7dcc3a8897701/cryptography-44.0.3-cp39-abi3-win32.whl", hash = "sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028", size = 2775467, upload-time = "2025-05-02T19:35:33.805Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/ad/51f212198681ea7b0deaaf8846ee10af99fba4e894f67b353524eab2bbe5/cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334", size = 3210375, upload-time = "2025-05-02T19:35:35.369Z" }, ] [[package]] @@ -240,6 +335,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973, upload-time = "2024-10-09T18:35:44.272Z" }, ] +[[package]] +name = "dnspython" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, +] + [[package]] name = "editor" version = "1.6.6" @@ -253,6 +357,51 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1b/c2/4bc8cd09b14e28ce3f406a8b05761bed0d785d1ca8c2a5c6684d884c66a2/editor-1.6.6-py3-none-any.whl", hash = "sha256:e818e6913f26c2a81eadef503a2741d7cca7f235d20e217274a009ecd5a74abf", size = 4017, upload-time = "2024-01-25T10:44:58.66Z" }, ] +[[package]] +name = "email-validator" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, + { name = "idna" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967, upload-time = "2024-06-20T11:30:30.034Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521, upload-time = "2024-06-20T11:30:28.248Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "fastmcp" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "authlib" }, + { name = "exceptiongroup" }, + { name = "httpx" }, + { name = "mcp" }, + { name = "openapi-pydantic" }, + { name = "pydantic", extra = ["email"] }, + { name = "python-dotenv" }, + { name = "rich" }, + { name = "typer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/1f/0031ea07bcad9f9b38d3500772d2749ca2b16335b92bd012f1d2f86a853e/fastmcp-2.10.1.tar.gz", hash = 
"sha256:450c72e523926a2203c7eecdb4a8b0507506667bc8736b8b7bb44f6312424649", size = 2730387, upload-time = "2025-07-02T04:57:24.981Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/a2/52ef74287ec5fe0e5a0ffedde7d0809da5ec3ac85f4e3f2ed5587b39471a/fastmcp-2.10.1-py3-none-any.whl", hash = "sha256:17d0acea04eeb3464c9eca42b6774fb06b38b72cface9af6a7482b3aa561db13", size = 182108, upload-time = "2025-07-02T04:57:23.529Z" }, +] + [[package]] name = "filelock" version = "3.18.0" @@ -262,6 +411,66 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, ] +[[package]] +name = "frozenlist" +version = "1.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, + { url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = "2025-06-09T23:00:44.793Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, upload-time = "2025-06-09T23:00:46.125Z" }, + { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, + { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, + { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, + { url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, 
upload-time = "2025-06-09T23:00:52.855Z" }, + { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, + { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, + { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, + { url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, + { 
url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = "2025-06-09T23:01:05.095Z" }, + { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, upload-time = "2025-06-09T23:01:06.54Z" }, + { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" }, + { url = "https://files.pythonhosted.org/packages/24/90/6b2cebdabdbd50367273c20ff6b57a3dfa89bd0762de02c3a1eb42cb6462/frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee", size = 79791, upload-time = "2025-06-09T23:01:09.368Z" }, + { url = "https://files.pythonhosted.org/packages/83/2e/5b70b6a3325363293fe5fc3ae74cdcbc3e996c2a11dde2fd9f1fb0776d19/frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d", size = 47165, upload-time = "2025-06-09T23:01:10.653Z" }, + { url = "https://files.pythonhosted.org/packages/f4/25/a0895c99270ca6966110f4ad98e87e5662eab416a17e7fd53c364bf8b954/frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43", size = 45881, upload-time = "2025-06-09T23:01:12.296Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/7c/71bb0bbe0832793c601fff68cd0cf6143753d0c667f9aec93d3c323f4b55/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d", size = 232409, upload-time = "2025-06-09T23:01:13.641Z" }, + { url = "https://files.pythonhosted.org/packages/c0/45/ed2798718910fe6eb3ba574082aaceff4528e6323f9a8570be0f7028d8e9/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee", size = 225132, upload-time = "2025-06-09T23:01:15.264Z" }, + { url = "https://files.pythonhosted.org/packages/ba/e2/8417ae0f8eacb1d071d4950f32f229aa6bf68ab69aab797b72a07ea68d4f/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb", size = 237638, upload-time = "2025-06-09T23:01:16.752Z" }, + { url = "https://files.pythonhosted.org/packages/f8/b7/2ace5450ce85f2af05a871b8c8719b341294775a0a6c5585d5e6170f2ce7/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f", size = 233539, upload-time = "2025-06-09T23:01:18.202Z" }, + { url = "https://files.pythonhosted.org/packages/46/b9/6989292c5539553dba63f3c83dc4598186ab2888f67c0dc1d917e6887db6/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60", size = 215646, upload-time = "2025-06-09T23:01:19.649Z" }, + { url = "https://files.pythonhosted.org/packages/72/31/bc8c5c99c7818293458fe745dab4fd5730ff49697ccc82b554eb69f16a24/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00", size = 232233, upload-time = "2025-06-09T23:01:21.175Z" }, + { url = "https://files.pythonhosted.org/packages/59/52/460db4d7ba0811b9ccb85af996019f5d70831f2f5f255f7cc61f86199795/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b", size = 227996, upload-time = "2025-06-09T23:01:23.098Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c9/f4b39e904c03927b7ecf891804fd3b4df3db29b9e487c6418e37988d6e9d/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c", size = 242280, upload-time = "2025-06-09T23:01:24.808Z" }, + { url = "https://files.pythonhosted.org/packages/b8/33/3f8d6ced42f162d743e3517781566b8481322be321b486d9d262adf70bfb/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949", size = 217717, upload-time = "2025-06-09T23:01:26.28Z" }, + { url = "https://files.pythonhosted.org/packages/3e/e8/ad683e75da6ccef50d0ab0c2b2324b32f84fc88ceee778ed79b8e2d2fe2e/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca", size = 236644, upload-time = "2025-06-09T23:01:27.887Z" }, + { url = "https://files.pythonhosted.org/packages/b2/14/8d19ccdd3799310722195a72ac94ddc677541fb4bef4091d8e7775752360/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b", size = 238879, upload-time = "2025-06-09T23:01:29.524Z" }, + { url = "https://files.pythonhosted.org/packages/ce/13/c12bf657494c2fd1079a48b2db49fa4196325909249a52d8f09bc9123fd7/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e", size = 232502, upload-time = 
"2025-06-09T23:01:31.287Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8b/e7f9dfde869825489382bc0d512c15e96d3964180c9499efcec72e85db7e/frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1", size = 39169, upload-time = "2025-06-09T23:01:35.503Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/a487a98d94205d85745080a37860ff5744b9820a2c9acbcdd9440bfddf98/frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba", size = 43219, upload-time = "2025-06-09T23:01:36.784Z" }, + { url = "https://files.pythonhosted.org/packages/56/d5/5c4cf2319a49eddd9dd7145e66c4866bdc6f3dbc67ca3d59685149c11e0d/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d", size = 84345, upload-time = "2025-06-09T23:01:38.295Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/ec2c1e1dc16b85bc9d526009961953df9cec8481b6886debb36ec9107799/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d", size = 48880, upload-time = "2025-06-09T23:01:39.887Z" }, + { url = "https://files.pythonhosted.org/packages/69/86/f9596807b03de126e11e7d42ac91e3d0b19a6599c714a1989a4e85eeefc4/frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b", size = 48498, upload-time = "2025-06-09T23:01:41.318Z" }, + { url = "https://files.pythonhosted.org/packages/5e/cb/df6de220f5036001005f2d726b789b2c0b65f2363b104bbc16f5be8084f8/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146", size = 292296, upload-time = "2025-06-09T23:01:42.685Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/1f/de84c642f17c8f851a2905cee2dae401e5e0daca9b5ef121e120e19aa825/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74", size = 273103, upload-time = "2025-06-09T23:01:44.166Z" }, + { url = "https://files.pythonhosted.org/packages/88/3c/c840bfa474ba3fa13c772b93070893c6e9d5c0350885760376cbe3b6c1b3/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1", size = 292869, upload-time = "2025-06-09T23:01:45.681Z" }, + { url = "https://files.pythonhosted.org/packages/a6/1c/3efa6e7d5a39a1d5ef0abeb51c48fb657765794a46cf124e5aca2c7a592c/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1", size = 291467, upload-time = "2025-06-09T23:01:47.234Z" }, + { url = "https://files.pythonhosted.org/packages/4f/00/d5c5e09d4922c395e2f2f6b79b9a20dab4b67daaf78ab92e7729341f61f6/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384", size = 266028, upload-time = "2025-06-09T23:01:48.819Z" }, + { url = "https://files.pythonhosted.org/packages/4e/27/72765be905619dfde25a7f33813ac0341eb6b076abede17a2e3fbfade0cb/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb", size = 284294, upload-time = "2025-06-09T23:01:50.394Z" }, + { url = "https://files.pythonhosted.org/packages/88/67/c94103a23001b17808eb7dd1200c156bb69fb68e63fcf0693dde4cd6228c/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c", size = 281898, upload-time = "2025-06-09T23:01:52.234Z" }, + { url = "https://files.pythonhosted.org/packages/42/34/a3e2c00c00f9e2a9db5653bca3fec306349e71aff14ae45ecc6d0951dd24/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65", size = 290465, upload-time = "2025-06-09T23:01:53.788Z" }, + { url = "https://files.pythonhosted.org/packages/bb/73/f89b7fbce8b0b0c095d82b008afd0590f71ccb3dee6eee41791cf8cd25fd/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3", size = 266385, upload-time = "2025-06-09T23:01:55.769Z" }, + { url = "https://files.pythonhosted.org/packages/cd/45/e365fdb554159462ca12df54bc59bfa7a9a273ecc21e99e72e597564d1ae/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657", size = 288771, upload-time = "2025-06-09T23:01:57.4Z" }, + { url = "https://files.pythonhosted.org/packages/00/11/47b6117002a0e904f004d70ec5194fe9144f117c33c851e3d51c765962d0/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104", size = 288206, upload-time = "2025-06-09T23:01:58.936Z" }, + { url = "https://files.pythonhosted.org/packages/40/37/5f9f3c3fd7f7746082ec67bcdc204db72dad081f4f83a503d33220a92973/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf", size = 282620, upload-time = "2025-06-09T23:02:00.493Z" }, + { url = "https://files.pythonhosted.org/packages/0b/31/8fbc5af2d183bff20f21aa743b4088eac4445d2bb1cdece449ae80e4e2d1/frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81", size = 43059, upload-time = 
"2025-06-09T23:02:02.072Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ed/41956f52105b8dbc26e457c5705340c67c8cc2b79f394b79bffc09d0e938/frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e", size = 47516, upload-time = "2025-06-09T23:02:03.779Z" }, + { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, +] + [[package]] name = "h11" version = "0.14.0" @@ -403,6 +612,33 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/27/e3/0e0014d6ab159d48189e92044ace13b1e1fe9aa3024ba9f4e8cf172aa7c2/jinxed-1.3.0-py2.py3-none-any.whl", hash = "sha256:b993189f39dc2d7504d802152671535b06d380b26d78070559551cbf92df4fc5", size = 33085, upload-time = "2024-07-31T22:39:17.426Z" }, ] +[[package]] +name = "jsonschema" +version = "4.24.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/d3/1cf5326b923a53515d8f3a2cd442e6d7e94fcc444716e879ea70a0ce3177/jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196", size = 353480, upload-time = "2025-05-26T18:48:10.459Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/3d/023389198f69c722d039351050738d6755376c8fd343e91dc493ea485905/jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d", size = 88709, upload-time = "2025-05-26T18:48:08.417Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513, upload-time = "2025-04-23T12:34:07.418Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" }, +] + [[package]] name = "keyring" version = "25.6.0" @@ -425,18 +661,19 @@ name = "linkedin-mcp-server" version = "1.0.5" source = { virtual = "." } dependencies = [ - { name = "httpx" }, + { name = "fastmcp" }, { name = "inquirer" }, { name = "keyring" }, { name = "linkedin-scraper" }, - { name = "mcp", extra = ["cli"] }, { name = "pyperclip" }, ] [package.dev-dependencies] dev = [ + { name = "aiohttp" }, { name = "pre-commit" }, { name = "pytest" }, + { name = "pytest-asyncio" }, { name = "pytest-cov" }, { name = "ruff" }, { name = "ty" }, @@ -444,18 +681,19 @@ dev = [ [package.metadata] requires-dist = [ - { name = "httpx", specifier = ">=0.28.1" }, + { name = "fastmcp", specifier = ">=2.10.1" }, { name = "inquirer", specifier = ">=3.4.0" }, { name = "keyring", specifier = ">=25.6.0" }, { name = "linkedin-scraper", git = "https://github.com/joeyism/linkedin_scraper.git" }, - { name = "mcp", extras = ["cli"], specifier = ">=1.6.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, ] [package.metadata.requires-dev] dev = [ + { name = "aiohttp", specifier = ">=3.12.13" }, { name = "pre-commit", specifier = ">=4.2.0" }, { name = "pytest", specifier = ">=8.3.5" }, + { name = "pytest-asyncio", specifier = ">=1.0.0" }, { name = "pytest-cov", specifier = ">=6.1.1" }, { name = "ruff", specifier = ">=0.11.11" }, { name = "ty", 
specifier = ">=0.0.1a12" }, @@ -527,27 +765,23 @@ wheels = [ [[package]] name = "mcp" -version = "1.6.0" +version = "1.10.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "httpx" }, { name = "httpx-sse" }, + { name = "jsonschema" }, { name = "pydantic" }, { name = "pydantic-settings" }, + { name = "python-multipart" }, { name = "sse-starlette" }, { name = "starlette" }, - { name = "uvicorn" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/d2/f587cb965a56e992634bebc8611c5b579af912b74e04eb9164bd49527d21/mcp-1.6.0.tar.gz", hash = "sha256:d9324876de2c5637369f43161cd71eebfd803df5a95e46225cab8d280e366723", size = 200031, upload-time = "2025-03-27T16:46:32.336Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/68/63045305f29ff680a9cd5be360c755270109e6b76f696ea6824547ddbc30/mcp-1.10.1.tar.gz", hash = "sha256:aaa0957d8307feeff180da2d9d359f2b801f35c0c67f1882136239055ef034c2", size = 392969, upload-time = "2025-06-27T12:03:08.982Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/10/30/20a7f33b0b884a9d14dd3aa94ff1ac9da1479fe2ad66dd9e2736075d2506/mcp-1.6.0-py3-none-any.whl", hash = "sha256:7bd24c6ea042dbec44c754f100984d186620d8b841ec30f1b19eda9b93a634d0", size = 76077, upload-time = "2025-03-27T16:46:29.919Z" }, -] - -[package.optional-dependencies] -cli = [ - { name = "python-dotenv" }, - { name = "typer" }, + { url = "https://files.pythonhosted.org/packages/d7/3f/435a5b3d10ae242a9d6c2b33175551173c3c61fe637dc893be05c4ed0aaf/mcp-1.10.1-py3-none-any.whl", hash = "sha256:4d08301aefe906dce0fa482289db55ce1db831e3e67212e65b5e23ad8454b3c5", size = 150878, upload-time = "2025-06-27T12:03:07.328Z" }, ] [[package]] @@ -568,6 +802,69 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2b/9f/7ba6f94fc1e9ac3d2b853fdff3035fb2fa5afbed898c4a72b8a020610594/more_itertools-10.7.0-py3-none-any.whl", hash = 
"sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e", size = 65278, upload-time = "2025-04-22T14:17:40.49Z" }, ] +[[package]] +name = "multidict" +version = "6.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/2c/5dad12e82fbdf7470f29bff2171484bf07cb3b16ada60a6589af8f376440/multidict-6.6.3.tar.gz", hash = "sha256:798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc", size = 101006, upload-time = "2025-06-30T15:53:46.929Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/a0/6b57988ea102da0623ea814160ed78d45a2645e4bbb499c2896d12833a70/multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6", size = 76514, upload-time = "2025-06-30T15:51:48.728Z" }, + { url = "https://files.pythonhosted.org/packages/07/7a/d1e92665b0850c6c0508f101f9cf0410c1afa24973e1115fe9c6a185ebf7/multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f", size = 45394, upload-time = "2025-06-30T15:51:49.986Z" }, + { url = "https://files.pythonhosted.org/packages/52/6f/dd104490e01be6ef8bf9573705d8572f8c2d2c561f06e3826b081d9e6591/multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55", size = 43590, upload-time = "2025-06-30T15:51:51.331Z" }, + { url = "https://files.pythonhosted.org/packages/44/fe/06e0e01b1b0611e6581b7fd5a85b43dacc08b6cea3034f902f383b0873e5/multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b", size = 237292, upload-time = "2025-06-30T15:51:52.584Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/71/4f0e558fb77696b89c233c1ee2d92f3e1d5459070a0e89153c9e9e804186/multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888", size = 258385, upload-time = "2025-06-30T15:51:53.913Z" }, + { url = "https://files.pythonhosted.org/packages/e3/25/cca0e68228addad24903801ed1ab42e21307a1b4b6dd2cf63da5d3ae082a/multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d", size = 242328, upload-time = "2025-06-30T15:51:55.672Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a3/46f2d420d86bbcb8fe660b26a10a219871a0fbf4d43cb846a4031533f3e0/multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680", size = 268057, upload-time = "2025-06-30T15:51:57.037Z" }, + { url = "https://files.pythonhosted.org/packages/9e/73/1c743542fe00794a2ec7466abd3f312ccb8fad8dff9f36d42e18fb1ec33e/multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a", size = 269341, upload-time = "2025-06-30T15:51:59.111Z" }, + { url = "https://files.pythonhosted.org/packages/a4/11/6ec9dcbe2264b92778eeb85407d1df18812248bf3506a5a1754bc035db0c/multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961", size = 256081, upload-time = "2025-06-30T15:52:00.533Z" }, + { url = "https://files.pythonhosted.org/packages/9b/2b/631b1e2afeb5f1696846d747d36cda075bfdc0bc7245d6ba5c319278d6c4/multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65", size = 253581, upload-time = "2025-06-30T15:52:02.43Z" }, + { url = "https://files.pythonhosted.org/packages/bf/0e/7e3b93f79efeb6111d3bf9a1a69e555ba1d07ad1c11bceb56b7310d0d7ee/multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643", size = 250750, upload-time = "2025-06-30T15:52:04.26Z" }, + { url = "https://files.pythonhosted.org/packages/ad/9e/086846c1d6601948e7de556ee464a2d4c85e33883e749f46b9547d7b0704/multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063", size = 251548, upload-time = "2025-06-30T15:52:06.002Z" }, + { url = "https://files.pythonhosted.org/packages/8c/7b/86ec260118e522f1a31550e87b23542294880c97cfbf6fb18cc67b044c66/multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3", size = 262718, upload-time = "2025-06-30T15:52:07.707Z" }, + { url = "https://files.pythonhosted.org/packages/8c/bd/22ce8f47abb0be04692c9fc4638508b8340987b18691aa7775d927b73f72/multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75", size = 259603, upload-time = "2025-06-30T15:52:09.58Z" }, + { url = "https://files.pythonhosted.org/packages/07/9c/91b7ac1691be95cd1f4a26e36a74b97cda6aa9820632d31aab4410f46ebd/multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10", size = 251351, upload-time = "2025-06-30T15:52:10.947Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl", hash = "sha256:73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5", size = 41860, upload-time = "2025-06-30T15:52:12.334Z" }, + 
{ url = "https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17", size = 45982, upload-time = "2025-06-30T15:52:13.6Z" }, + { url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b", size = 43210, upload-time = "2025-06-30T15:52:14.893Z" }, + { url = "https://files.pythonhosted.org/packages/52/1d/0bebcbbb4f000751fbd09957257903d6e002943fc668d841a4cf2fb7f872/multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:540d3c06d48507357a7d57721e5094b4f7093399a0106c211f33540fdc374d55", size = 75843, upload-time = "2025-06-30T15:52:16.155Z" }, + { url = "https://files.pythonhosted.org/packages/07/8f/cbe241b0434cfe257f65c2b1bcf9e8d5fb52bc708c5061fb29b0fed22bdf/multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9c19cea2a690f04247d43f366d03e4eb110a0dc4cd1bbeee4d445435428ed35b", size = 45053, upload-time = "2025-06-30T15:52:17.429Z" }, + { url = "https://files.pythonhosted.org/packages/32/d2/0b3b23f9dbad5b270b22a3ac3ea73ed0a50ef2d9a390447061178ed6bdb8/multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7af039820cfd00effec86bda5d8debef711a3e86a1d3772e85bea0f243a4bd65", size = 43273, upload-time = "2025-06-30T15:52:19.346Z" }, + { url = "https://files.pythonhosted.org/packages/fd/fe/6eb68927e823999e3683bc49678eb20374ba9615097d085298fd5b386564/multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:500b84f51654fdc3944e936f2922114349bf8fdcac77c3092b03449f0e5bc2b3", size = 237124, upload-time = "2025-06-30T15:52:20.773Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/ab/320d8507e7726c460cb77117848b3834ea0d59e769f36fdae495f7669929/multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3fc723ab8a5c5ed6c50418e9bfcd8e6dceba6c271cee6728a10a4ed8561520c", size = 256892, upload-time = "2025-06-30T15:52:22.242Z" }, + { url = "https://files.pythonhosted.org/packages/76/60/38ee422db515ac69834e60142a1a69111ac96026e76e8e9aa347fd2e4591/multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:94c47ea3ade005b5976789baaed66d4de4480d0a0bf31cef6edaa41c1e7b56a6", size = 240547, upload-time = "2025-06-30T15:52:23.736Z" }, + { url = "https://files.pythonhosted.org/packages/27/fb/905224fde2dff042b030c27ad95a7ae744325cf54b890b443d30a789b80e/multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dbc7cf464cc6d67e83e136c9f55726da3a30176f020a36ead246eceed87f1cd8", size = 266223, upload-time = "2025-06-30T15:52:25.185Z" }, + { url = "https://files.pythonhosted.org/packages/76/35/dc38ab361051beae08d1a53965e3e1a418752fc5be4d3fb983c5582d8784/multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:900eb9f9da25ada070f8ee4a23f884e0ee66fe4e1a38c3af644256a508ad81ca", size = 267262, upload-time = "2025-06-30T15:52:26.969Z" }, + { url = "https://files.pythonhosted.org/packages/1f/a3/0a485b7f36e422421b17e2bbb5a81c1af10eac1d4476f2ff92927c730479/multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c6df517cf177da5d47ab15407143a89cd1a23f8b335f3a28d57e8b0a3dbb884", size = 254345, upload-time = "2025-06-30T15:52:28.467Z" }, + { url = "https://files.pythonhosted.org/packages/b4/59/bcdd52c1dab7c0e0d75ff19cac751fbd5f850d1fc39172ce809a74aa9ea4/multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:4ef421045f13879e21c994b36e728d8e7d126c91a64b9185810ab51d474f27e7", size = 252248, upload-time = "2025-06-30T15:52:29.938Z" }, + { url = "https://files.pythonhosted.org/packages/bb/a4/2d96aaa6eae8067ce108d4acee6f45ced5728beda55c0f02ae1072c730d1/multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:6c1e61bb4f80895c081790b6b09fa49e13566df8fbff817da3f85b3a8192e36b", size = 250115, upload-time = "2025-06-30T15:52:31.416Z" }, + { url = "https://files.pythonhosted.org/packages/25/d2/ed9f847fa5c7d0677d4f02ea2c163d5e48573de3f57bacf5670e43a5ffaa/multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e5e8523bb12d7623cd8300dbd91b9e439a46a028cd078ca695eb66ba31adee3c", size = 249649, upload-time = "2025-06-30T15:52:32.996Z" }, + { url = "https://files.pythonhosted.org/packages/1f/af/9155850372563fc550803d3f25373308aa70f59b52cff25854086ecb4a79/multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ef58340cc896219e4e653dade08fea5c55c6df41bcc68122e3be3e9d873d9a7b", size = 261203, upload-time = "2025-06-30T15:52:34.521Z" }, + { url = "https://files.pythonhosted.org/packages/36/2f/c6a728f699896252cf309769089568a33c6439626648843f78743660709d/multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fc9dc435ec8699e7b602b94fe0cd4703e69273a01cbc34409af29e7820f777f1", size = 258051, upload-time = "2025-06-30T15:52:35.999Z" }, + { url = "https://files.pythonhosted.org/packages/d0/60/689880776d6b18fa2b70f6cc74ff87dd6c6b9b47bd9cf74c16fecfaa6ad9/multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e864486ef4ab07db5e9cb997bad2b681514158d6954dd1958dfb163b83d53e6", size = 249601, upload-time = "2025-06-30T15:52:37.473Z" }, + { url = "https://files.pythonhosted.org/packages/75/5e/325b11f2222a549019cf2ef879c1f81f94a0d40ace3ef55cf529915ba6cc/multidict-6.6.3-cp313-cp313-win32.whl", hash = "sha256:5633a82fba8e841bc5c5c06b16e21529573cd654f67fd833650a215520a6210e", size = 41683, upload-time = "2025-06-30T15:52:38.927Z" }, 
+ { url = "https://files.pythonhosted.org/packages/b1/ad/cf46e73f5d6e3c775cabd2a05976547f3f18b39bee06260369a42501f053/multidict-6.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:e93089c1570a4ad54c3714a12c2cef549dc9d58e97bcded193d928649cab78e9", size = 45811, upload-time = "2025-06-30T15:52:40.207Z" }, + { url = "https://files.pythonhosted.org/packages/c5/c9/2e3fe950db28fb7c62e1a5f46e1e38759b072e2089209bc033c2798bb5ec/multidict-6.6.3-cp313-cp313-win_arm64.whl", hash = "sha256:c60b401f192e79caec61f166da9c924e9f8bc65548d4246842df91651e83d600", size = 43056, upload-time = "2025-06-30T15:52:41.575Z" }, + { url = "https://files.pythonhosted.org/packages/3a/58/aaf8114cf34966e084a8cc9517771288adb53465188843d5a19862cb6dc3/multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:02fd8f32d403a6ff13864b0851f1f523d4c988051eea0471d4f1fd8010f11134", size = 82811, upload-time = "2025-06-30T15:52:43.281Z" }, + { url = "https://files.pythonhosted.org/packages/71/af/5402e7b58a1f5b987a07ad98f2501fdba2a4f4b4c30cf114e3ce8db64c87/multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f3aa090106b1543f3f87b2041eef3c156c8da2aed90c63a2fbed62d875c49c37", size = 48304, upload-time = "2025-06-30T15:52:45.026Z" }, + { url = "https://files.pythonhosted.org/packages/39/65/ab3c8cafe21adb45b24a50266fd747147dec7847425bc2a0f6934b3ae9ce/multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e924fb978615a5e33ff644cc42e6aa241effcf4f3322c09d4f8cebde95aff5f8", size = 46775, upload-time = "2025-06-30T15:52:46.459Z" }, + { url = "https://files.pythonhosted.org/packages/49/ba/9fcc1b332f67cc0c0c8079e263bfab6660f87fe4e28a35921771ff3eea0d/multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:b9fe5a0e57c6dbd0e2ce81ca66272282c32cd11d31658ee9553849d91289e1c1", size = 229773, upload-time = "2025-06-30T15:52:47.88Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/14/0145a251f555f7c754ce2dcbcd012939bbd1f34f066fa5d28a50e722a054/multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b24576f208793ebae00280c59927c3b7c2a3b1655e443a25f753c4611bc1c373", size = 250083, upload-time = "2025-06-30T15:52:49.366Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d4/d5c0bd2bbb173b586c249a151a26d2fb3ec7d53c96e42091c9fef4e1f10c/multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:135631cb6c58eac37d7ac0df380294fecdc026b28837fa07c02e459c7fb9c54e", size = 228980, upload-time = "2025-06-30T15:52:50.903Z" }, + { url = "https://files.pythonhosted.org/packages/21/32/c9a2d8444a50ec48c4733ccc67254100c10e1c8ae8e40c7a2d2183b59b97/multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:274d416b0df887aef98f19f21578653982cfb8a05b4e187d4a17103322eeaf8f", size = 257776, upload-time = "2025-06-30T15:52:52.764Z" }, + { url = "https://files.pythonhosted.org/packages/68/d0/14fa1699f4ef629eae08ad6201c6b476098f5efb051b296f4c26be7a9fdf/multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e252017a817fad7ce05cafbe5711ed40faeb580e63b16755a3a24e66fa1d87c0", size = 256882, upload-time = "2025-06-30T15:52:54.596Z" }, + { url = "https://files.pythonhosted.org/packages/da/88/84a27570fbe303c65607d517a5f147cd2fc046c2d1da02b84b17b9bdc2aa/multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4cc8d848cd4fe1cdee28c13ea79ab0ed37fc2e89dd77bac86a2e7959a8c3bc", size = 247816, upload-time = "2025-06-30T15:52:56.175Z" }, + { url = "https://files.pythonhosted.org/packages/1c/60/dca352a0c999ce96a5d8b8ee0b2b9f729dcad2e0b0c195f8286269a2074c/multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:9e236a7094b9c4c1b7585f6b9cca34b9d833cf079f7e4c49e6a4a6ec9bfdc68f", size = 245341, upload-time = "2025-06-30T15:52:57.752Z" }, + { url = "https://files.pythonhosted.org/packages/50/ef/433fa3ed06028f03946f3993223dada70fb700f763f70c00079533c34578/multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e0cb0ab69915c55627c933f0b555a943d98ba71b4d1c57bc0d0a66e2567c7471", size = 235854, upload-time = "2025-06-30T15:52:59.74Z" }, + { url = "https://files.pythonhosted.org/packages/1b/1f/487612ab56fbe35715320905215a57fede20de7db40a261759690dc80471/multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:81ef2f64593aba09c5212a3d0f8c906a0d38d710a011f2f42759704d4557d3f2", size = 243432, upload-time = "2025-06-30T15:53:01.602Z" }, + { url = "https://files.pythonhosted.org/packages/da/6f/ce8b79de16cd885c6f9052c96a3671373d00c59b3ee635ea93e6e81b8ccf/multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:b9cbc60010de3562545fa198bfc6d3825df430ea96d2cc509c39bd71e2e7d648", size = 252731, upload-time = "2025-06-30T15:53:03.517Z" }, + { url = "https://files.pythonhosted.org/packages/bb/fe/a2514a6aba78e5abefa1624ca85ae18f542d95ac5cde2e3815a9fbf369aa/multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:70d974eaaa37211390cd02ef93b7e938de564bbffa866f0b08d07e5e65da783d", size = 247086, upload-time = "2025-06-30T15:53:05.48Z" }, + { url = "https://files.pythonhosted.org/packages/8c/22/b788718d63bb3cce752d107a57c85fcd1a212c6c778628567c9713f9345a/multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3713303e4a6663c6d01d648a68f2848701001f3390a030edaaf3fc949c90bf7c", size = 243338, upload-time = "2025-06-30T15:53:07.522Z" }, + { url = "https://files.pythonhosted.org/packages/22/d6/fdb3d0670819f2228f3f7d9af613d5e652c15d170c83e5f1c94fbc55a25b/multidict-6.6.3-cp313-cp313t-win32.whl", hash = "sha256:639ecc9fe7cd73f2495f62c213e964843826f44505a3e5d82805aa85cac6f89e", size = 47812, upload-time = 
"2025-06-30T15:53:09.263Z" }, + { url = "https://files.pythonhosted.org/packages/b6/d6/a9d2c808f2c489ad199723197419207ecbfbc1776f6e155e1ecea9c883aa/multidict-6.6.3-cp313-cp313t-win_amd64.whl", hash = "sha256:9f97e181f344a0ef3881b573d31de8542cc0dbc559ec68c8f8b5ce2c2e91646d", size = 53011, upload-time = "2025-06-30T15:53:11.038Z" }, + { url = "https://files.pythonhosted.org/packages/f2/40/b68001cba8188dd267590a111f9661b6256debc327137667e832bf5d66e8/multidict-6.6.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ce8b7693da41a3c4fde5871c738a81490cea5496c671d74374c8ab889e1834fb", size = 45254, upload-time = "2025-06-30T15:53:12.421Z" }, + { url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", size = 12313, upload-time = "2025-06-30T15:53:45.437Z" }, +] + [[package]] name = "nodeenv" version = "1.9.1" @@ -577,6 +874,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, ] +[[package]] +name = "openapi-pydantic" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/2e/58d83848dd1a79cb92ed8e63f6ba901ca282c5f09d04af9423ec26c56fd7/openapi_pydantic-0.5.1.tar.gz", hash = "sha256:ff6835af6bde7a459fb93eb93bb92b8749b754fc6e51b2f1590a19dc3005ee0d", size = 60892, upload-time = "2025-01-08T19:29:27.083Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/cf/03675d8bd8ecbf4445504d8071adab19f5f993676795708e36402ab38263/openapi_pydantic-0.5.1-py3-none-any.whl", hash = 
"sha256:a3a09ef4586f5bd760a8df7f43028b60cafb6d9f61de2acba9574766255ab146", size = 96381, upload-time = "2025-01-08T19:29:25.275Z" }, +] + [[package]] name = "outcome" version = "1.3.0.post0" @@ -632,6 +941,63 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload-time = "2025-03-18T21:35:19.343Z" }, ] +[[package]] +name = "propcache" +version = "0.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, + { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, + { url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, + { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, + { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = 
"2025-06-09T22:54:43.038Z" }, + { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, + { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, + { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, + { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, + { url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286, upload-time = "2025-06-09T22:54:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425, upload-time = "2025-06-09T22:54:55.642Z" }, + { url = "https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846, upload-time = "2025-06-09T22:54:57.246Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871, upload-time = "2025-06-09T22:54:58.975Z" }, + { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 215720, upload-time = "2025-06-09T22:55:00.471Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203, upload-time = "2025-06-09T22:55:01.834Z" }, + { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365, upload-time = "2025-06-09T22:55:03.199Z" }, + { url = "https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016, upload-time = "2025-06-09T22:55:04.518Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596, upload-time = "2025-06-09T22:55:05.942Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977, upload-time = "2025-06-09T22:55:07.792Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220, upload-time = "2025-06-09T22:55:09.173Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642, upload-time = "2025-06-09T22:55:10.62Z" }, + { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789, upload-time = "2025-06-09T22:55:12.029Z" }, + { url = "https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880, upload-time = "2025-06-09T22:55:13.45Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220, upload-time = "2025-06-09T22:55:15.284Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678, upload-time = "2025-06-09T22:55:16.445Z" }, + { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560, upload-time = "2025-06-09T22:55:17.598Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676, upload-time = "2025-06-09T22:55:18.922Z" }, + { url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701, upload-time = "2025-06-09T22:55:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934, upload-time = "2025-06-09T22:55:21.5Z" }, + { url = "https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316, upload-time = "2025-06-09T22:55:22.918Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619, upload-time = "2025-06-09T22:55:24.651Z" }, + { url = "https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896, upload-time = "2025-06-09T22:55:26.049Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111, upload-time = "2025-06-09T22:55:27.381Z" }, + { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 268334, upload-time = "2025-06-09T22:55:28.747Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026, upload-time = "2025-06-09T22:55:30.184Z" }, + { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724, upload-time = "2025-06-09T22:55:31.646Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868, upload-time = "2025-06-09T22:55:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322, upload-time = "2025-06-09T22:55:35.065Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778, upload-time = "2025-06-09T22:55:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175, upload-time = "2025-06-09T22:55:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857, upload-time = "2025-06-09T22:55:39.687Z" }, + { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, +] + [[package]] name = "pycparser" version = "2.22" @@ -643,7 +1009,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.3" +version = "2.11.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -651,51 +1017,56 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/10/2e/ca897f093ee6c5f3b0bee123ee4465c50e75431c3d5b6a3b44a47134e891/pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3", size = 785513, upload-time = "2025-04-08T13:27:06.399Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", 
hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/1d/407b29780a289868ed696d1616f4aad49d6388e5a77f567dcd2629dcd7b8/pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f", size = 443591, upload-time = "2025-04-08T13:27:03.789Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, +] + +[package.optional-dependencies] +email = [ + { name = "email-validator" }, ] [[package]] name = "pydantic-core" -version = "2.33.1" +version = "2.33.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/17/19/ed6a078a5287aea7922de6841ef4c06157931622c89c2a47940837b5eecd/pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df", size = 434395, upload-time = "2025-04-02T09:49:41.8Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/ce/3cb22b07c29938f97ff5f5bb27521f95e2ebec399b882392deb68d6c440e/pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8", size = 2026640, upload-time = "2025-04-02T09:47:25.394Z" }, - { url = "https://files.pythonhosted.org/packages/19/78/f381d643b12378fee782a72126ec5d793081ef03791c28a0fd542a5bee64/pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498", size = 1852649, upload-time = "2025-04-02T09:47:27.417Z" }, - { url = 
"https://files.pythonhosted.org/packages/9d/2b/98a37b80b15aac9eb2c6cfc6dbd35e5058a352891c5cce3a8472d77665a6/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939", size = 1892472, upload-time = "2025-04-02T09:47:29.006Z" }, - { url = "https://files.pythonhosted.org/packages/4e/d4/3c59514e0f55a161004792b9ff3039da52448f43f5834f905abef9db6e4a/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d", size = 1977509, upload-time = "2025-04-02T09:47:33.464Z" }, - { url = "https://files.pythonhosted.org/packages/a9/b6/c2c7946ef70576f79a25db59a576bce088bdc5952d1b93c9789b091df716/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e", size = 2128702, upload-time = "2025-04-02T09:47:34.812Z" }, - { url = "https://files.pythonhosted.org/packages/88/fe/65a880f81e3f2a974312b61f82a03d85528f89a010ce21ad92f109d94deb/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3", size = 2679428, upload-time = "2025-04-02T09:47:37.315Z" }, - { url = "https://files.pythonhosted.org/packages/6f/ff/4459e4146afd0462fb483bb98aa2436d69c484737feaceba1341615fb0ac/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d", size = 2008753, upload-time = "2025-04-02T09:47:39.013Z" }, - { url = "https://files.pythonhosted.org/packages/7c/76/1c42e384e8d78452ededac8b583fe2550c84abfef83a0552e0e7478ccbc3/pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b", size = 2114849, 
upload-time = "2025-04-02T09:47:40.427Z" }, - { url = "https://files.pythonhosted.org/packages/00/72/7d0cf05095c15f7ffe0eb78914b166d591c0eed72f294da68378da205101/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39", size = 2069541, upload-time = "2025-04-02T09:47:42.01Z" }, - { url = "https://files.pythonhosted.org/packages/b3/69/94a514066bb7d8be499aa764926937409d2389c09be0b5107a970286ef81/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a", size = 2239225, upload-time = "2025-04-02T09:47:43.425Z" }, - { url = "https://files.pythonhosted.org/packages/84/b0/e390071eadb44b41f4f54c3cef64d8bf5f9612c92686c9299eaa09e267e2/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db", size = 2248373, upload-time = "2025-04-02T09:47:44.979Z" }, - { url = "https://files.pythonhosted.org/packages/d6/b2/288b3579ffc07e92af66e2f1a11be3b056fe1214aab314748461f21a31c3/pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda", size = 1907034, upload-time = "2025-04-02T09:47:46.843Z" }, - { url = "https://files.pythonhosted.org/packages/02/28/58442ad1c22b5b6742b992ba9518420235adced665513868f99a1c2638a5/pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4", size = 1956848, upload-time = "2025-04-02T09:47:48.404Z" }, - { url = "https://files.pythonhosted.org/packages/a1/eb/f54809b51c7e2a1d9f439f158b8dd94359321abcc98767e16fc48ae5a77e/pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea", size = 1903986, upload-time = "2025-04-02T09:47:49.839Z" }, - { url = 
"https://files.pythonhosted.org/packages/7a/24/eed3466a4308d79155f1cdd5c7432c80ddcc4530ba8623b79d5ced021641/pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a", size = 2033551, upload-time = "2025-04-02T09:47:51.648Z" }, - { url = "https://files.pythonhosted.org/packages/ab/14/df54b1a0bc9b6ded9b758b73139d2c11b4e8eb43e8ab9c5847c0a2913ada/pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266", size = 1852785, upload-time = "2025-04-02T09:47:53.149Z" }, - { url = "https://files.pythonhosted.org/packages/fa/96/e275f15ff3d34bb04b0125d9bc8848bf69f25d784d92a63676112451bfb9/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3", size = 1897758, upload-time = "2025-04-02T09:47:55.006Z" }, - { url = "https://files.pythonhosted.org/packages/b7/d8/96bc536e975b69e3a924b507d2a19aedbf50b24e08c80fb00e35f9baaed8/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a", size = 1986109, upload-time = "2025-04-02T09:47:56.532Z" }, - { url = "https://files.pythonhosted.org/packages/90/72/ab58e43ce7e900b88cb571ed057b2fcd0e95b708a2e0bed475b10130393e/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516", size = 2129159, upload-time = "2025-04-02T09:47:58.088Z" }, - { url = "https://files.pythonhosted.org/packages/dc/3f/52d85781406886c6870ac995ec0ba7ccc028b530b0798c9080531b409fdb/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764", size = 2680222, upload-time = "2025-04-02T09:47:59.591Z" }, - { 
url = "https://files.pythonhosted.org/packages/f4/56/6e2ef42f363a0eec0fd92f74a91e0ac48cd2e49b695aac1509ad81eee86a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d", size = 2006980, upload-time = "2025-04-02T09:48:01.397Z" }, - { url = "https://files.pythonhosted.org/packages/4c/c0/604536c4379cc78359f9ee0aa319f4aedf6b652ec2854953f5a14fc38c5a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4", size = 2120840, upload-time = "2025-04-02T09:48:03.056Z" }, - { url = "https://files.pythonhosted.org/packages/1f/46/9eb764814f508f0edfb291a0f75d10854d78113fa13900ce13729aaec3ae/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde", size = 2072518, upload-time = "2025-04-02T09:48:04.662Z" }, - { url = "https://files.pythonhosted.org/packages/42/e3/fb6b2a732b82d1666fa6bf53e3627867ea3131c5f39f98ce92141e3e3dc1/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e", size = 2248025, upload-time = "2025-04-02T09:48:06.226Z" }, - { url = "https://files.pythonhosted.org/packages/5c/9d/fbe8fe9d1aa4dac88723f10a921bc7418bd3378a567cb5e21193a3c48b43/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd", size = 2254991, upload-time = "2025-04-02T09:48:08.114Z" }, - { url = "https://files.pythonhosted.org/packages/aa/99/07e2237b8a66438d9b26482332cda99a9acccb58d284af7bc7c946a42fd3/pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f", size = 1915262, upload-time = "2025-04-02T09:48:09.708Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/f4/e457a7849beeed1e5defbcf5051c6f7b3c91a0624dd31543a64fc9adcf52/pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40", size = 1956626, upload-time = "2025-04-02T09:48:11.288Z" }, - { url = "https://files.pythonhosted.org/packages/20/d0/e8d567a7cff7b04e017ae164d98011f1e1894269fe8e90ea187a3cbfb562/pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523", size = 1909590, upload-time = "2025-04-02T09:48:12.861Z" }, - { url = "https://files.pythonhosted.org/packages/ef/fd/24ea4302d7a527d672c5be06e17df16aabfb4e9fdc6e0b345c21580f3d2a/pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d", size = 1812963, upload-time = "2025-04-02T09:48:14.553Z" }, - { url = "https://files.pythonhosted.org/packages/5f/95/4fbc2ecdeb5c1c53f1175a32d870250194eb2fdf6291b795ab08c8646d5d/pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c", size = 1986896, upload-time = "2025-04-02T09:48:16.222Z" }, - { url = "https://files.pythonhosted.org/packages/71/ae/fe31e7f4a62431222d8f65a3bd02e3fa7e6026d154a00818e6d30520ea77/pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18", size = 1931810, upload-time = "2025-04-02T09:48:17.97Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, + { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, + { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, + { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, + { 
url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, + { url = 
"https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, + { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, ] [[package]] @@ -750,6 +1121,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634, upload-time = "2025-03-02T12:54:52.069Z" }, ] +[[package]] +name = "pytest-asyncio" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d0/d4/14f53324cb1a6381bef29d698987625d80052bb33932d8e7cbf9b337b17c/pytest_asyncio-1.0.0.tar.gz", hash = "sha256:d15463d13f4456e1ead2594520216b225a16f781e144f8fdf6c5bb4667c48b3f", size = 46960, upload-time = "2025-05-26T04:54:40.484Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/05/ce271016e351fddc8399e546f6e23761967ee09c8c568bbfbecb0c150171/pytest_asyncio-1.0.0-py3-none-any.whl", hash = "sha256:4f024da9f1ef945e680dc68610b52550e36590a67fd31bb3b4943979a1f90ef3", size = 15976, upload-time = "2025-05-26T04:54:39.035Z" }, +] + [[package]] name = "pytest-cov" version = "6.1.1" @@ -772,6 +1155,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload-time = "2025-03-25T10:14:55.034Z" }, ] +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, +] + [[package]] name = "pywin32-ctypes" version = "0.2.3" @@ -816,6 +1208,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a9/10/e4b1e0e5b6b6745c8098c275b69bc9d73e9542d5c7da4f137542b499ed44/readchar-4.2.1-py3-none-any.whl", hash = "sha256:a769305cd3994bb5fa2764aa4073452dc105a4ec39068ffe6efd3c20c60acc77", size = 9350, upload-time = "2024-11-04T18:28:02.859Z" }, ] +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, +] + [[package]] name = "requests" version = "2.32.3" @@ -844,6 +1250,82 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, ] +[[package]] +name = "rpds-py" +version = "0.26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/aa/4456d84bbb54adc6a916fb10c9b374f78ac840337644e4a5eda229c81275/rpds_py-0.26.0.tar.gz", hash = "sha256:20dae58a859b0906f0685642e591056f1e787f3a8b39c8e8749a45dc7d26bdb0", size = 27385, upload-time = "2025-07-01T15:57:13.958Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/86/90eb87c6f87085868bd077c7a9938006eb1ce19ed4d06944a90d3560fce2/rpds_py-0.26.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:894514d47e012e794f1350f076c427d2347ebf82f9b958d554d12819849a369d", size = 363933, upload-time = "2025-07-01T15:54:15.734Z" }, + { url = "https://files.pythonhosted.org/packages/63/78/4469f24d34636242c924626082b9586f064ada0b5dbb1e9d096ee7a8e0c6/rpds_py-0.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc921b96fa95a097add244da36a1d9e4f3039160d1d30f1b35837bf108c21136", size = 350447, upload-time = "2025-07-01T15:54:16.922Z" }, + { url = "https://files.pythonhosted.org/packages/ad/91/c448ed45efdfdade82348d5e7995e15612754826ea640afc20915119734f/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1157659470aa42a75448b6e943c895be8c70531c43cb78b9ba990778955582", size = 384711, upload-time = "2025-07-01T15:54:18.101Z" }, + { url = "https://files.pythonhosted.org/packages/ec/43/e5c86fef4be7f49828bdd4ecc8931f0287b1152c0bb0163049b3218740e7/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:521ccf56f45bb3a791182dc6b88ae5f8fa079dd705ee42138c76deb1238e554e", size = 400865, upload-time = "2025-07-01T15:54:19.295Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/34/e00f726a4d44f22d5c5fe2e5ddd3ac3d7fd3f74a175607781fbdd06fe375/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9def736773fd56b305c0eef698be5192c77bfa30d55a0e5885f80126c4831a15", size = 517763, upload-time = "2025-07-01T15:54:20.858Z" }, + { url = "https://files.pythonhosted.org/packages/52/1c/52dc20c31b147af724b16104500fba13e60123ea0334beba7b40e33354b4/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdad4ea3b4513b475e027be79e5a0ceac8ee1c113a1a11e5edc3c30c29f964d8", size = 406651, upload-time = "2025-07-01T15:54:22.508Z" }, + { url = "https://files.pythonhosted.org/packages/2e/77/87d7bfabfc4e821caa35481a2ff6ae0b73e6a391bb6b343db2c91c2b9844/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82b165b07f416bdccf5c84546a484cc8f15137ca38325403864bfdf2b5b72f6a", size = 386079, upload-time = "2025-07-01T15:54:23.987Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d4/7f2200c2d3ee145b65b3cddc4310d51f7da6a26634f3ac87125fd789152a/rpds_py-0.26.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d04cab0a54b9dba4d278fe955a1390da3cf71f57feb78ddc7cb67cbe0bd30323", size = 421379, upload-time = "2025-07-01T15:54:25.073Z" }, + { url = "https://files.pythonhosted.org/packages/ae/13/9fdd428b9c820869924ab62236b8688b122baa22d23efdd1c566938a39ba/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:79061ba1a11b6a12743a2b0f72a46aa2758613d454aa6ba4f5a265cc48850158", size = 562033, upload-time = "2025-07-01T15:54:26.225Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e1/b69686c3bcbe775abac3a4c1c30a164a2076d28df7926041f6c0eb5e8d28/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f405c93675d8d4c5ac87364bb38d06c988e11028a64b52a47158a355079661f3", size = 591639, upload-time = "2025-07-01T15:54:27.424Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/c9/1e3d8c8863c84a90197ac577bbc3d796a92502124c27092413426f670990/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dafd4c44b74aa4bed4b250f1aed165b8ef5de743bcca3b88fc9619b6087093d2", size = 557105, upload-time = "2025-07-01T15:54:29.93Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c5/90c569649057622959f6dcc40f7b516539608a414dfd54b8d77e3b201ac0/rpds_py-0.26.0-cp312-cp312-win32.whl", hash = "sha256:3da5852aad63fa0c6f836f3359647870e21ea96cf433eb393ffa45263a170d44", size = 223272, upload-time = "2025-07-01T15:54:31.128Z" }, + { url = "https://files.pythonhosted.org/packages/7d/16/19f5d9f2a556cfed454eebe4d354c38d51c20f3db69e7b4ce6cff904905d/rpds_py-0.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf47cfdabc2194a669dcf7a8dbba62e37a04c5041d2125fae0233b720da6f05c", size = 234995, upload-time = "2025-07-01T15:54:32.195Z" }, + { url = "https://files.pythonhosted.org/packages/83/f0/7935e40b529c0e752dfaa7880224771b51175fce08b41ab4a92eb2fbdc7f/rpds_py-0.26.0-cp312-cp312-win_arm64.whl", hash = "sha256:20ab1ae4fa534f73647aad289003f1104092890849e0266271351922ed5574f8", size = 223198, upload-time = "2025-07-01T15:54:33.271Z" }, + { url = "https://files.pythonhosted.org/packages/6a/67/bb62d0109493b12b1c6ab00de7a5566aa84c0e44217c2d94bee1bd370da9/rpds_py-0.26.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:696764a5be111b036256c0b18cd29783fab22154690fc698062fc1b0084b511d", size = 363917, upload-time = "2025-07-01T15:54:34.755Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f3/34e6ae1925a5706c0f002a8d2d7f172373b855768149796af87bd65dcdb9/rpds_py-0.26.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e6c15d2080a63aaed876e228efe4f814bc7889c63b1e112ad46fdc8b368b9e1", size = 350073, upload-time = "2025-07-01T15:54:36.292Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/83/1953a9d4f4e4de7fd0533733e041c28135f3c21485faaef56a8aadbd96b5/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390e3170babf42462739a93321e657444f0862c6d722a291accc46f9d21ed04e", size = 384214, upload-time = "2025-07-01T15:54:37.469Z" }, + { url = "https://files.pythonhosted.org/packages/48/0e/983ed1b792b3322ea1d065e67f4b230f3b96025f5ce3878cc40af09b7533/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7da84c2c74c0f5bc97d853d9e17bb83e2dcafcff0dc48286916001cc114379a1", size = 400113, upload-time = "2025-07-01T15:54:38.954Z" }, + { url = "https://files.pythonhosted.org/packages/69/7f/36c0925fff6f660a80be259c5b4f5e53a16851f946eb080351d057698528/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c5fe114a6dd480a510b6d3661d09d67d1622c4bf20660a474507aaee7eeeee9", size = 515189, upload-time = "2025-07-01T15:54:40.57Z" }, + { url = "https://files.pythonhosted.org/packages/13/45/cbf07fc03ba7a9b54662c9badb58294ecfb24f828b9732970bd1a431ed5c/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3100b3090269f3a7ea727b06a6080d4eb7439dca4c0e91a07c5d133bb1727ea7", size = 406998, upload-time = "2025-07-01T15:54:43.025Z" }, + { url = "https://files.pythonhosted.org/packages/6c/b0/8fa5e36e58657997873fd6a1cf621285ca822ca75b4b3434ead047daa307/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c03c9b0c64afd0320ae57de4c982801271c0c211aa2d37f3003ff5feb75bb04", size = 385903, upload-time = "2025-07-01T15:54:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f7/b25437772f9f57d7a9fbd73ed86d0dcd76b4c7c6998348c070d90f23e315/rpds_py-0.26.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5963b72ccd199ade6ee493723d18a3f21ba7d5b957017607f815788cef50eaf1", size = 419785, upload-time = "2025-07-01T15:54:46.043Z" }, + { 
url = "https://files.pythonhosted.org/packages/a7/6b/63ffa55743dfcb4baf2e9e77a0b11f7f97ed96a54558fcb5717a4b2cd732/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da4e873860ad5bab3291438525cae80169daecbfafe5657f7f5fb4d6b3f96b9", size = 561329, upload-time = "2025-07-01T15:54:47.64Z" }, + { url = "https://files.pythonhosted.org/packages/2f/07/1f4f5e2886c480a2346b1e6759c00278b8a69e697ae952d82ae2e6ee5db0/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5afaddaa8e8c7f1f7b4c5c725c0070b6eed0228f705b90a1732a48e84350f4e9", size = 590875, upload-time = "2025-07-01T15:54:48.9Z" }, + { url = "https://files.pythonhosted.org/packages/cc/bc/e6639f1b91c3a55f8c41b47d73e6307051b6e246254a827ede730624c0f8/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4916dc96489616a6f9667e7526af8fa693c0fdb4f3acb0e5d9f4400eb06a47ba", size = 556636, upload-time = "2025-07-01T15:54:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/05/4c/b3917c45566f9f9a209d38d9b54a1833f2bb1032a3e04c66f75726f28876/rpds_py-0.26.0-cp313-cp313-win32.whl", hash = "sha256:2a343f91b17097c546b93f7999976fd6c9d5900617aa848c81d794e062ab302b", size = 222663, upload-time = "2025-07-01T15:54:52.023Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0b/0851bdd6025775aaa2365bb8de0697ee2558184c800bfef8d7aef5ccde58/rpds_py-0.26.0-cp313-cp313-win_amd64.whl", hash = "sha256:0a0b60701f2300c81b2ac88a5fb893ccfa408e1c4a555a77f908a2596eb875a5", size = 234428, upload-time = "2025-07-01T15:54:53.692Z" }, + { url = "https://files.pythonhosted.org/packages/ed/e8/a47c64ed53149c75fb581e14a237b7b7cd18217e969c30d474d335105622/rpds_py-0.26.0-cp313-cp313-win_arm64.whl", hash = "sha256:257d011919f133a4746958257f2c75238e3ff54255acd5e3e11f3ff41fd14256", size = 222571, upload-time = "2025-07-01T15:54:54.822Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/bf/3d970ba2e2bcd17d2912cb42874107390f72873e38e79267224110de5e61/rpds_py-0.26.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:529c8156d7506fba5740e05da8795688f87119cce330c244519cf706a4a3d618", size = 360475, upload-time = "2025-07-01T15:54:56.228Z" }, + { url = "https://files.pythonhosted.org/packages/82/9f/283e7e2979fc4ec2d8ecee506d5a3675fce5ed9b4b7cb387ea5d37c2f18d/rpds_py-0.26.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f53ec51f9d24e9638a40cabb95078ade8c99251945dad8d57bf4aabe86ecee35", size = 346692, upload-time = "2025-07-01T15:54:58.561Z" }, + { url = "https://files.pythonhosted.org/packages/e3/03/7e50423c04d78daf391da3cc4330bdb97042fc192a58b186f2d5deb7befd/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab504c4d654e4a29558eaa5bb8cea5fdc1703ea60a8099ffd9c758472cf913f", size = 379415, upload-time = "2025-07-01T15:54:59.751Z" }, + { url = "https://files.pythonhosted.org/packages/57/00/d11ee60d4d3b16808432417951c63df803afb0e0fc672b5e8d07e9edaaae/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd0641abca296bc1a00183fe44f7fced8807ed49d501f188faa642d0e4975b83", size = 391783, upload-time = "2025-07-01T15:55:00.898Z" }, + { url = "https://files.pythonhosted.org/packages/08/b3/1069c394d9c0d6d23c5b522e1f6546b65793a22950f6e0210adcc6f97c3e/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b312fecc1d017b5327afa81d4da1480f51c68810963a7336d92203dbb3d4f1", size = 512844, upload-time = "2025-07-01T15:55:02.201Z" }, + { url = "https://files.pythonhosted.org/packages/08/3b/c4fbf0926800ed70b2c245ceca99c49f066456755f5d6eb8863c2c51e6d0/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c741107203954f6fc34d3066d213d0a0c40f7bb5aafd698fb39888af277c70d8", size = 402105, upload-time = "2025-07-01T15:55:03.698Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/b0/db69b52ca07413e568dae9dc674627a22297abb144c4d6022c6d78f1e5cc/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3e55a7db08dc9a6ed5fb7103019d2c1a38a349ac41901f9f66d7f95750942f", size = 383440, upload-time = "2025-07-01T15:55:05.398Z" }, + { url = "https://files.pythonhosted.org/packages/4c/e1/c65255ad5b63903e56b3bb3ff9dcc3f4f5c3badde5d08c741ee03903e951/rpds_py-0.26.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e851920caab2dbcae311fd28f4313c6953993893eb5c1bb367ec69d9a39e7ed", size = 412759, upload-time = "2025-07-01T15:55:08.316Z" }, + { url = "https://files.pythonhosted.org/packages/e4/22/bb731077872377a93c6e93b8a9487d0406c70208985831034ccdeed39c8e/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dfbf280da5f876d0b00c81f26bedce274e72a678c28845453885a9b3c22ae632", size = 556032, upload-time = "2025-07-01T15:55:09.52Z" }, + { url = "https://files.pythonhosted.org/packages/e0/8b/393322ce7bac5c4530fb96fc79cc9ea2f83e968ff5f6e873f905c493e1c4/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1cc81d14ddfa53d7f3906694d35d54d9d3f850ef8e4e99ee68bc0d1e5fed9a9c", size = 585416, upload-time = "2025-07-01T15:55:11.216Z" }, + { url = "https://files.pythonhosted.org/packages/49/ae/769dc372211835bf759319a7aae70525c6eb523e3371842c65b7ef41c9c6/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dca83c498b4650a91efcf7b88d669b170256bf8017a5db6f3e06c2bf031f57e0", size = 554049, upload-time = "2025-07-01T15:55:13.004Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f9/4c43f9cc203d6ba44ce3146246cdc38619d92c7bd7bad4946a3491bd5b70/rpds_py-0.26.0-cp313-cp313t-win32.whl", hash = "sha256:4d11382bcaf12f80b51d790dee295c56a159633a8e81e6323b16e55d81ae37e9", size = 218428, upload-time = "2025-07-01T15:55:14.486Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/8b/9286b7e822036a4a977f2f1e851c7345c20528dbd56b687bb67ed68a8ede/rpds_py-0.26.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff110acded3c22c033e637dd8896e411c7d3a11289b2edf041f86663dbc791e9", size = 231524, upload-time = "2025-07-01T15:55:15.745Z" }, + { url = "https://files.pythonhosted.org/packages/55/07/029b7c45db910c74e182de626dfdae0ad489a949d84a468465cd0ca36355/rpds_py-0.26.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:da619979df60a940cd434084355c514c25cf8eb4cf9a508510682f6c851a4f7a", size = 364292, upload-time = "2025-07-01T15:55:17.001Z" }, + { url = "https://files.pythonhosted.org/packages/13/d1/9b3d3f986216b4d1f584878dca15ce4797aaf5d372d738974ba737bf68d6/rpds_py-0.26.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ea89a2458a1a75f87caabefe789c87539ea4e43b40f18cff526052e35bbb4fdf", size = 350334, upload-time = "2025-07-01T15:55:18.922Z" }, + { url = "https://files.pythonhosted.org/packages/18/98/16d5e7bc9ec715fa9668731d0cf97f6b032724e61696e2db3d47aeb89214/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feac1045b3327a45944e7dcbeb57530339f6b17baff154df51ef8b0da34c8c12", size = 384875, upload-time = "2025-07-01T15:55:20.399Z" }, + { url = "https://files.pythonhosted.org/packages/f9/13/aa5e2b1ec5ab0e86a5c464d53514c0467bec6ba2507027d35fc81818358e/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b818a592bd69bfe437ee8368603d4a2d928c34cffcdf77c2e761a759ffd17d20", size = 399993, upload-time = "2025-07-01T15:55:21.729Z" }, + { url = "https://files.pythonhosted.org/packages/17/03/8021810b0e97923abdbab6474c8b77c69bcb4b2c58330777df9ff69dc559/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a8b0dd8648709b62d9372fc00a57466f5fdeefed666afe3fea5a6c9539a0331", size = 516683, upload-time = "2025-07-01T15:55:22.918Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/b1/da8e61c87c2f3d836954239fdbbfb477bb7b54d74974d8f6fcb34342d166/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d3498ad0df07d81112aa6ec6c95a7e7b1ae00929fb73e7ebee0f3faaeabad2f", size = 408825, upload-time = "2025-07-01T15:55:24.207Z" }, + { url = "https://files.pythonhosted.org/packages/38/bc/1fc173edaaa0e52c94b02a655db20697cb5fa954ad5a8e15a2c784c5cbdd/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24a4146ccb15be237fdef10f331c568e1b0e505f8c8c9ed5d67759dac58ac246", size = 387292, upload-time = "2025-07-01T15:55:25.554Z" }, + { url = "https://files.pythonhosted.org/packages/7c/eb/3a9bb4bd90867d21916f253caf4f0d0be7098671b6715ad1cead9fe7bab9/rpds_py-0.26.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9a63785467b2d73635957d32a4f6e73d5e4df497a16a6392fa066b753e87387", size = 420435, upload-time = "2025-07-01T15:55:27.798Z" }, + { url = "https://files.pythonhosted.org/packages/cd/16/e066dcdb56f5632713445271a3f8d3d0b426d51ae9c0cca387799df58b02/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:de4ed93a8c91debfd5a047be327b7cc8b0cc6afe32a716bbbc4aedca9e2a83af", size = 562410, upload-time = "2025-07-01T15:55:29.057Z" }, + { url = "https://files.pythonhosted.org/packages/60/22/ddbdec7eb82a0dc2e455be44c97c71c232983e21349836ce9f272e8a3c29/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:caf51943715b12af827696ec395bfa68f090a4c1a1d2509eb4e2cb69abbbdb33", size = 590724, upload-time = "2025-07-01T15:55:30.719Z" }, + { url = "https://files.pythonhosted.org/packages/2c/b4/95744085e65b7187d83f2fcb0bef70716a1ea0a9e5d8f7f39a86e5d83424/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4a59e5bc386de021f56337f757301b337d7ab58baa40174fb150accd480bc953", size = 558285, upload-time = "2025-07-01T15:55:31.981Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/37/6309a75e464d1da2559446f9c811aa4d16343cebe3dbb73701e63f760caa/rpds_py-0.26.0-cp314-cp314-win32.whl", hash = "sha256:92c8db839367ef16a662478f0a2fe13e15f2227da3c1430a782ad0f6ee009ec9", size = 223459, upload-time = "2025-07-01T15:55:33.312Z" }, + { url = "https://files.pythonhosted.org/packages/d9/6f/8e9c11214c46098b1d1391b7e02b70bb689ab963db3b19540cba17315291/rpds_py-0.26.0-cp314-cp314-win_amd64.whl", hash = "sha256:b0afb8cdd034150d4d9f53926226ed27ad15b7f465e93d7468caaf5eafae0d37", size = 236083, upload-time = "2025-07-01T15:55:34.933Z" }, + { url = "https://files.pythonhosted.org/packages/47/af/9c4638994dd623d51c39892edd9d08e8be8220a4b7e874fa02c2d6e91955/rpds_py-0.26.0-cp314-cp314-win_arm64.whl", hash = "sha256:ca3f059f4ba485d90c8dc75cb5ca897e15325e4e609812ce57f896607c1c0867", size = 223291, upload-time = "2025-07-01T15:55:36.202Z" }, + { url = "https://files.pythonhosted.org/packages/4d/db/669a241144460474aab03e254326b32c42def83eb23458a10d163cb9b5ce/rpds_py-0.26.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:5afea17ab3a126006dc2f293b14ffc7ef3c85336cf451564a0515ed7648033da", size = 361445, upload-time = "2025-07-01T15:55:37.483Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2d/133f61cc5807c6c2fd086a46df0eb8f63a23f5df8306ff9f6d0fd168fecc/rpds_py-0.26.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:69f0c0a3df7fd3a7eec50a00396104bb9a843ea6d45fcc31c2d5243446ffd7a7", size = 347206, upload-time = "2025-07-01T15:55:38.828Z" }, + { url = "https://files.pythonhosted.org/packages/05/bf/0e8fb4c05f70273469eecf82f6ccf37248558526a45321644826555db31b/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:801a71f70f9813e82d2513c9a96532551fce1e278ec0c64610992c49c04c2dad", size = 380330, upload-time = "2025-07-01T15:55:40.175Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/a8/060d24185d8b24d3923322f8d0ede16df4ade226a74e747b8c7c978e3dd3/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df52098cde6d5e02fa75c1f6244f07971773adb4a26625edd5c18fee906fa84d", size = 392254, upload-time = "2025-07-01T15:55:42.015Z" }, + { url = "https://files.pythonhosted.org/packages/b9/7b/7c2e8a9ee3e6bc0bae26bf29f5219955ca2fbb761dca996a83f5d2f773fe/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bc596b30f86dc6f0929499c9e574601679d0341a0108c25b9b358a042f51bca", size = 516094, upload-time = "2025-07-01T15:55:43.603Z" }, + { url = "https://files.pythonhosted.org/packages/75/d6/f61cafbed8ba1499b9af9f1777a2a199cd888f74a96133d8833ce5eaa9c5/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dfbe56b299cf5875b68eb6f0ebaadc9cac520a1989cac0db0765abfb3709c19", size = 402889, upload-time = "2025-07-01T15:55:45.275Z" }, + { url = "https://files.pythonhosted.org/packages/92/19/c8ac0a8a8df2dd30cdec27f69298a5c13e9029500d6d76718130f5e5be10/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac64f4b2bdb4ea622175c9ab7cf09444e412e22c0e02e906978b3b488af5fde8", size = 384301, upload-time = "2025-07-01T15:55:47.098Z" }, + { url = "https://files.pythonhosted.org/packages/41/e1/6b1859898bc292a9ce5776016c7312b672da00e25cec74d7beced1027286/rpds_py-0.26.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:181ef9b6bbf9845a264f9aa45c31836e9f3c1f13be565d0d010e964c661d1e2b", size = 412891, upload-time = "2025-07-01T15:55:48.412Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b9/ceb39af29913c07966a61367b3c08b4f71fad841e32c6b59a129d5974698/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:49028aa684c144ea502a8e847d23aed5e4c2ef7cadfa7d5eaafcb40864844b7a", size = 557044, upload-time = "2025-07-01T15:55:49.816Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/27/35637b98380731a521f8ec4f3fd94e477964f04f6b2f8f7af8a2d889a4af/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e5d524d68a474a9688336045bbf76cb0def88549c1b2ad9dbfec1fb7cfbe9170", size = 585774, upload-time = "2025-07-01T15:55:51.192Z" }, + { url = "https://files.pythonhosted.org/packages/52/d9/3f0f105420fecd18551b678c9a6ce60bd23986098b252a56d35781b3e7e9/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c1851f429b822831bd2edcbe0cfd12ee9ea77868f8d3daf267b189371671c80e", size = 554886, upload-time = "2025-07-01T15:55:52.541Z" }, + { url = "https://files.pythonhosted.org/packages/6b/c5/347c056a90dc8dd9bc240a08c527315008e1b5042e7a4cf4ac027be9d38a/rpds_py-0.26.0-cp314-cp314t-win32.whl", hash = "sha256:7bdb17009696214c3b66bb3590c6d62e14ac5935e53e929bcdbc5a495987a84f", size = 219027, upload-time = "2025-07-01T15:55:53.874Z" }, + { url = "https://files.pythonhosted.org/packages/75/04/5302cea1aa26d886d34cadbf2dc77d90d7737e576c0065f357b96dc7a1a6/rpds_py-0.26.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f14440b9573a6f76b4ee4770c13f0b5921f71dde3b6fcb8dabbefd13b7fe05d7", size = 232821, upload-time = "2025-07-01T15:55:55.167Z" }, +] + [[package]] name = "ruff" version = "0.11.11" @@ -1143,3 +1625,68 @@ sdist = { url = "https://files.pythonhosted.org/packages/72/b2/e3edc608823348e62 wheels = [ { url = "https://files.pythonhosted.org/packages/33/6b/0dc75b64a764ea1cb8e4c32d1fb273c147304d4e5483cd58be482dc62e45/xmod-1.8.1-py3-none-any.whl", hash = "sha256:a24e9458a4853489042522bdca9e50ee2eac5ab75c809a91150a8a7f40670d48", size = 4610, upload-time = "2024-01-04T18:03:16.078Z" }, ] + +[[package]] +name = "yarl" +version = "1.20.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428, upload-time = "2025-06-10T00:46:09.923Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", size = 133667, upload-time = "2025-06-10T00:43:44.369Z" }, + { url = "https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", size = 91025, upload-time = "2025-06-10T00:43:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", size = 89709, upload-time = "2025-06-10T00:43:48.22Z" }, + { url = "https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", size = 352287, upload-time = "2025-06-10T00:43:49.924Z" }, + { url = "https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", size = 345429, upload-time = "2025-06-10T00:43:51.7Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", size = 365429, upload-time = "2025-06-10T00:43:53.494Z" }, + { url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", size = 363862, upload-time = "2025-06-10T00:43:55.766Z" }, + { url = "https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", size = 355616, upload-time = "2025-06-10T00:43:58.056Z" }, + { url = "https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", size = 339954, upload-time = "2025-06-10T00:43:59.773Z" }, + { url = "https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", size = 365575, upload-time = "2025-06-10T00:44:02.051Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", size = 365061, upload-time = "2025-06-10T00:44:04.196Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", size = 364142, upload-time = "2025-06-10T00:44:06.527Z" }, + { url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", size = 381894, upload-time = "2025-06-10T00:44:08.379Z" }, + { url = "https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", size = 383378, upload-time = "2025-06-10T00:44:10.51Z" }, + { url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", size = 374069, upload-time = "2025-06-10T00:44:12.834Z" }, + { url = "https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", size = 81249, upload-time = "2025-06-10T00:44:14.731Z" }, + { url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710, upload-time = "2025-06-10T00:44:16.716Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811, upload-time = "2025-06-10T00:44:18.933Z" }, + { url = "https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078, upload-time = "2025-06-10T00:44:20.635Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748, upload-time = "2025-06-10T00:44:22.34Z" }, + { url = "https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595, upload-time = "2025-06-10T00:44:24.314Z" }, + { url = "https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616, upload-time = "2025-06-10T00:44:26.167Z" }, + { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324, upload-time = "2025-06-10T00:44:27.915Z" }, + { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676, upload-time = "2025-06-10T00:44:30.041Z" }, + { url = "https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614, upload-time = "2025-06-10T00:44:32.171Z" }, + { url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766, upload-time = "2025-06-10T00:44:34.494Z" }, + { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615, upload-time = "2025-06-10T00:44:36.856Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982, upload-time = "2025-06-10T00:44:39.141Z" }, + { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792, upload-time = "2025-06-10T00:44:40.934Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049, upload-time = "2025-06-10T00:44:42.854Z" }, + { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774, upload-time = "2025-06-10T00:44:45.275Z" }, + { url = "https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252, upload-time = "2025-06-10T00:44:47.31Z" }, + { url = "https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198, upload-time = "2025-06-10T00:44:49.164Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346, upload-time = "2025-06-10T00:44:51.182Z" }, + { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826, upload-time = "2025-06-10T00:44:52.883Z" }, + { url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217, upload-time = "2025-06-10T00:44:54.658Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700, upload-time = "2025-06-10T00:44:56.784Z" }, + { url = "https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644, upload-time = "2025-06-10T00:44:59.071Z" }, + { url = "https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452, upload-time = "2025-06-10T00:45:01.605Z" }, + { url = "https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", size = 346378, upload-time = "2025-06-10T00:45:03.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261, upload-time = "2025-06-10T00:45:05.992Z" }, + { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987, upload-time = "2025-06-10T00:45:08.227Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361, upload-time = "2025-06-10T00:45:10.11Z" }, + { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460, upload-time = "2025-06-10T00:45:12.055Z" }, + { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486, upload-time = "2025-06-10T00:45:13.995Z" }, + { url = "https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219, upload-time = "2025-06-10T00:45:16.479Z" }, + { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693, upload-time = "2025-06-10T00:45:18.399Z" }, + { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803, upload-time = "2025-06-10T00:45:20.677Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709, upload-time = "2025-06-10T00:45:23.221Z" }, + { url = "https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591, upload-time = "2025-06-10T00:45:25.793Z" }, + { url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003, upload-time = "2025-06-10T00:45:27.752Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, +] From 2f401b597804f0b7685d4767227c327217ce5997 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 4 Jul 2025 17:26:44 -0400 Subject: [PATCH 101/565] chore(version): bump version to 1.0.6 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4e109bba..a92c58a3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.0.5" +version = "1.0.6" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." 
readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 11a06d54..2d70d773 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.0.5" +version = "1.0.6" source = { virtual = "." } dependencies = [ { name = "fastmcp" }, From dbacc81d57a29f38cc30b5fa8f736cd4b5596a60 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 4 Jul 2025 21:27:14 +0000 Subject: [PATCH 102/565] chore(dxt): update manifest.json version to v1.0.6 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 47f91790..68228c21 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.0.5", + "version": "1.0.6", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From 7076bde44fdf11e5d3b56730dc5a6ea8dfc72e4a Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Fri, 4 Jul 2025 17:29:33 -0400 Subject: [PATCH 103/565] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 21794306..3f8f6012 100644 --- a/README.md +++ b/README.md @@ -31,7 +31,7 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c > - **Job Details** (`get_job_details`): Retrieve specific job posting details using direct LinkedIn job URLs > - **Session Management** (`close_session`): Properly close browser session and clean up resources -**Known Issues:** +**Known Issues: (should be fixed after this [PR](https://github.com/joeyism/linkedin_scraper/pull/252) is merged)** > [!WARNING] > - **Job Search** (`search_jobs`): Compatibility issues with LinkedIn's search interface > - **Recommended Jobs** (`get_recommended_jobs`): Selenium method compatibility issues From ad4b0ce48a16615015beb59fe2e4d639e5d0b675 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 4 Jul 2025 18:15:21 -0400 Subject: [PATCH 104/565] fix(Dockerfile): set entrypoint and default --no-setup flag --- Dockerfile | 5 +++-- 1 file 
changed, 3 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 003c7c49..1d1d57d9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -24,5 +24,6 @@ RUN --mount=type=cache,target=/root/.cache/uv \ RUN adduser -D -u 1000 mcpuser && chown -R mcpuser:mcpuser /app USER mcpuser -# Default command -CMD ["uv", "run", "python", "main.py", "--no-setup"] +# Set entrypoint and default arguments +ENTRYPOINT ["uv", "run", "python", "main.py", "--no-setup"] +CMD [] From e620cf9a26d02bb3e8dc1d379c3547c512cf870d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 4 Jul 2025 18:20:00 -0400 Subject: [PATCH 105/565] chore(version): bump version to 1.0.7 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a92c58a3..5284dcfc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.0.6" +version = "1.0.7" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 2d70d773..c8c796cb 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.0.6" +version = "1.0.7" source = { virtual = "." 
} dependencies = [ { name = "fastmcp" }, From 69cb60a747f5ee6836b4779a769ddb74ee2dc70e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 4 Jul 2025 22:20:26 +0000 Subject: [PATCH 106/565] chore(dxt): update manifest.json version to v1.0.7 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 68228c21..5f45a5e2 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.0.6", + "version": "1.0.7", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From 87b9706f8b08666e98e551f3a2fa604be3e76520 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 4 Jul 2025 19:21:07 -0400 Subject: [PATCH 107/565] docs(README): enhance configuration and troubleshooting sections --- README.md | 34 +++++++++++++++++++++++++++++----- main.py | 2 +- 2 files changed, 30 insertions(+), 6 deletions(-) diff --git 
a/README.md b/README.md index 3f8f6012..7703b02f 100644 --- a/README.md +++ b/README.md @@ -69,14 +69,36 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c ```
-๐Ÿณ Manual Docker Usage +๐Ÿ”ง Configuration + +**Transport Modes:** +- **Default (stdio)**: Standard communication for local MCP servers +- **Streamable HTTP**: For a web-based MCP server + +**CLI Options:** +- `--debug` - Enable detailed logging +- `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call +- `--transport {stdio,streamable-http}` - Set transport mode +- `--host HOST` - HTTP server host (default: 127.0.0.1) +- `--port PORT` - HTTP server port (default: 8000) +- `--path PATH` - HTTP server path (default: /mcp) +**HTTP Mode Example (for web-based MCP clients):** ```bash docker run -i --rm \ -e LINKEDIN_EMAIL="your.email@example.com" \ -e LINKEDIN_PASSWORD="your_password" \ - stickerdaniel/linkedin-mcp-server + -p 8080:8080 \ + stickerdaniel/linkedin-mcp-server \ + --transport streamable-http --host 0.0.0.0 --port 8080 --path /mcp ``` +**Test with mcp inspector:** +1. Install and run mcp inspector ```bunx @modelcontextprotocol/inspector``` +2. Click pre-filled token url to open the inspector in your browser +3. Select `Streamable HTTP` as `Transport Type` +4. Set `URL` to `http://localhost:8080/mcp` +5. Connect +6. Test tools
@@ -90,6 +112,7 @@ docker run -i --rm \ **Login issues:** - Ensure your LinkedIn credentials are set and correct - LinkedIn may require a login confirmation in the LinkedIn mobile app +- You might get a captcha challenge if you logged in a lot of times in a short period of time, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode where you can debug the login process (solve captcha manually) ## ๐Ÿ“ฆ Claude Desktop (DXT Extension) @@ -112,6 +135,7 @@ docker run -i --rm \ **Login issues:** - Ensure your LinkedIn credentials are set and correct - LinkedIn may require a login confirmation in the LinkedIn mobile app +- You might get a captcha challenge if you logged in a lot of times in a short period of time, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode where you can debug the login process (solve captcha manually) ## ๐Ÿ Local Setup (Develop & Contribute) @@ -155,7 +179,7 @@ uv run main.py --no-headless --no-lazy-init **CLI Options:** - `--no-headless` - Show browser window (debugging) - `--debug` - Enable detailed logging -- `--no-setup` - Skip credential prompts (make sure to set `LINKEDIN_EMAIL` and `LINKEDIN_PASSWORD` in env) +- `--no-setup` - Skip credential prompts (make sure to set `LINKEDIN_EMAIL` and `LINKEDIN_PASSWORD` in env or run the server once manually, then it will be stored in your OS keychain and you can run the server without credentials) - `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call **Claude Desktop:** @@ -175,8 +199,8 @@ uv run main.py --no-headless --no-lazy-init
โ— Troubleshooting -**Scraping issues:** -- Use `--no-headless` to see browser actions +**Login/Scraping issues:** +- Use `--no-headless` to see browser actions (captcha challenge, LinkedIn mobile app 2fa, ...) - Add `--no-lazy-init` to attempt to login to LinkedIn immediately instead of waiting for the first tool call - Add `--debug` to see more detailed logging diff --git a/main.py b/main.py index d48520e3..8e23d5d3 100644 --- a/main.py +++ b/main.py @@ -52,7 +52,7 @@ def main() -> None: logger = logging.getLogger("linkedin_mcp_server") logger.debug(f"Server configuration: {config}") - # Initialize the driver with configuration + # Initialize the driver with configuration (initialize driver checks for lazy init options) initialize_driver() # Decide transport From a3e90f592e3be27d6295a9bdfdae3e20986c1da6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 4 Jul 2025 19:38:38 -0400 Subject: [PATCH 108/565] docs(bug_report): simpler bug report template --- .github/ISSUE_TEMPLATE/bug_report.md | 104 +++++++-------------------- 1 file changed, 25 insertions(+), 79 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index ca4d2122..394e9546 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -7,93 +7,39 @@ assignees: '' --- -## Bug Description -**Describe the bug** -A clear and concise description of what the bug is. - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Actual behavior** -What actually happened instead. 
- -## MCP Configuration & Client Info -**MCP Client Used** -- [ ] Claude Desktop -- [ ] Other MCP client (specify): ___________ - -**Claude Desktop Configuration** -Please share your MCP configuration from Claude Desktop settings (remove sensitive info): +## Installation Method +- [ ] Docker (specify docker image version/tag): _._._ +- [ ] Claude Desktop DXT extension (specify docker image version/tag): _._._ +- [ ] Local Python setup + +## When does the error occur? +- [ ] At startup +- [ ] During tool call (specify which tool): + - [ ] get_person_profile + - [ ] get_company_profile + - [ ] get_job_details + - [ ] search_jobs + - [ ] get_recommended_jobs + - [ ] close_session + +## MCP Client Configuration + +**Claude Desktop Config** (`/Users/[username]/Library/Application Support/Claude/claude_desktop_config.json`): ```json { "mcpServers": { - "linkedin-scraper": { - // Your configuration here + "linkedin": { + // Your configuration here (remove sensitive credentials) } } } ``` -**Transport Mode** -- [ ] stdio -- [ ] sse - -## Environment Details -**Operating System** -- [ ] macOS -- [ ] Windows -- [ ] Linux - -**Python Version** -- Python version: ___________ - -**Package Manager used** -- [ ] UV (recommended) -- [ ] pip -- [ ] Other: ___________ - -**ChromeDriver Info** -- ChromeDriver location: ___________ -- Installation method: - - [ ] Auto-detected - - [ ] Manual path specified - - [ ] Environment variable - -## Tool & LinkedIn Context -**Tool Used** -- [ ] get_person_profile -- [ ] get_company_profile -- [ ] get_job_details -- [ ] search_jobs -- [ ] get_recommended_jobs -- [ ] close_session - -**LinkedIn Context** (if applicable) -- Account type: [ ] Free [ ] Premium [ ] Sales Navigator -- Two-factor authentication enabled: [ ] Yes [ ] No -- Corporate/VPN network: [ ] Yes [ ] No - -## Error Details -**Error Messages** -``` -Paste any error messages here -``` - -**Console Output/Logs** +## MCP Client Logs +**Claude Desktop Logs** 
(`/Users/[username]/Library/Logs/Claude/mcp-server-LinkedIn MCP Server.log`): ``` -Paste relevant console output or logs here +Paste relevant log entries here ``` -## Steps to Reproduce -1. Go to '...' -2. Send message '....' -3. Scroll down to '....' -4. See error - -## Screenshots/Videos -If applicable, add screenshots or videos to help explain your problem. - -## Additional Context -- Issue also occurs in `--no-headless` mode: [ ] Yes [ ] No - -Add any other context about the problem here. +## Error Description +What went wrong and what did you expect to happen? From 1d75bb79dedfb55eef03c71d53372cc44d6b9c94 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 4 Jul 2025 19:39:37 -0400 Subject: [PATCH 109/565] docs(issue_template): remove unused sections from template --- .github/ISSUE_TEMPLATE/documentation_issue.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/documentation_issue.md b/.github/ISSUE_TEMPLATE/documentation_issue.md index 0147b010..816b05a3 100644 --- a/.github/ISSUE_TEMPLATE/documentation_issue.md +++ b/.github/ISSUE_TEMPLATE/documentation_issue.md @@ -22,9 +22,6 @@ assignees: '' ## Location **Where is the documentation issue?** - [ ] README.md -- [ ] Installation section -- [ ] Configuration section -- [ ] Troubleshooting section - [ ] Code comments - [ ] Error messages - [ ] CLI help text From 70fe29f271e76bc2471eac4fa5a32685936ef458 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 5 Jul 2025 11:45:43 -0400 Subject: [PATCH 110/565] feat(exceptions): enhance error handling and logging to give accurate login action feedback --- README.md | 17 ++- linkedin_mcp_server/config/loaders.py | 3 + linkedin_mcp_server/config/secrets.py | 13 +- linkedin_mcp_server/drivers/chrome.py | 192 ++++++++++++++++++++------ linkedin_mcp_server/error_handler.py | 183 ++++++++++++++++++++++++ linkedin_mcp_server/exceptions.py | 24 ++++ linkedin_mcp_server/logging_config.py | 76 ++++++++++ linkedin_mcp_server/tools/company.py | 
115 ++++++++------- linkedin_mcp_server/tools/job.py | 91 ++++++------ linkedin_mcp_server/tools/person.py | 131 +++++++++--------- main.py | 23 ++- pyproject.toml | 2 +- uv.lock | 91 ++++++------ 13 files changed, 680 insertions(+), 281 deletions(-) create mode 100644 linkedin_mcp_server/error_handler.py create mode 100644 linkedin_mcp_server/exceptions.py create mode 100644 linkedin_mcp_server/logging_config.py diff --git a/README.md b/README.md index 7703b02f..20e66625 100644 --- a/README.md +++ b/README.md @@ -37,6 +37,17 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c > - **Recommended Jobs** (`get_recommended_jobs`): Selenium method compatibility issues > - **Company Profiles** (`get_company_profile`): Some companies can't be accessed / may return empty results (need further investigation) +## ๐Ÿ›ก๏ธ Error Handling & Non-Interactive Mode + +**NEW**: Enhanced error handling for Docker and CI/CD environments! + +The server now provides detailed error information when login fails: +- **Specific error types**: `credentials_not_found`, `invalid_credentials`, `captcha_required`, `two_factor_auth_required`, `rate_limit` +- **Non-interactive mode**: Use `--no-setup` to skip all prompts (perfect for Docker) +- **Structured responses**: Each error includes type, message, and resolution steps + +For detailed error handling documentation, see [ERROR_HANDLING.md](ERROR_HANDLING.md) + --- ## ๐Ÿณ Docker Setup (Recommended - Universal) @@ -57,7 +68,8 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c "run", "-i", "--rm", "-e", "LINKEDIN_EMAIL", "-e", "LINKEDIN_PASSWORD", - "stickerdaniel/linkedin-mcp-server" + "stickerdaniel/linkedin-mcp-server", + "--no-setup" ], "env": { "LINKEDIN_EMAIL": "your.email@example.com", @@ -76,6 +88,7 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c - **Streamable HTTP**: For a web-based MCP server **CLI Options:** +- `--no-setup` - Skip 
interactive prompts (required for Docker/non-interactive environments) - `--debug` - Enable detailed logging - `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call - `--transport {stdio,streamable-http}` - Set transport mode @@ -90,7 +103,7 @@ docker run -i --rm \ -e LINKEDIN_PASSWORD="your_password" \ -p 8080:8080 \ stickerdaniel/linkedin-mcp-server \ - --transport streamable-http --host 0.0.0.0 --port 8080 --path /mcp + --no-setup --transport streamable-http --host 0.0.0.0 --port 8080 --path /mcp ``` **Test with mcp inspector:** 1. Install and run mcp inspector ```bunx @modelcontextprotocol/inspector``` diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index 08183638..ca1ea276 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -135,6 +135,9 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.no_setup: config.server.setup = False + config.chrome.non_interactive = ( + True # Automatically set when --no-setup is used + ) if args.no_lazy_init: config.server.lazy_init = False diff --git a/linkedin_mcp_server/config/secrets.py b/linkedin_mcp_server/config/secrets.py index 0acc3d88..2ce5fddc 100644 --- a/linkedin_mcp_server/config/secrets.py +++ b/linkedin_mcp_server/config/secrets.py @@ -1,10 +1,11 @@ # src/linkedin_mcp_server/config/secrets.py import logging -from typing import Dict, Optional +from typing import Dict import inquirer # type: ignore from linkedin_mcp_server.config import get_config +from linkedin_mcp_server.exceptions import CredentialsNotFoundError from .providers import ( get_credentials_from_keyring, @@ -15,7 +16,7 @@ logger = logging.getLogger(__name__) -def get_credentials() -> Optional[Dict[str, str]]: +def get_credentials() -> Dict[str, str]: """Get LinkedIn credentials from config, keyring, or prompt.""" config = get_config() @@ -31,10 +32,12 @@ def get_credentials() -> Optional[Dict[str, str]]: print(f"Using 
LinkedIn credentials from {get_keyring_name()}") return {"email": credentials["email"], "password": credentials["password"]} - # If in non-interactive mode and no credentials found, return None + # If in non-interactive mode and no credentials found, raise error if config.chrome.non_interactive: - print("No credentials found in non-interactive mode") - return None + raise CredentialsNotFoundError( + "No LinkedIn credentials found. Please provide credentials via " + "environment variables (LINKEDIN_EMAIL, LINKEDIN_PASSWORD) or keyring." + ) # Otherwise, prompt for credentials return prompt_for_credentials() diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 63855e04..9fa86295 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -18,6 +18,18 @@ from linkedin_mcp_server.config import get_config from linkedin_mcp_server.config.providers import clear_credentials_from_keyring from linkedin_mcp_server.config.secrets import get_credentials +from linkedin_scraper.exceptions import ( + CaptchaRequiredError, + InvalidCredentialsError, + LoginTimeoutError, + RateLimitError, + SecurityChallengeError, + TwoFactorAuthError, +) +from linkedin_mcp_server.exceptions import ( + CredentialsNotFoundError, + DriverInitializationError, +) # Global driver storage to reuse sessions active_drivers: Dict[str, webdriver.Chrome] = {} @@ -87,24 +99,41 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: driver.set_page_load_timeout(60) # Try to log in - if login_to_linkedin(driver): - print("Successfully logged in to LinkedIn") - elif config.chrome.non_interactive: - # In non-interactive mode, if login fails, return None + try: + if login_to_linkedin(driver): + print("Successfully logged in to LinkedIn") + active_drivers[session_id] = driver + return driver + except ( + CaptchaRequiredError, + InvalidCredentialsError, + SecurityChallengeError, + TwoFactorAuthError, + RateLimitError, + 
LoginTimeoutError, + CredentialsNotFoundError, + ) as e: + # Clean up driver on login failure driver.quit() - return None - active_drivers[session_id] = driver - return driver + if config.chrome.non_interactive: + # In non-interactive mode, propagate the error + raise e + else: + # In interactive mode, handle the error + handle_login_error(e) + return None except Exception as e: error_msg = f"๐Ÿ›‘ Error creating web driver: {e}" print(error_msg) if config.chrome.non_interactive: - print("๐Ÿ›‘ Failed to initialize driver in non-interactive mode") - return None + raise DriverInitializationError(error_msg) + else: + raise WebDriverException(error_msg) + - raise WebDriverException(error_msg) +# Remove this function - linkedin-scraper now handles all error detection def login_to_linkedin(driver: webdriver.Chrome) -> bool: @@ -116,59 +145,105 @@ def login_to_linkedin(driver: webdriver.Chrome) -> bool: Returns: bool: True if login was successful, False otherwise + + Raises: + Various login-related errors from linkedin-scraper """ config = get_config() # Get LinkedIn credentials from config - credentials = get_credentials() + try: + credentials = get_credentials() + except CredentialsNotFoundError as e: + if config.chrome.non_interactive: + raise e + # Only prompt if not in non-interactive mode + from linkedin_mcp_server.config.secrets import prompt_for_credentials + + credentials = prompt_for_credentials() if not credentials: - print("โŒ No credentials available") - return False + raise CredentialsNotFoundError("No credentials available") - try: - # Login to LinkedIn - print("๐Ÿ”‘ Logging in to LinkedIn...") + # Login to LinkedIn using enhanced linkedin-scraper + print("๐Ÿ”‘ Logging in to LinkedIn...") - from linkedin_scraper import actions # type: ignore + from linkedin_scraper import actions # type: ignore - actions.login(driver, credentials["email"], credentials["password"]) + # linkedin-scraper now handles all error detection and raises appropriate exceptions + 
actions.login( + driver, + credentials["email"], + credentials["password"], + interactive=not config.chrome.non_interactive, + ) - print("โœ… Successfully logged in to LinkedIn") - return True - except Exception as e: - error_msg = f"Failed to login: {str(e)}" - print(f"โŒ {error_msg}") + print("โœ… Successfully logged in to LinkedIn") + return True - if not config.chrome.non_interactive: + +def handle_login_error(error: Exception) -> None: + """Handle login errors in interactive mode.""" + config = get_config() + + print(f"\nโŒ Login failed: {str(error)}") + + if isinstance(error, InvalidCredentialsError): + print("โš ๏ธ Please check your email and password.") + retry = inquirer.prompt( + [ + inquirer.Confirm( + "retry", + message="Would you like to try with different credentials?", + default=True, + ), + ] + ) + if retry and retry.get("retry", False): + # Clear credentials from keyring and try again + clear_credentials_from_keyring() + # Try again + initialize_driver() + + elif isinstance(error, CaptchaRequiredError): + print("โš ๏ธ LinkedIn requires captcha verification.") + captcha_url = getattr(error, "captcha_url", str(error)) + print(f"๐Ÿ”— Please complete the captcha at: {captcha_url}") + if config.chrome.headless: print( - "โš ๏ธ You might need to confirm the login in your LinkedIn mobile app. " - "Please try again and confirm the login." 
+ "๐Ÿ” Try running with visible browser window to complete captcha: " + "uv run main.py --no-headless" ) - if config.chrome.headless: - print( - "๐Ÿ” Try running with visible browser window to see what's happening: " - "uv run main.py --no-headless" - ) + elif isinstance(error, SecurityChallengeError): + print("โš ๏ธ LinkedIn requires a security challenge.") + challenge_url = getattr(error, "challenge_url", str(error)) + print(f"๐Ÿ”— Please complete the security challenge at: {challenge_url}") + if config.chrome.headless: + print( + "๐Ÿ” Try running with visible browser window to complete challenge: " + "uv run main.py --no-headless" + ) - retry = inquirer.prompt( - [ - inquirer.Confirm( - "retry", - message="Would you like to try with different credentials?", - default=True, - ), - ] + elif isinstance(error, TwoFactorAuthError): + print("โš ๏ธ Two-factor authentication is required.") + print( + "๐Ÿ“ฑ Please confirm the login in your LinkedIn mobile app or enter the 2FA code." + ) + if config.chrome.headless: + print( + "๐Ÿ” Try running with visible browser window to complete 2FA: " + "uv run main.py --no-headless" ) - if retry and retry.get("retry", False): - # Clear credentials from keyring and try again - clear_credentials_from_keyring() - # Try again with new credentials - return login_to_linkedin(driver) + elif isinstance(error, RateLimitError): + print("โš ๏ธ Too many login attempts. Please wait before trying again.") - return False + elif isinstance(error, LoginTimeoutError): + print("โš ๏ธ Login timed out. 
Please check your network connection.") + + else: + print("โš ๏ธ An unexpected error occurred during login.") def initialize_driver() -> None: @@ -204,8 +279,30 @@ def initialize_driver() -> None: if driver: print("โœ… Web driver initialized successfully") else: + if config.chrome.non_interactive: + raise DriverInitializationError( + "Failed to initialize web driver in non-interactive mode" + ) print("โŒ Failed to initialize web driver.") + except ( + CaptchaRequiredError, + InvalidCredentialsError, + SecurityChallengeError, + TwoFactorAuthError, + RateLimitError, + LoginTimeoutError, + CredentialsNotFoundError, + ) as e: + # In non-interactive mode, let the error propagate + if config.chrome.non_interactive: + raise e + # In interactive mode, handle gracefully + print(f"โŒ Error: {str(e)}") except WebDriverException as e: + if config.chrome.non_interactive: + raise DriverInitializationError( + f"Failed to initialize web driver: {str(e)}" + ) print(f"โŒ Failed to initialize web driver: {str(e)}") handle_driver_error() @@ -216,6 +313,11 @@ def handle_driver_error() -> None: """ config = get_config() + # Skip interactive handling in non-interactive mode + if config.chrome.non_interactive: + print("โŒ ChromeDriver is required for this application to work properly.") + sys.exit(1) + questions = [ inquirer.List( "chromedriver_action", diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py new file mode 100644 index 00000000..077ec6c8 --- /dev/null +++ b/linkedin_mcp_server/error_handler.py @@ -0,0 +1,183 @@ +""" +Centralized error handling for LinkedIn MCP Server tools. + +This module provides a DRY approach to error handling across all tools, +eliminating code duplication and ensuring consistent error responses. 
+""" + +from typing import Any, Dict, List + +from linkedin_scraper.exceptions import ( + CaptchaRequiredError, + InvalidCredentialsError, + LoginTimeoutError, + RateLimitError, + SecurityChallengeError, + TwoFactorAuthError, +) + +from linkedin_mcp_server.exceptions import ( + CredentialsNotFoundError, + LinkedInMCPError, +) + + +def handle_linkedin_errors(func): + """ + Decorator to handle LinkedIn MCP errors consistently across all tools. + + This decorator wraps tool functions and converts exceptions into + structured error responses that MCP clients can understand. + + Args: + func: The tool function to wrap + + Returns: + The decorated function that returns structured error responses + """ + + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except Exception as e: + return convert_exception_to_response(e, func.__name__) + + return wrapper + + +def handle_linkedin_errors_list(func): + """ + Decorator to handle LinkedIn MCP errors for functions that return lists. + + Similar to handle_linkedin_errors but returns errors in list format. + + Args: + func: The tool function to wrap + + Returns: + The decorated function that returns structured error responses in list format + """ + + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except Exception as e: + return convert_exception_to_list_response(e, func.__name__) + + return wrapper + + +def convert_exception_to_response( + exception: Exception, context: str = "" +) -> Dict[str, Any]: + """ + Convert an exception to a structured MCP response. 
+ + Args: + exception: The exception to convert + context: Additional context about where the error occurred + + Returns: + Structured error response dictionary + """ + if isinstance(exception, CredentialsNotFoundError): + return { + "error": "credentials_not_found", + "message": str(exception), + "resolution": "Provide LinkedIn credentials via environment variables", + } + + elif isinstance(exception, InvalidCredentialsError): + return { + "error": "invalid_credentials", + "message": str(exception), + "resolution": "Check your LinkedIn email and password", + } + + elif isinstance(exception, CaptchaRequiredError): + return { + "error": "captcha_required", + "message": str(exception), + "captcha_url": exception.captcha_url, + "resolution": "Complete the captcha challenge manually", + } + + elif isinstance(exception, SecurityChallengeError): + return { + "error": "security_challenge_required", + "message": str(exception), + "challenge_url": getattr(exception, "challenge_url", None), + "resolution": "Complete the security challenge manually", + } + + elif isinstance(exception, TwoFactorAuthError): + return { + "error": "two_factor_auth_required", + "message": str(exception), + "resolution": "Complete 2FA verification", + } + + elif isinstance(exception, RateLimitError): + return { + "error": "rate_limit", + "message": str(exception), + "resolution": "Wait before attempting to login again", + } + + elif isinstance(exception, LoginTimeoutError): + return { + "error": "login_timeout", + "message": str(exception), + "resolution": "Check network connection and try again", + } + + elif isinstance(exception, LinkedInMCPError): + return {"error": "linkedin_error", "message": str(exception)} + + else: + # Generic error handling + print(f"โŒ Error in {context}: {exception}") + return { + "error": "unknown_error", + "message": f"Failed to execute {context}: {str(exception)}", + } + + +def convert_exception_to_list_response( + exception: Exception, context: str = "" +) -> 
List[Dict[str, Any]]: + """ + Convert an exception to a list-formatted structured MCP response. + + Some tools return lists, so this provides the same error handling + but wrapped in a list format. + + Args: + exception: The exception to convert + context: Additional context about where the error occurred + + Returns: + List containing single structured error response dictionary + """ + return [convert_exception_to_response(exception, context)] + + +def safe_get_driver(): + """ + Safely get or create a driver with proper error handling. + + Returns: + Driver instance or None if initialization fails + + Raises: + LinkedInMCPError: If driver initialization fails in non-interactive mode + """ + from linkedin_mcp_server.drivers.chrome import get_or_create_driver + + driver = get_or_create_driver() + if not driver: + from linkedin_mcp_server.exceptions import DriverInitializationError + + raise DriverInitializationError("Failed to initialize Chrome driver") + + return driver diff --git a/linkedin_mcp_server/exceptions.py b/linkedin_mcp_server/exceptions.py new file mode 100644 index 00000000..4f5799fb --- /dev/null +++ b/linkedin_mcp_server/exceptions.py @@ -0,0 +1,24 @@ +""" +Custom exceptions for LinkedIn MCP Server. + +This module defines specific exception types for different error scenarios +to provide better error handling and reporting to MCP clients. +""" + + +class LinkedInMCPError(Exception): + """Base exception for LinkedIn MCP Server.""" + + pass + + +class CredentialsNotFoundError(LinkedInMCPError): + """No credentials available in non-interactive mode.""" + + pass + + +class DriverInitializationError(LinkedInMCPError): + """Failed to initialize Chrome WebDriver.""" + + pass diff --git a/linkedin_mcp_server/logging_config.py b/linkedin_mcp_server/logging_config.py new file mode 100644 index 00000000..d54dcb61 --- /dev/null +++ b/linkedin_mcp_server/logging_config.py @@ -0,0 +1,76 @@ +""" +Logging configuration for LinkedIn MCP Server. 
+ +This module provides structured JSON logging for better integration +with MCP clients and monitoring systems. +""" + +import json +import logging +from typing import Any, Dict + + +class MCPJSONFormatter(logging.Formatter): + """JSON formatter for MCP server logs.""" + + def format(self, record: logging.LogRecord) -> str: + """Format log record as JSON. + + Args: + record: The log record to format + + Returns: + JSON-formatted log string + """ + log_data: Dict[str, Any] = { + "timestamp": self.formatTime(record), + "level": record.levelname, + "logger": record.name, + "message": record.getMessage(), + } + + # Add error details if present + if hasattr(record, "error_type"): + log_data["error_type"] = record.error_type + if hasattr(record, "error_details"): + log_data["error_details"] = record.error_details + + # Add exception info if present + if record.exc_info: + log_data["exception"] = self.formatException(record.exc_info) + + return json.dumps(log_data) + + +def configure_logging(debug: bool = False, json_format: bool = False) -> None: + """Configure logging for the LinkedIn MCP Server. 
+ + Args: + debug: Whether to enable debug logging + json_format: Whether to use JSON formatting for logs + """ + log_level = logging.DEBUG if debug else logging.INFO + + if json_format: + formatter = MCPJSONFormatter() + else: + formatter = logging.Formatter( + "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + ) + + # Configure root logger + root_logger = logging.getLogger() + root_logger.setLevel(log_level) + + # Remove existing handlers + for handler in root_logger.handlers[:]: + root_logger.removeHandler(handler) + + # Add console handler + console_handler = logging.StreamHandler() + console_handler.setFormatter(formatter) + root_logger.addHandler(console_handler) + + # Set specific loggers + logging.getLogger("selenium").setLevel(logging.WARNING) + logging.getLogger("urllib3").setLevel(logging.WARNING) diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 4690a940..52e7e699 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -10,7 +10,7 @@ from fastmcp import FastMCP from linkedin_scraper import Company -from linkedin_mcp_server.drivers.chrome import get_or_create_driver +from linkedin_mcp_server.error_handler import handle_linkedin_errors, safe_get_driver def register_company_tools(mcp: FastMCP) -> None: @@ -22,6 +22,7 @@ def register_company_tools(mcp: FastMCP) -> None: """ @mcp.tool() + @handle_linkedin_errors async def get_company_profile( linkedin_url: str, get_employees: bool = False ) -> Dict[str, Any]: @@ -35,62 +36,58 @@ async def get_company_profile( Returns: Dict[str, Any]: Structured data from the company's profile """ - driver = get_or_create_driver() - - try: - print(f"๐Ÿข Scraping company: {linkedin_url}") - if get_employees: - print("โš ๏ธ Fetching employees may take a while...") - - company = Company( - linkedin_url, - driver=driver, - get_employees=get_employees, - close_on_complete=False, - ) - - # Convert showcase pages to structured dictionaries - 
showcase_pages: List[Dict[str, Any]] = [ - { - "name": page.name, - "linkedin_url": page.linkedin_url, - "followers": page.followers, - } - for page in company.showcase_pages - ] - - # Convert affiliated companies to structured dictionaries - affiliated_companies: List[Dict[str, Any]] = [ - { - "name": affiliated.name, - "linkedin_url": affiliated.linkedin_url, - "followers": affiliated.followers, - } - for affiliated in company.affiliated_companies - ] - - # Build the result dictionary - result: Dict[str, Any] = { - "name": company.name, - "about_us": company.about_us, - "website": company.website, - "phone": company.phone, - "headquarters": company.headquarters, - "founded": company.founded, - "industry": company.industry, - "company_type": company.company_type, - "company_size": company.company_size, - "specialties": company.specialties, - "showcase_pages": showcase_pages, - "affiliated_companies": affiliated_companies, - "headcount": company.headcount, + driver = safe_get_driver() + + print(f"๐Ÿข Scraping company: {linkedin_url}") + if get_employees: + print("โš ๏ธ Fetching employees may take a while...") + + company = Company( + linkedin_url, + driver=driver, + get_employees=get_employees, + close_on_complete=False, + ) + + # Convert showcase pages to structured dictionaries + showcase_pages: List[Dict[str, Any]] = [ + { + "name": page.name, + "linkedin_url": page.linkedin_url, + "followers": page.followers, } - - # Add employees if requested and available - if get_employees and company.employees: - result["employees"] = company.employees - - return result - except Exception as e: - print(f"โŒ Error scraping company: {e}") - return {"error": f"Failed to scrape company profile: {str(e)}"} + for page in company.showcase_pages + ] + + # Convert affiliated companies to structured dictionaries + affiliated_companies: List[Dict[str, Any]] = [ + { + "name": affiliated.name, + "linkedin_url": affiliated.linkedin_url, + "followers": affiliated.followers, + } + for 
affiliated in company.affiliated_companies + ] + + # Build the result dictionary + result: Dict[str, Any] = { + "name": company.name, + "about_us": company.about_us, + "website": company.website, + "phone": company.phone, + "headquarters": company.headquarters, + "founded": company.founded, + "industry": company.industry, + "company_type": company.company_type, + "company_size": company.company_size, + "specialties": company.specialties, + "showcase_pages": showcase_pages, + "affiliated_companies": affiliated_companies, + "headcount": company.headcount, + } + + # Add employees if requested and available + if get_employees and company.employees: + result["employees"] = company.employees + + return result diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 1af2a91b..42e2005f 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -1,6 +1,6 @@ # src/linkedin_mcp_server/tools/job.py """ -Job-related tools for LinkedIn MCP server. +Job tools for LinkedIn MCP server. This module provides tools for scraping LinkedIn job postings and searches. """ @@ -10,7 +10,11 @@ from fastmcp import FastMCP from linkedin_scraper import Job, JobSearch -from linkedin_mcp_server.drivers.chrome import get_or_create_driver +from linkedin_mcp_server.error_handler import ( + handle_linkedin_errors, + handle_linkedin_errors_list, + safe_get_driver, +) def register_job_tools(mcp: FastMCP) -> None: @@ -22,87 +26,72 @@ def register_job_tools(mcp: FastMCP) -> None: """ @mcp.tool() + @handle_linkedin_errors async def get_job_details(job_url: str) -> Dict[str, Any]: """ Scrape job details from a LinkedIn job posting. IMPORTANT: Only use direct LinkedIn job URLs in the format: - https://www.linkedin.com/jobs/view/[JOB_ID] + https://www.linkedin.com/jobs/view/XXXXXXXX/ where XXXXXXXX is the job ID. 
- DO NOT use collection URLs like: - - /collections/recommended/?currentJobId= - - /jobs/search/?keywords= - - If you have a collection URL, extract the job ID and convert it to the direct format. - Example: If you see currentJobId=1234567890, use https://www.linkedin.com/jobs/view/1234567890 + This tool extracts comprehensive job information including title, company, + location, posting date, application count, and full job description. Args: - job_url (str): The direct LinkedIn job URL (must be /jobs/view/[ID] format) + job_url (str): The LinkedIn job posting URL to scrape Returns: Dict[str, Any]: Structured job data including title, company, location, posting date, application count, and job description (may be empty if content is protected) """ - driver = get_or_create_driver() + driver = safe_get_driver() - try: - print(f"๐Ÿ’ผ Scraping job: {job_url}") - job = Job(job_url, driver=driver, close_on_complete=False) + print(f"๐Ÿ’ผ Scraping job: {job_url}") + job = Job(job_url, driver=driver, close_on_complete=False) - # Convert job object to a dictionary - return job.to_dict() - except Exception as e: - print(f"โŒ Error scraping job: {e}") - return {"error": f"Failed to scrape job posting: {str(e)}"} + # Convert job object to a dictionary + return job.to_dict() @mcp.tool() + @handle_linkedin_errors_list async def search_jobs(search_term: str) -> List[Dict[str, Any]]: """ - Search for jobs on LinkedIn with the given search term. + Search for jobs on LinkedIn (Note: This tool has compatibility issues). 
Args: - search_term (str): The job search query + search_term (str): The search term to use for job search Returns: List[Dict[str, Any]]: List of job search results """ - driver = get_or_create_driver() + driver = safe_get_driver() - try: - print(f"๐Ÿ” Searching jobs: {search_term}") - job_search = JobSearch(driver=driver, close_on_complete=False, scrape=False) - jobs = job_search.search(search_term) + print(f"๐Ÿ” Searching jobs: {search_term}") + job_search = JobSearch(driver=driver, close_on_complete=False, scrape=False) + jobs = job_search.search(search_term) - # Convert job objects to dictionaries - return [job.to_dict() for job in jobs] - except Exception as e: - print(f"โŒ Error searching jobs: {e}") - return [{"error": f"Failed to search jobs: {str(e)}"}] + # Convert job objects to dictionaries + return [job.to_dict() for job in jobs] @mcp.tool() + @handle_linkedin_errors_list async def get_recommended_jobs() -> List[Dict[str, Any]]: """ - Get recommended jobs from your LinkedIn homepage. + Get recommended jobs from LinkedIn (Note: This tool has compatibility issues). 
Returns: List[Dict[str, Any]]: List of recommended jobs """ - driver = get_or_create_driver() - - try: - print("๐Ÿ“‹ Getting recommended jobs") - job_search = JobSearch( - driver=driver, - close_on_complete=False, - scrape=True, - scrape_recommended_jobs=True, - ) - - # Get recommended jobs and convert to dictionaries - if hasattr(job_search, "recommended_jobs") and job_search.recommended_jobs: - return [job.to_dict() for job in job_search.recommended_jobs] - else: - return [] - except Exception as e: - print(f"โŒ Error getting recommended jobs: {e}") - return [{"error": f"Failed to get recommended jobs: {str(e)}"}] + driver = safe_get_driver() + + print("๐Ÿ“‹ Getting recommended jobs") + job_search = JobSearch( + driver=driver, + close_on_complete=False, + scrape=False, + ) + + if job_search.recommended_jobs: + return [job.to_dict() for job in job_search.recommended_jobs] + else: + return [] diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 236fa7f4..a6706d90 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -10,7 +10,7 @@ from fastmcp import FastMCP from linkedin_scraper import Person -from linkedin_mcp_server.drivers.chrome import get_or_create_driver +from linkedin_mcp_server.error_handler import handle_linkedin_errors, safe_get_driver def register_person_tools(mcp: FastMCP) -> None: @@ -22,6 +22,7 @@ def register_person_tools(mcp: FastMCP) -> None: """ @mcp.tool() + @handle_linkedin_errors async def get_person_profile(linkedin_url: str) -> Dict[str, Any]: """ Scrape a person's LinkedIn profile. 
@@ -32,70 +33,66 @@ async def get_person_profile(linkedin_url: str) -> Dict[str, Any]: Returns: Dict[str, Any]: Structured data from the person's profile """ - driver = get_or_create_driver() - - try: - print(f"๐Ÿ” Scraping profile: {linkedin_url}") - person = Person(linkedin_url, driver=driver, close_on_complete=False) - - # Convert experiences to structured dictionaries - experiences: List[Dict[str, Any]] = [ - { - "position_title": exp.position_title, - "company": exp.institution_name, - "from_date": exp.from_date, - "to_date": exp.to_date, - "duration": exp.duration, - "location": exp.location, - "description": exp.description, - } - for exp in person.experiences - ] - - # Convert educations to structured dictionaries - educations: List[Dict[str, Any]] = [ - { - "institution": edu.institution_name, - "degree": edu.degree, - "from_date": edu.from_date, - "to_date": edu.to_date, - "description": edu.description, - } - for edu in person.educations - ] - - # Convert interests to list of titles - interests: List[str] = [interest.title for interest in person.interests] - - # Convert accomplishments to structured dictionaries - accomplishments: List[Dict[str, str]] = [ - {"category": acc.category, "title": acc.title} - for acc in person.accomplishments - ] - - # Convert contacts to structured dictionaries - contacts: List[Dict[str, str]] = [ - { - "name": contact.name, - "occupation": contact.occupation, - "url": contact.url, - } - for contact in person.contacts - ] - - # Return the complete profile data - return { - "name": person.name, - "about": person.about, - "experiences": experiences, - "educations": educations, - "interests": interests, - "accomplishments": accomplishments, - "contacts": contacts, - "company": person.company, - "job_title": person.job_title, - "open_to_work": getattr(person, "open_to_work", False), + driver = safe_get_driver() + + print(f"๐Ÿ” Scraping profile: {linkedin_url}") + person = Person(linkedin_url, driver=driver, 
close_on_complete=False) + + # Convert experiences to structured dictionaries + experiences: List[Dict[str, Any]] = [ + { + "position_title": exp.position_title, + "company": exp.institution_name, + "from_date": exp.from_date, + "to_date": exp.to_date, + "duration": exp.duration, + "location": exp.location, + "description": exp.description, } - except Exception as e: - print(f"โŒ Error scraping profile: {e}") - return {"error": f"Failed to scrape profile: {str(e)}"} + for exp in person.experiences + ] + + # Convert educations to structured dictionaries + educations: List[Dict[str, Any]] = [ + { + "institution": edu.institution_name, + "degree": edu.degree, + "from_date": edu.from_date, + "to_date": edu.to_date, + "description": edu.description, + } + for edu in person.educations + ] + + # Convert interests to list of titles + interests: List[str] = [interest.title for interest in person.interests] + + # Convert accomplishments to structured dictionaries + accomplishments: List[Dict[str, str]] = [ + {"category": acc.category, "title": acc.title} + for acc in person.accomplishments + ] + + # Convert contacts to structured dictionaries + contacts: List[Dict[str, str]] = [ + { + "name": contact.name, + "occupation": contact.occupation, + "url": contact.url, + } + for contact in person.contacts + ] + + # Return the complete profile data + return { + "name": person.name, + "about": person.about, + "experiences": experiences, + "educations": educations, + "interests": interests, + "accomplishments": accomplishments, + "contacts": contacts, + "company": person.company, + "job_title": person.job_title, + "open_to_work": getattr(person, "open_to_work", False), + } diff --git a/main.py b/main.py index 8e23d5d3..65b86987 100644 --- a/main.py +++ b/main.py @@ -14,6 +14,8 @@ # Import the new centralized configuration from linkedin_mcp_server.config import get_config from linkedin_mcp_server.drivers.chrome import initialize_driver +from linkedin_mcp_server.exceptions import 
LinkedInMCPError +from linkedin_mcp_server.logging_config import configure_logging from linkedin_mcp_server.server import create_mcp_server, shutdown_handler @@ -43,17 +45,28 @@ def main() -> None: config = get_config() # Configure logging - log_level = logging.DEBUG if config.server.debug else logging.ERROR - logging.basicConfig( - level=log_level, - format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + configure_logging( + debug=config.server.debug, + json_format=config.chrome.non_interactive, # Use JSON format in non-interactive mode ) logger = logging.getLogger("linkedin_mcp_server") logger.debug(f"Server configuration: {config}") # Initialize the driver with configuration (initialize driver checks for lazy init options) - initialize_driver() + try: + initialize_driver() + except LinkedInMCPError as e: + logger.error( + f"Failed to initialize driver: {str(e)}", + extra={"error_type": type(e).__name__, "error_details": str(e)}, + ) + if config.chrome.non_interactive: + sys.exit(1) + else: + print(f"\nโŒ Error: {str(e)}") + print("๐Ÿ’ก Tip: Check your credentials and try again.") + sys.exit(1) # Decide transport transport = config.server.transport diff --git a/pyproject.toml b/pyproject.toml index 5284dcfc..72572f6b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ dependencies = [ linkedin_mcp_server = ["py.typed"] [tool.uv.sources] -linkedin-scraper = { git = "https://github.com/joeyism/linkedin_scraper.git" } +linkedin-scraper = { git = "https://github.com/stickerdaniel/linkedin_scraper.git" } [dependency-groups] dev = [ diff --git a/uv.lock b/uv.lock index c8c796cb..db7a201a 100644 --- a/uv.lock +++ b/uv.lock @@ -684,7 +684,7 @@ requires-dist = [ { name = "fastmcp", specifier = ">=2.10.1" }, { name = "inquirer", specifier = ">=3.4.0" }, { name = "keyring", specifier = ">=25.6.0" }, - { name = "linkedin-scraper", git = "https://github.com/joeyism/linkedin_scraper.git" }, + { name = "linkedin-scraper", git = 
"https://github.com/stickerdaniel/linkedin_scraper.git" }, { name = "pyperclip", specifier = ">=1.9.0" }, ] @@ -702,53 +702,52 @@ dev = [ [[package]] name = "linkedin-scraper" version = "2.11.5" -source = { git = "https://github.com/joeyism/linkedin_scraper.git#44eafb893e691732474e37a20123c5cc9007e0ad" } +source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git#88b22aa1cb15371a4460ed2f4988ac79ea4c200f" } dependencies = [ { name = "lxml" }, + { name = "python-dotenv" }, { name = "requests" }, { name = "selenium" }, ] [[package]] name = "lxml" -version = "5.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/76/3d/14e82fc7c8fb1b7761f7e748fd47e2ec8276d137b6acfe5a4bb73853e08f/lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd", size = 3679479, upload-time = "2025-04-23T01:50:29.322Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/4c/d101ace719ca6a4ec043eb516fcfcb1b396a9fccc4fcd9ef593df34ba0d5/lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4", size = 8127392, upload-time = "2025-04-23T01:46:04.09Z" }, - { url = "https://files.pythonhosted.org/packages/11/84/beddae0cec4dd9ddf46abf156f0af451c13019a0fa25d7445b655ba5ccb7/lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d", size = 4415103, upload-time = "2025-04-23T01:46:07.227Z" }, - { url = "https://files.pythonhosted.org/packages/d0/25/d0d93a4e763f0462cccd2b8a665bf1e4343dd788c76dcfefa289d46a38a9/lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779", size = 5024224, upload-time = "2025-04-23T01:46:10.237Z" }, - { url = 
"https://files.pythonhosted.org/packages/31/ce/1df18fb8f7946e7f3388af378b1f34fcf253b94b9feedb2cec5969da8012/lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e", size = 4769913, upload-time = "2025-04-23T01:46:12.757Z" }, - { url = "https://files.pythonhosted.org/packages/4e/62/f4a6c60ae7c40d43657f552f3045df05118636be1165b906d3423790447f/lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9", size = 5290441, upload-time = "2025-04-23T01:46:16.037Z" }, - { url = "https://files.pythonhosted.org/packages/9e/aa/04f00009e1e3a77838c7fc948f161b5d2d5de1136b2b81c712a263829ea4/lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5", size = 4820165, upload-time = "2025-04-23T01:46:19.137Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/e0b2f61fa2404bf0f1fdf1898377e5bd1b74cc9b2cf2c6ba8509b8f27990/lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5", size = 4932580, upload-time = "2025-04-23T01:46:21.963Z" }, - { url = "https://files.pythonhosted.org/packages/24/a2/8263f351b4ffe0ed3e32ea7b7830f845c795349034f912f490180d88a877/lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4", size = 4759493, upload-time = "2025-04-23T01:46:24.316Z" }, - { url = "https://files.pythonhosted.org/packages/05/00/41db052f279995c0e35c79d0f0fc9f8122d5b5e9630139c592a0b58c71b4/lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e", size = 5324679, upload-time = "2025-04-23T01:46:27.097Z" }, - { url = 
"https://files.pythonhosted.org/packages/1d/be/ee99e6314cdef4587617d3b3b745f9356d9b7dd12a9663c5f3b5734b64ba/lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7", size = 4890691, upload-time = "2025-04-23T01:46:30.009Z" }, - { url = "https://files.pythonhosted.org/packages/ad/36/239820114bf1d71f38f12208b9c58dec033cbcf80101cde006b9bde5cffd/lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079", size = 4955075, upload-time = "2025-04-23T01:46:32.33Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e1/1b795cc0b174efc9e13dbd078a9ff79a58728a033142bc6d70a1ee8fc34d/lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20", size = 4838680, upload-time = "2025-04-23T01:46:34.852Z" }, - { url = "https://files.pythonhosted.org/packages/72/48/3c198455ca108cec5ae3662ae8acd7fd99476812fd712bb17f1b39a0b589/lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8", size = 5391253, upload-time = "2025-04-23T01:46:37.608Z" }, - { url = "https://files.pythonhosted.org/packages/d6/10/5bf51858971c51ec96cfc13e800a9951f3fd501686f4c18d7d84fe2d6352/lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f", size = 5261651, upload-time = "2025-04-23T01:46:40.183Z" }, - { url = "https://files.pythonhosted.org/packages/2b/11/06710dd809205377da380546f91d2ac94bad9ff735a72b64ec029f706c85/lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc", size = 5024315, upload-time = "2025-04-23T01:46:43.333Z" }, - { url = 
"https://files.pythonhosted.org/packages/f5/b0/15b6217834b5e3a59ebf7f53125e08e318030e8cc0d7310355e6edac98ef/lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f", size = 3486149, upload-time = "2025-04-23T01:46:45.684Z" }, - { url = "https://files.pythonhosted.org/packages/91/1e/05ddcb57ad2f3069101611bd5f5084157d90861a2ef460bf42f45cced944/lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2", size = 3817095, upload-time = "2025-04-23T01:46:48.521Z" }, - { url = "https://files.pythonhosted.org/packages/87/cb/2ba1e9dd953415f58548506fa5549a7f373ae55e80c61c9041b7fd09a38a/lxml-5.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:773e27b62920199c6197130632c18fb7ead3257fce1ffb7d286912e56ddb79e0", size = 8110086, upload-time = "2025-04-23T01:46:52.218Z" }, - { url = "https://files.pythonhosted.org/packages/b5/3e/6602a4dca3ae344e8609914d6ab22e52ce42e3e1638c10967568c5c1450d/lxml-5.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ce9c671845de9699904b1e9df95acfe8dfc183f2310f163cdaa91a3535af95de", size = 4404613, upload-time = "2025-04-23T01:46:55.281Z" }, - { url = "https://files.pythonhosted.org/packages/4c/72/bf00988477d3bb452bef9436e45aeea82bb40cdfb4684b83c967c53909c7/lxml-5.4.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9454b8d8200ec99a224df8854786262b1bd6461f4280064c807303c642c05e76", size = 5012008, upload-time = "2025-04-23T01:46:57.817Z" }, - { url = "https://files.pythonhosted.org/packages/92/1f/93e42d93e9e7a44b2d3354c462cd784dbaaf350f7976b5d7c3f85d68d1b1/lxml-5.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccd007d5c95279e529c146d095f1d39ac05139de26c098166c4beb9374b0f4d", size = 4760915, upload-time = "2025-04-23T01:47:00.745Z" }, - { url = 
"https://files.pythonhosted.org/packages/45/0b/363009390d0b461cf9976a499e83b68f792e4c32ecef092f3f9ef9c4ba54/lxml-5.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fce1294a0497edb034cb416ad3e77ecc89b313cff7adbee5334e4dc0d11f422", size = 5283890, upload-time = "2025-04-23T01:47:04.702Z" }, - { url = "https://files.pythonhosted.org/packages/19/dc/6056c332f9378ab476c88e301e6549a0454dbee8f0ae16847414f0eccb74/lxml-5.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24974f774f3a78ac12b95e3a20ef0931795ff04dbb16db81a90c37f589819551", size = 4812644, upload-time = "2025-04-23T01:47:07.833Z" }, - { url = "https://files.pythonhosted.org/packages/ee/8a/f8c66bbb23ecb9048a46a5ef9b495fd23f7543df642dabeebcb2eeb66592/lxml-5.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:497cab4d8254c2a90bf988f162ace2ddbfdd806fce3bda3f581b9d24c852e03c", size = 4921817, upload-time = "2025-04-23T01:47:10.317Z" }, - { url = "https://files.pythonhosted.org/packages/04/57/2e537083c3f381f83d05d9b176f0d838a9e8961f7ed8ddce3f0217179ce3/lxml-5.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e794f698ae4c5084414efea0f5cc9f4ac562ec02d66e1484ff822ef97c2cadff", size = 4753916, upload-time = "2025-04-23T01:47:12.823Z" }, - { url = "https://files.pythonhosted.org/packages/d8/80/ea8c4072109a350848f1157ce83ccd9439601274035cd045ac31f47f3417/lxml-5.4.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:2c62891b1ea3094bb12097822b3d44b93fc6c325f2043c4d2736a8ff09e65f60", size = 5289274, upload-time = "2025-04-23T01:47:15.916Z" }, - { url = "https://files.pythonhosted.org/packages/b3/47/c4be287c48cdc304483457878a3f22999098b9a95f455e3c4bda7ec7fc72/lxml-5.4.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:142accb3e4d1edae4b392bd165a9abdee8a3c432a2cca193df995bc3886249c8", size = 4874757, upload-time = "2025-04-23T01:47:19.793Z" }, - { url = 
"https://files.pythonhosted.org/packages/2f/04/6ef935dc74e729932e39478e44d8cfe6a83550552eaa072b7c05f6f22488/lxml-5.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1a42b3a19346e5601d1b8296ff6ef3d76038058f311902edd574461e9c036982", size = 4947028, upload-time = "2025-04-23T01:47:22.401Z" }, - { url = "https://files.pythonhosted.org/packages/cb/f9/c33fc8daa373ef8a7daddb53175289024512b6619bc9de36d77dca3df44b/lxml-5.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4291d3c409a17febf817259cb37bc62cb7eb398bcc95c1356947e2871911ae61", size = 4834487, upload-time = "2025-04-23T01:47:25.513Z" }, - { url = "https://files.pythonhosted.org/packages/8d/30/fc92bb595bcb878311e01b418b57d13900f84c2b94f6eca9e5073ea756e6/lxml-5.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f5322cf38fe0e21c2d73901abf68e6329dc02a4994e483adbcf92b568a09a54", size = 5381688, upload-time = "2025-04-23T01:47:28.454Z" }, - { url = "https://files.pythonhosted.org/packages/43/d1/3ba7bd978ce28bba8e3da2c2e9d5ae3f8f521ad3f0ca6ea4788d086ba00d/lxml-5.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0be91891bdb06ebe65122aa6bf3fc94489960cf7e03033c6f83a90863b23c58b", size = 5242043, upload-time = "2025-04-23T01:47:31.208Z" }, - { url = "https://files.pythonhosted.org/packages/ee/cd/95fa2201041a610c4d08ddaf31d43b98ecc4b1d74b1e7245b1abdab443cb/lxml-5.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:15a665ad90054a3d4f397bc40f73948d48e36e4c09f9bcffc7d90c87410e478a", size = 5021569, upload-time = "2025-04-23T01:47:33.805Z" }, - { url = "https://files.pythonhosted.org/packages/2d/a6/31da006fead660b9512d08d23d31e93ad3477dd47cc42e3285f143443176/lxml-5.4.0-cp313-cp313-win32.whl", hash = "sha256:d5663bc1b471c79f5c833cffbc9b87d7bf13f87e055a5c86c363ccd2348d7e82", size = 3485270, upload-time = "2025-04-23T01:47:36.133Z" }, - { url = "https://files.pythonhosted.org/packages/fc/14/c115516c62a7d2499781d2d3d7215218c0731b2c940753bf9f9b7b73924d/lxml-5.4.0-cp313-cp313-win_amd64.whl", hash 
= "sha256:bcb7a1096b4b6b24ce1ac24d4942ad98f983cd3810f9711bcd0293f43a9d8b9f", size = 3814606, upload-time = "2025-04-23T01:47:39.028Z" }, +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c5/ed/60eb6fa2923602fba988d9ca7c5cdbd7cf25faa795162ed538b527a35411/lxml-6.0.0.tar.gz", hash = "sha256:032e65120339d44cdc3efc326c9f660f5f7205f3a535c1fdbf898b29ea01fb72", size = 4096938, upload-time = "2025-06-26T16:28:19.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/c3/d01d735c298d7e0ddcedf6f028bf556577e5ab4f4da45175ecd909c79378/lxml-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78718d8454a6e928470d511bf8ac93f469283a45c354995f7d19e77292f26108", size = 8429515, upload-time = "2025-06-26T16:26:06.776Z" }, + { url = "https://files.pythonhosted.org/packages/06/37/0e3eae3043d366b73da55a86274a590bae76dc45aa004b7042e6f97803b1/lxml-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:84ef591495ffd3f9dcabffd6391db7bb70d7230b5c35ef5148354a134f56f2be", size = 4601387, upload-time = "2025-06-26T16:26:09.511Z" }, + { url = "https://files.pythonhosted.org/packages/a3/28/e1a9a881e6d6e29dda13d633885d13acb0058f65e95da67841c8dd02b4a8/lxml-6.0.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2930aa001a3776c3e2601cb8e0a15d21b8270528d89cc308be4843ade546b9ab", size = 5228928, upload-time = "2025-06-26T16:26:12.337Z" }, + { url = "https://files.pythonhosted.org/packages/9a/55/2cb24ea48aa30c99f805921c1c7860c1f45c0e811e44ee4e6a155668de06/lxml-6.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:219e0431ea8006e15005767f0351e3f7f9143e793e58519dc97fe9e07fae5563", size = 4952289, upload-time = "2025-06-28T18:47:25.602Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/c0/b25d9528df296b9a3306ba21ff982fc5b698c45ab78b94d18c2d6ae71fd9/lxml-6.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bd5913b4972681ffc9718bc2d4c53cde39ef81415e1671ff93e9aa30b46595e7", size = 5111310, upload-time = "2025-06-28T18:47:28.136Z" }, + { url = "https://files.pythonhosted.org/packages/e9/af/681a8b3e4f668bea6e6514cbcb297beb6de2b641e70f09d3d78655f4f44c/lxml-6.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:390240baeb9f415a82eefc2e13285016f9c8b5ad71ec80574ae8fa9605093cd7", size = 5025457, upload-time = "2025-06-26T16:26:15.068Z" }, + { url = "https://files.pythonhosted.org/packages/99/b6/3a7971aa05b7be7dfebc7ab57262ec527775c2c3c5b2f43675cac0458cad/lxml-6.0.0-cp312-cp312-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d6e200909a119626744dd81bae409fc44134389e03fbf1d68ed2a55a2fb10991", size = 5657016, upload-time = "2025-07-03T19:19:06.008Z" }, + { url = "https://files.pythonhosted.org/packages/69/f8/693b1a10a891197143c0673fcce5b75fc69132afa81a36e4568c12c8faba/lxml-6.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca50bd612438258a91b5b3788c6621c1f05c8c478e7951899f492be42defc0da", size = 5257565, upload-time = "2025-06-26T16:26:17.906Z" }, + { url = "https://files.pythonhosted.org/packages/a8/96/e08ff98f2c6426c98c8964513c5dab8d6eb81dadcd0af6f0c538ada78d33/lxml-6.0.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:c24b8efd9c0f62bad0439283c2c795ef916c5a6b75f03c17799775c7ae3c0c9e", size = 4713390, upload-time = "2025-06-26T16:26:20.292Z" }, + { url = "https://files.pythonhosted.org/packages/a8/83/6184aba6cc94d7413959f6f8f54807dc318fdcd4985c347fe3ea6937f772/lxml-6.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:afd27d8629ae94c5d863e32ab0e1d5590371d296b87dae0a751fb22bf3685741", size = 5066103, upload-time = "2025-06-26T16:26:22.765Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/01/8bf1f4035852d0ff2e36a4d9aacdbcc57e93a6cd35a54e05fa984cdf73ab/lxml-6.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:54c4855eabd9fc29707d30141be99e5cd1102e7d2258d2892314cf4c110726c3", size = 4791428, upload-time = "2025-06-26T16:26:26.461Z" }, + { url = "https://files.pythonhosted.org/packages/29/31/c0267d03b16954a85ed6b065116b621d37f559553d9339c7dcc4943a76f1/lxml-6.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c907516d49f77f6cd8ead1322198bdfd902003c3c330c77a1c5f3cc32a0e4d16", size = 5678523, upload-time = "2025-07-03T19:19:09.837Z" }, + { url = "https://files.pythonhosted.org/packages/5c/f7/5495829a864bc5f8b0798d2b52a807c89966523140f3d6fa3a58ab6720ea/lxml-6.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36531f81c8214e293097cd2b7873f178997dae33d3667caaae8bdfb9666b76c0", size = 5281290, upload-time = "2025-06-26T16:26:29.406Z" }, + { url = "https://files.pythonhosted.org/packages/79/56/6b8edb79d9ed294ccc4e881f4db1023af56ba451909b9ce79f2a2cd7c532/lxml-6.0.0-cp312-cp312-win32.whl", hash = "sha256:690b20e3388a7ec98e899fd54c924e50ba6693874aa65ef9cb53de7f7de9d64a", size = 3613495, upload-time = "2025-06-26T16:26:31.588Z" }, + { url = "https://files.pythonhosted.org/packages/0b/1e/cc32034b40ad6af80b6fd9b66301fc0f180f300002e5c3eb5a6110a93317/lxml-6.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:310b719b695b3dd442cdfbbe64936b2f2e231bb91d998e99e6f0daf991a3eba3", size = 4014711, upload-time = "2025-06-26T16:26:33.723Z" }, + { url = "https://files.pythonhosted.org/packages/55/10/dc8e5290ae4c94bdc1a4c55865be7e1f31dfd857a88b21cbba68b5fea61b/lxml-6.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:8cb26f51c82d77483cdcd2b4a53cda55bbee29b3c2f3ddeb47182a2a9064e4eb", size = 3674431, upload-time = "2025-06-26T16:26:35.959Z" }, + { url = "https://files.pythonhosted.org/packages/79/21/6e7c060822a3c954ff085e5e1b94b4a25757c06529eac91e550f3f5cd8b8/lxml-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:6da7cd4f405fd7db56e51e96bff0865b9853ae70df0e6720624049da76bde2da", size = 8414372, upload-time = "2025-06-26T16:26:39.079Z" }, + { url = "https://files.pythonhosted.org/packages/a4/f6/051b1607a459db670fc3a244fa4f06f101a8adf86cda263d1a56b3a4f9d5/lxml-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b34339898bb556a2351a1830f88f751679f343eabf9cf05841c95b165152c9e7", size = 4593940, upload-time = "2025-06-26T16:26:41.891Z" }, + { url = "https://files.pythonhosted.org/packages/8e/74/dd595d92a40bda3c687d70d4487b2c7eff93fd63b568acd64fedd2ba00fe/lxml-6.0.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:51a5e4c61a4541bd1cd3ba74766d0c9b6c12d6a1a4964ef60026832aac8e79b3", size = 5214329, upload-time = "2025-06-26T16:26:44.669Z" }, + { url = "https://files.pythonhosted.org/packages/52/46/3572761efc1bd45fcafb44a63b3b0feeb5b3f0066886821e94b0254f9253/lxml-6.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d18a25b19ca7307045581b18b3ec9ead2b1db5ccd8719c291f0cd0a5cec6cb81", size = 4947559, upload-time = "2025-06-28T18:47:31.091Z" }, + { url = "https://files.pythonhosted.org/packages/94/8a/5e40de920e67c4f2eef9151097deb9b52d86c95762d8ee238134aff2125d/lxml-6.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d4f0c66df4386b75d2ab1e20a489f30dc7fd9a06a896d64980541506086be1f1", size = 5102143, upload-time = "2025-06-28T18:47:33.612Z" }, + { url = "https://files.pythonhosted.org/packages/7c/4b/20555bdd75d57945bdabfbc45fdb1a36a1a0ff9eae4653e951b2b79c9209/lxml-6.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f4b481b6cc3a897adb4279216695150bbe7a44c03daba3c894f49d2037e0a24", size = 5021931, upload-time = "2025-06-26T16:26:47.503Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/6e/cf03b412f3763d4ca23b25e70c96a74cfece64cec3addf1c4ec639586b13/lxml-6.0.0-cp313-cp313-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a78d6c9168f5bcb20971bf3329c2b83078611fbe1f807baadc64afc70523b3a", size = 5645469, upload-time = "2025-07-03T19:19:13.32Z" }, + { url = "https://files.pythonhosted.org/packages/d4/dd/39c8507c16db6031f8c1ddf70ed95dbb0a6d466a40002a3522c128aba472/lxml-6.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae06fbab4f1bb7db4f7c8ca9897dc8db4447d1a2b9bee78474ad403437bcc29", size = 5247467, upload-time = "2025-06-26T16:26:49.998Z" }, + { url = "https://files.pythonhosted.org/packages/4d/56/732d49def0631ad633844cfb2664563c830173a98d5efd9b172e89a4800d/lxml-6.0.0-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:1fa377b827ca2023244a06554c6e7dc6828a10aaf74ca41965c5d8a4925aebb4", size = 4720601, upload-time = "2025-06-26T16:26:52.564Z" }, + { url = "https://files.pythonhosted.org/packages/8f/7f/6b956fab95fa73462bca25d1ea7fc8274ddf68fb8e60b78d56c03b65278e/lxml-6.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1676b56d48048a62ef77a250428d1f31f610763636e0784ba67a9740823988ca", size = 5060227, upload-time = "2025-06-26T16:26:55.054Z" }, + { url = "https://files.pythonhosted.org/packages/97/06/e851ac2924447e8b15a294855caf3d543424364a143c001014d22c8ca94c/lxml-6.0.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:0e32698462aacc5c1cf6bdfebc9c781821b7e74c79f13e5ffc8bfe27c42b1abf", size = 4790637, upload-time = "2025-06-26T16:26:57.384Z" }, + { url = "https://files.pythonhosted.org/packages/06/d4/fd216f3cd6625022c25b336c7570d11f4a43adbaf0a56106d3d496f727a7/lxml-6.0.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4d6036c3a296707357efb375cfc24bb64cd955b9ec731abf11ebb1e40063949f", size = 5662049, upload-time = "2025-07-03T19:19:16.409Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/03/0e764ce00b95e008d76b99d432f1807f3574fb2945b496a17807a1645dbd/lxml-6.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7488a43033c958637b1a08cddc9188eb06d3ad36582cebc7d4815980b47e27ef", size = 5272430, upload-time = "2025-06-26T16:27:00.031Z" }, + { url = "https://files.pythonhosted.org/packages/5f/01/d48cc141bc47bc1644d20fe97bbd5e8afb30415ec94f146f2f76d0d9d098/lxml-6.0.0-cp313-cp313-win32.whl", hash = "sha256:5fcd7d3b1d8ecb91445bd71b9c88bdbeae528fefee4f379895becfc72298d181", size = 3612896, upload-time = "2025-06-26T16:27:04.251Z" }, + { url = "https://files.pythonhosted.org/packages/f4/87/6456b9541d186ee7d4cb53bf1b9a0d7f3b1068532676940fdd594ac90865/lxml-6.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:2f34687222b78fff795feeb799a7d44eca2477c3d9d3a46ce17d51a4f383e32e", size = 4013132, upload-time = "2025-06-26T16:27:06.415Z" }, + { url = "https://files.pythonhosted.org/packages/b7/42/85b3aa8f06ca0d24962f8100f001828e1f1f1a38c954c16e71154ed7d53a/lxml-6.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:21db1ec5525780fd07251636eb5f7acb84003e9382c72c18c542a87c416ade03", size = 3672642, upload-time = "2025-06-26T16:27:09.888Z" }, ] [[package]] @@ -1148,11 +1147,11 @@ wheels = [ [[package]] name = "python-dotenv" -version = "1.1.0" +version = "1.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920, upload-time = "2025-03-25T10:14:56.835Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload-time = "2025-03-25T10:14:55.034Z" }, + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, ] [[package]] @@ -1224,7 +1223,7 @@ wheels = [ [[package]] name = "requests" -version = "2.32.3" +version = "2.32.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -1232,9 +1231,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload-time = "2024-05-29T15:37:49.536Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" }, + { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 
64847, upload-time = "2025-06-09T16:43:05.728Z" }, ] [[package]] From 7fafbaba0a314a96ad43f86f47169f319379fd1d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 5 Jul 2025 11:49:08 -0400 Subject: [PATCH 111/565] docs(README): remove outdated error handling section --- README.md | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/README.md b/README.md index 20e66625..2848174b 100644 --- a/README.md +++ b/README.md @@ -37,17 +37,6 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c > - **Recommended Jobs** (`get_recommended_jobs`): Selenium method compatibility issues > - **Company Profiles** (`get_company_profile`): Some companies can't be accessed / may return empty results (need further investigation) -## ๐Ÿ›ก๏ธ Error Handling & Non-Interactive Mode - -**NEW**: Enhanced error handling for Docker and CI/CD environments! - -The server now provides detailed error information when login fails: -- **Specific error types**: `credentials_not_found`, `invalid_credentials`, `captcha_required`, `two_factor_auth_required`, `rate_limit` -- **Non-interactive mode**: Use `--no-setup` to skip all prompts (perfect for Docker) -- **Structured responses**: Each error includes type, message, and resolution steps - -For detailed error handling documentation, see [ERROR_HANDLING.md](ERROR_HANDLING.md) - --- ## ๐Ÿณ Docker Setup (Recommended - Universal) From 2cd314b5651961d435cf173bb7556d3fd8092a3b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 5 Jul 2025 11:52:16 -0400 Subject: [PATCH 112/565] refactor(error_handler): implement structured logging for errors --- linkedin_mcp_server/drivers/chrome.py | 19 ++++++++----------- linkedin_mcp_server/error_handler.py | 13 +++++++++++-- 2 files changed, 19 insertions(+), 13 deletions(-) diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 9fa86295..5860e278 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ 
b/linkedin_mcp_server/drivers/chrome.py @@ -10,14 +10,6 @@ from typing import Dict, Optional import inquirer # type: ignore -from selenium import webdriver -from selenium.common.exceptions import WebDriverException -from selenium.webdriver.chrome.options import Options -from selenium.webdriver.chrome.service import Service - -from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.config.providers import clear_credentials_from_keyring -from linkedin_mcp_server.config.secrets import get_credentials from linkedin_scraper.exceptions import ( CaptchaRequiredError, InvalidCredentialsError, @@ -26,6 +18,14 @@ SecurityChallengeError, TwoFactorAuthError, ) +from selenium import webdriver +from selenium.common.exceptions import WebDriverException +from selenium.webdriver.chrome.options import Options +from selenium.webdriver.chrome.service import Service + +from linkedin_mcp_server.config import get_config +from linkedin_mcp_server.config.providers import clear_credentials_from_keyring +from linkedin_mcp_server.config.secrets import get_credentials from linkedin_mcp_server.exceptions import ( CredentialsNotFoundError, DriverInitializationError, @@ -133,9 +133,6 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: raise WebDriverException(error_msg) -# Remove this function - linkedin-scraper now handles all error detection - - def login_to_linkedin(driver: webdriver.Chrome) -> bool: """ Log in to LinkedIn using stored or provided credentials. diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index 077ec6c8..be38dc99 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -5,6 +5,7 @@ eliminating code duplication and ensuring consistent error responses. 
""" +import logging from typing import Any, Dict, List from linkedin_scraper.exceptions import ( @@ -135,8 +136,16 @@ def convert_exception_to_response( return {"error": "linkedin_error", "message": str(exception)} else: - # Generic error handling - print(f"โŒ Error in {context}: {exception}") + # Generic error handling with structured logging + logger = logging.getLogger(__name__) + logger.error( + f"Error in {context}: {exception}", + extra={ + "context": context, + "exception_type": type(exception).__name__, + "exception_message": str(exception), + }, + ) return { "error": "unknown_error", "message": f"Failed to execute {context}: {str(exception)}", From 39e05ab163af680ec611aca3705674d11fde0180 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 5 Jul 2025 11:53:27 -0400 Subject: [PATCH 113/565] fix(login): update error handling to prevent infinite loops --- linkedin_mcp_server/drivers/chrome.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 5860e278..21503028 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -197,10 +197,11 @@ def handle_login_error(error: Exception) -> None: ] ) if retry and retry.get("retry", False): - # Clear credentials from keyring and try again + # Clear credentials from keyring clear_credentials_from_keyring() - # Try again - initialize_driver() + print("โœ… Credentials cleared from keyring.") + print("๐Ÿ’ก Please restart the application to try with new credentials.") + print(" Example: uv run main.py --no-headless") elif isinstance(error, CaptchaRequiredError): print("โš ๏ธ LinkedIn requires captcha verification.") From 026dac2f0a0558ad2f36e4fe049ccf898b291132 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 5 Jul 2025 11:55:25 -0400 Subject: [PATCH 114/565] fix(error_handler): make error handling functions async --- linkedin_mcp_server/error_handler.py | 8 ++++---- 
1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index be38dc99..481de0ab 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -37,9 +37,9 @@ def handle_linkedin_errors(func): The decorated function that returns structured error responses """ - def wrapper(*args, **kwargs): + async def wrapper(*args, **kwargs): try: - return func(*args, **kwargs) + return await func(*args, **kwargs) except Exception as e: return convert_exception_to_response(e, func.__name__) @@ -59,9 +59,9 @@ def handle_linkedin_errors_list(func): The decorated function that returns structured error responses in list format """ - def wrapper(*args, **kwargs): + async def wrapper(*args, **kwargs): try: - return func(*args, **kwargs) + return await func(*args, **kwargs) except Exception as e: return convert_exception_to_list_response(e, func.__name__) From 66ec545a49d218bc1ac76e1072763a370c846236 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 5 Jul 2025 12:03:58 -0400 Subject: [PATCH 115/565] fix(chrome): update ChromeDriver path handling messages --- linkedin_mcp_server/drivers/chrome.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 21503028..52c50d27 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -339,11 +339,13 @@ def handle_driver_error() -> None: config.chrome.chromedriver_path = path os.environ["CHROMEDRIVER"] = path print(f"โœ… ChromeDriver path set to: {path}") - # Try again with the new path - initialize_driver() + print("๐Ÿ’ก Please restart the application to use the new ChromeDriver path.") + print(" Example: uv run main.py") + sys.exit(0) else: print(f"โš ๏ธ Warning: The specified path does not exist: {path}") - initialize_driver() + print("๐Ÿ’ก Please check the path and restart the 
application.") + sys.exit(1) elif answers["chromedriver_action"] == "help": print("\n๐Ÿ“‹ ChromeDriver Installation Guide:") From 9d0d9136258c0c77884b94ddce30de119e0fe7ce Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 5 Jul 2025 13:20:38 -0400 Subject: [PATCH 116/565] refactor(error_handler): streamline error handling for tools, fix multiple small isssues, verify all tools are working as expected --- linkedin_mcp_server/cli.py | 4 +- linkedin_mcp_server/drivers/chrome.py | 189 ++++++++++++++------------ linkedin_mcp_server/error_handler.py | 43 ++---- linkedin_mcp_server/tools/company.py | 114 ++++++++-------- linkedin_mcp_server/tools/job.py | 65 +++++---- linkedin_mcp_server/tools/person.py | 130 +++++++++--------- main.py | 25 +++- uv.lock | 14 +- 8 files changed, 305 insertions(+), 279 deletions(-) diff --git a/linkedin_mcp_server/cli.py b/linkedin_mcp_server/cli.py index 7c6457d5..98521293 100644 --- a/linkedin_mcp_server/cli.py +++ b/linkedin_mcp_server/cli.py @@ -26,9 +26,7 @@ def print_claude_config() -> None: and copies it to the clipboard for easy pasting. 
""" config = get_config() - current_dir = os.path.abspath( - os.path.dirname(os.path.dirname(os.path.dirname(__file__))) - ) + current_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) # Find the full path to uv executable try: diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 52c50d27..b0f14cad 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -98,31 +98,37 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: # Add a page load timeout for safety driver.set_page_load_timeout(60) - # Try to log in - try: - if login_to_linkedin(driver): - print("Successfully logged in to LinkedIn") - active_drivers[session_id] = driver - return driver - except ( - CaptchaRequiredError, - InvalidCredentialsError, - SecurityChallengeError, - TwoFactorAuthError, - RateLimitError, - LoginTimeoutError, - CredentialsNotFoundError, - ) as e: - # Clean up driver on login failure - driver.quit() - - if config.chrome.non_interactive: - # In non-interactive mode, propagate the error - raise e - else: - # In interactive mode, handle the error - handle_login_error(e) - return None + # Try to log in with retry loop + max_retries = 3 + for attempt in range(max_retries): + try: + if login_to_linkedin(driver): + print("Successfully logged in to LinkedIn") + active_drivers[session_id] = driver + return driver + except ( + CaptchaRequiredError, + InvalidCredentialsError, + SecurityChallengeError, + TwoFactorAuthError, + RateLimitError, + LoginTimeoutError, + CredentialsNotFoundError, + ) as e: + if config.chrome.non_interactive: + # In non-interactive mode, propagate the error + driver.quit() + raise e + else: + # In interactive mode, handle the error and potentially retry + should_retry = handle_login_error(e) + if should_retry and attempt < max_retries - 1: + print(f"๐Ÿ”„ Retry attempt {attempt + 2}/{max_retries}") + continue + else: + # Clean up driver on final failure + 
driver.quit() + return None except Exception as e: error_msg = f"๐Ÿ›‘ Error creating web driver: {e}" print(error_msg) @@ -167,26 +173,74 @@ def login_to_linkedin(driver: webdriver.Chrome) -> bool: from linkedin_scraper import actions # type: ignore - # linkedin-scraper now handles all error detection and raises appropriate exceptions - actions.login( - driver, - credentials["email"], - credentials["password"], - interactive=not config.chrome.non_interactive, - ) + # Use linkedin-scraper login but with simplified error handling + try: + actions.login( + driver, + credentials["email"], + credentials["password"], + interactive=not config.chrome.non_interactive, + ) + + print("โœ… Successfully logged in to LinkedIn") + return True - print("โœ… Successfully logged in to LinkedIn") - return True + except Exception: + # Check current page to determine the real issue + current_url = driver.current_url + if "checkpoint/challenge" in current_url: + # We're on a challenge page - this is the real issue, not credentials + if "security check" in driver.page_source.lower(): + raise SecurityChallengeError( + challenge_url=current_url, + message="LinkedIn requires a security challenge. Please complete it manually and restart the application.", + ) + else: + raise CaptchaRequiredError( + captcha_url=current_url, + ) -def handle_login_error(error: Exception) -> None: - """Handle login errors in interactive mode.""" + elif "feed" in current_url or "mynetwork" in current_url: + # Actually logged in successfully despite the exception + print("โœ… Successfully logged in to LinkedIn") + return True + + else: + # Check for actual credential issues + page_source = driver.page_source.lower() + if any( + pattern in page_source + for pattern in ["wrong email", "wrong password", "incorrect", "invalid"] + ): + raise InvalidCredentialsError("Invalid LinkedIn email or password.") + elif "too many" in page_source: + raise RateLimitError( + "Too many login attempts. 
Please wait and try again later." + ) + else: + raise LoginTimeoutError( + "Login failed. Please check your credentials and network connection." + ) + + +def handle_login_error(error: Exception) -> bool: + """Handle login errors in interactive mode. + + Returns: + bool: True if user wants to retry, False if they want to exit + """ config = get_config() - print(f"\nโŒ Login failed: {str(error)}") + print(f"\nโŒ {str(error)}") + + if config.chrome.headless: + print( + "๐Ÿ” Try running with visible browser window: uv run main.py --no-headless" + ) + # Only allow retry for credential errors if isinstance(error, InvalidCredentialsError): - print("โš ๏ธ Please check your email and password.") retry = inquirer.prompt( [ inquirer.Confirm( @@ -197,51 +251,12 @@ def handle_login_error(error: Exception) -> None: ] ) if retry and retry.get("retry", False): - # Clear credentials from keyring clear_credentials_from_keyring() print("โœ… Credentials cleared from keyring.") - print("๐Ÿ’ก Please restart the application to try with new credentials.") - print(" Example: uv run main.py --no-headless") - - elif isinstance(error, CaptchaRequiredError): - print("โš ๏ธ LinkedIn requires captcha verification.") - captcha_url = getattr(error, "captcha_url", str(error)) - print(f"๐Ÿ”— Please complete the captcha at: {captcha_url}") - if config.chrome.headless: - print( - "๐Ÿ” Try running with visible browser window to complete captcha: " - "uv run main.py --no-headless" - ) + print("๐Ÿ”„ Retrying with new credentials...") + return True - elif isinstance(error, SecurityChallengeError): - print("โš ๏ธ LinkedIn requires a security challenge.") - challenge_url = getattr(error, "challenge_url", str(error)) - print(f"๐Ÿ”— Please complete the security challenge at: {challenge_url}") - if config.chrome.headless: - print( - "๐Ÿ” Try running with visible browser window to complete challenge: " - "uv run main.py --no-headless" - ) - - elif isinstance(error, TwoFactorAuthError): - print("โš ๏ธ 
Two-factor authentication is required.") - print( - "๐Ÿ“ฑ Please confirm the login in your LinkedIn mobile app or enter the 2FA code." - ) - if config.chrome.headless: - print( - "๐Ÿ” Try running with visible browser window to complete 2FA: " - "uv run main.py --no-headless" - ) - - elif isinstance(error, RateLimitError): - print("โš ๏ธ Too many login attempts. Please wait before trying again.") - - elif isinstance(error, LoginTimeoutError): - print("โš ๏ธ Login timed out. Please check your network connection.") - - else: - print("โš ๏ธ An unexpected error occurred during login.") + return False def initialize_driver() -> None: @@ -277,11 +292,8 @@ def initialize_driver() -> None: if driver: print("โœ… Web driver initialized successfully") else: - if config.chrome.non_interactive: - raise DriverInitializationError( - "Failed to initialize web driver in non-interactive mode" - ) - print("โŒ Failed to initialize web driver.") + # Driver creation failed - always raise an error + raise DriverInitializationError("Failed to initialize web driver") except ( CaptchaRequiredError, InvalidCredentialsError, @@ -291,11 +303,8 @@ def initialize_driver() -> None: LoginTimeoutError, CredentialsNotFoundError, ) as e: - # In non-interactive mode, let the error propagate - if config.chrome.non_interactive: - raise e - # In interactive mode, handle gracefully - print(f"โŒ Error: {str(e)}") + # Always re-raise login-related errors so main.py can handle them + raise e except WebDriverException as e: if config.chrome.non_interactive: raise DriverInitializationError( diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index 481de0ab..1e9d3d29 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -23,49 +23,34 @@ ) -def handle_linkedin_errors(func): +def handle_tool_error(exception: Exception, context: str = "") -> Dict[str, Any]: """ - Decorator to handle LinkedIn MCP errors consistently across all 
tools. - - This decorator wraps tool functions and converts exceptions into - structured error responses that MCP clients can understand. + Handle errors from tool functions and return structured responses. Args: - func: The tool function to wrap + exception: The exception that occurred + context: Context about which tool failed Returns: - The decorated function that returns structured error responses + Structured error response dictionary """ + return convert_exception_to_response(exception, context) - async def wrapper(*args, **kwargs): - try: - return await func(*args, **kwargs) - except Exception as e: - return convert_exception_to_response(e, func.__name__) - - return wrapper - -def handle_linkedin_errors_list(func): +def handle_tool_error_list( + exception: Exception, context: str = "" +) -> List[Dict[str, Any]]: """ - Decorator to handle LinkedIn MCP errors for functions that return lists. - - Similar to handle_linkedin_errors but returns errors in list format. + Handle errors from tool functions that return lists. 
Args: - func: The tool function to wrap + exception: The exception that occurred + context: Context about which tool failed Returns: - The decorated function that returns structured error responses in list format + List containing structured error response dictionary """ - - async def wrapper(*args, **kwargs): - try: - return await func(*args, **kwargs) - except Exception as e: - return convert_exception_to_list_response(e, func.__name__) - - return wrapper + return convert_exception_to_list_response(exception, context) def convert_exception_to_response( diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 52e7e699..65645497 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -10,7 +10,7 @@ from fastmcp import FastMCP from linkedin_scraper import Company -from linkedin_mcp_server.error_handler import handle_linkedin_errors, safe_get_driver +from linkedin_mcp_server.error_handler import handle_tool_error, safe_get_driver def register_company_tools(mcp: FastMCP) -> None: @@ -22,7 +22,6 @@ def register_company_tools(mcp: FastMCP) -> None: """ @mcp.tool() - @handle_linkedin_errors async def get_company_profile( linkedin_url: str, get_employees: bool = False ) -> Dict[str, Any]: @@ -36,58 +35,61 @@ async def get_company_profile( Returns: Dict[str, Any]: Structured data from the company's profile """ - driver = safe_get_driver() - - print(f"๐Ÿข Scraping company: {linkedin_url}") - if get_employees: - print("โš ๏ธ Fetching employees may take a while...") - - company = Company( - linkedin_url, - driver=driver, - get_employees=get_employees, - close_on_complete=False, - ) - - # Convert showcase pages to structured dictionaries - showcase_pages: List[Dict[str, Any]] = [ - { - "name": page.name, - "linkedin_url": page.linkedin_url, - "followers": page.followers, + try: + driver = safe_get_driver() + + print(f"๐Ÿข Scraping company: {linkedin_url}") + if get_employees: + print("โš ๏ธ 
Fetching employees may take a while...") + + company = Company( + linkedin_url, + driver=driver, + get_employees=get_employees, + close_on_complete=False, + ) + + # Convert showcase pages to structured dictionaries + showcase_pages: List[Dict[str, Any]] = [ + { + "name": page.name, + "linkedin_url": page.linkedin_url, + "followers": page.followers, + } + for page in company.showcase_pages + ] + + # Convert affiliated companies to structured dictionaries + affiliated_companies: List[Dict[str, Any]] = [ + { + "name": affiliated.name, + "linkedin_url": affiliated.linkedin_url, + "followers": affiliated.followers, + } + for affiliated in company.affiliated_companies + ] + + # Build the result dictionary + result: Dict[str, Any] = { + "name": company.name, + "about_us": company.about_us, + "website": company.website, + "phone": company.phone, + "headquarters": company.headquarters, + "founded": company.founded, + "industry": company.industry, + "company_type": company.company_type, + "company_size": company.company_size, + "specialties": company.specialties, + "showcase_pages": showcase_pages, + "affiliated_companies": affiliated_companies, + "headcount": company.headcount, } - for page in company.showcase_pages - ] - - # Convert affiliated companies to structured dictionaries - affiliated_companies: List[Dict[str, Any]] = [ - { - "name": affiliated.name, - "linkedin_url": affiliated.linkedin_url, - "followers": affiliated.followers, - } - for affiliated in company.affiliated_companies - ] - - # Build the result dictionary - result: Dict[str, Any] = { - "name": company.name, - "about_us": company.about_us, - "website": company.website, - "phone": company.phone, - "headquarters": company.headquarters, - "founded": company.founded, - "industry": company.industry, - "company_type": company.company_type, - "company_size": company.company_size, - "specialties": company.specialties, - "showcase_pages": showcase_pages, - "affiliated_companies": affiliated_companies, - 
"headcount": company.headcount, - } - - # Add employees if requested and available - if get_employees and company.employees: - result["employees"] = company.employees - - return result + + # Add employees if requested and available + if get_employees and company.employees: + result["employees"] = company.employees + + return result + except Exception as e: + return handle_tool_error(e, "get_company_profile") diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 42e2005f..b0e41709 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -11,8 +11,8 @@ from linkedin_scraper import Job, JobSearch from linkedin_mcp_server.error_handler import ( - handle_linkedin_errors, - handle_linkedin_errors_list, + handle_tool_error, + handle_tool_error_list, safe_get_driver, ) @@ -26,7 +26,6 @@ def register_job_tools(mcp: FastMCP) -> None: """ @mcp.tool() - @handle_linkedin_errors async def get_job_details(job_url: str) -> Dict[str, Any]: """ Scrape job details from a LinkedIn job posting. @@ -44,16 +43,18 @@ async def get_job_details(job_url: str) -> Dict[str, Any]: Dict[str, Any]: Structured job data including title, company, location, posting date, application count, and job description (may be empty if content is protected) """ - driver = safe_get_driver() + try: + driver = safe_get_driver() - print(f"๐Ÿ’ผ Scraping job: {job_url}") - job = Job(job_url, driver=driver, close_on_complete=False) + print(f"๐Ÿ’ผ Scraping job: {job_url}") + job = Job(job_url, driver=driver, close_on_complete=False) - # Convert job object to a dictionary - return job.to_dict() + # Convert job object to a dictionary + return job.to_dict() + except Exception as e: + return handle_tool_error(e, "get_job_details") @mcp.tool() - @handle_linkedin_errors_list async def search_jobs(search_term: str) -> List[Dict[str, Any]]: """ Search for jobs on LinkedIn (Note: This tool has compatibility issues). 
@@ -64,17 +65,19 @@ async def search_jobs(search_term: str) -> List[Dict[str, Any]]: Returns: List[Dict[str, Any]]: List of job search results """ - driver = safe_get_driver() + try: + driver = safe_get_driver() - print(f"๐Ÿ” Searching jobs: {search_term}") - job_search = JobSearch(driver=driver, close_on_complete=False, scrape=False) - jobs = job_search.search(search_term) + print(f"๐Ÿ” Searching jobs: {search_term}") + job_search = JobSearch(driver=driver, close_on_complete=False, scrape=False) + jobs = job_search.search(search_term) - # Convert job objects to dictionaries - return [job.to_dict() for job in jobs] + # Convert job objects to dictionaries + return [job.to_dict() for job in jobs] + except Exception as e: + return handle_tool_error_list(e, "search_jobs") @mcp.tool() - @handle_linkedin_errors_list async def get_recommended_jobs() -> List[Dict[str, Any]]: """ Get recommended jobs from LinkedIn (Note: This tool has compatibility issues). @@ -82,16 +85,20 @@ async def get_recommended_jobs() -> List[Dict[str, Any]]: Returns: List[Dict[str, Any]]: List of recommended jobs """ - driver = safe_get_driver() - - print("๐Ÿ“‹ Getting recommended jobs") - job_search = JobSearch( - driver=driver, - close_on_complete=False, - scrape=False, - ) - - if job_search.recommended_jobs: - return [job.to_dict() for job in job_search.recommended_jobs] - else: - return [] + try: + driver = safe_get_driver() + + print("๐Ÿ“‹ Getting recommended jobs") + job_search = JobSearch( + driver=driver, + close_on_complete=False, + scrape=True, # Enable scraping to get recommended jobs + scrape_recommended_jobs=True, + ) + + if hasattr(job_search, "recommended_jobs") and job_search.recommended_jobs: + return [job.to_dict() for job in job_search.recommended_jobs] + else: + return [] + except Exception as e: + return handle_tool_error_list(e, "get_recommended_jobs") diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index a6706d90..beea1a10 100644 --- 
a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -10,7 +10,7 @@ from fastmcp import FastMCP from linkedin_scraper import Person -from linkedin_mcp_server.error_handler import handle_linkedin_errors, safe_get_driver +from linkedin_mcp_server.error_handler import handle_tool_error, safe_get_driver def register_person_tools(mcp: FastMCP) -> None: @@ -22,7 +22,6 @@ def register_person_tools(mcp: FastMCP) -> None: """ @mcp.tool() - @handle_linkedin_errors async def get_person_profile(linkedin_url: str) -> Dict[str, Any]: """ Scrape a person's LinkedIn profile. @@ -33,66 +32,69 @@ async def get_person_profile(linkedin_url: str) -> Dict[str, Any]: Returns: Dict[str, Any]: Structured data from the person's profile """ - driver = safe_get_driver() - - print(f"๐Ÿ” Scraping profile: {linkedin_url}") - person = Person(linkedin_url, driver=driver, close_on_complete=False) - - # Convert experiences to structured dictionaries - experiences: List[Dict[str, Any]] = [ - { - "position_title": exp.position_title, - "company": exp.institution_name, - "from_date": exp.from_date, - "to_date": exp.to_date, - "duration": exp.duration, - "location": exp.location, - "description": exp.description, + try: + driver = safe_get_driver() + + print(f"๐Ÿ” Scraping profile: {linkedin_url}") + person = Person(linkedin_url, driver=driver, close_on_complete=False) + + # Convert experiences to structured dictionaries + experiences: List[Dict[str, Any]] = [ + { + "position_title": exp.position_title, + "company": exp.institution_name, + "from_date": exp.from_date, + "to_date": exp.to_date, + "duration": exp.duration, + "location": exp.location, + "description": exp.description, + } + for exp in person.experiences + ] + + # Convert educations to structured dictionaries + educations: List[Dict[str, Any]] = [ + { + "institution": edu.institution_name, + "degree": edu.degree, + "from_date": edu.from_date, + "to_date": edu.to_date, + "description": edu.description, + } + 
for edu in person.educations + ] + + # Convert interests to list of titles + interests: List[str] = [interest.title for interest in person.interests] + + # Convert accomplishments to structured dictionaries + accomplishments: List[Dict[str, str]] = [ + {"category": acc.category, "title": acc.title} + for acc in person.accomplishments + ] + + # Convert contacts to structured dictionaries + contacts: List[Dict[str, str]] = [ + { + "name": contact.name, + "occupation": contact.occupation, + "url": contact.url, + } + for contact in person.contacts + ] + + # Return the complete profile data + return { + "name": person.name, + "about": person.about, + "experiences": experiences, + "educations": educations, + "interests": interests, + "accomplishments": accomplishments, + "contacts": contacts, + "company": person.company, + "job_title": person.job_title, + "open_to_work": getattr(person, "open_to_work", False), } - for exp in person.experiences - ] - - # Convert educations to structured dictionaries - educations: List[Dict[str, Any]] = [ - { - "institution": edu.institution_name, - "degree": edu.degree, - "from_date": edu.from_date, - "to_date": edu.to_date, - "description": edu.description, - } - for edu in person.educations - ] - - # Convert interests to list of titles - interests: List[str] = [interest.title for interest in person.interests] - - # Convert accomplishments to structured dictionaries - accomplishments: List[Dict[str, str]] = [ - {"category": acc.category, "title": acc.title} - for acc in person.accomplishments - ] - - # Convert contacts to structured dictionaries - contacts: List[Dict[str, str]] = [ - { - "name": contact.name, - "occupation": contact.occupation, - "url": contact.url, - } - for contact in person.contacts - ] - - # Return the complete profile data - return { - "name": person.name, - "about": person.about, - "experiences": experiences, - "educations": educations, - "interests": interests, - "accomplishments": accomplishments, - "contacts": 
contacts, - "company": person.company, - "job_title": person.job_title, - "open_to_work": getattr(person, "open_to_work", False), - } + except Exception as e: + return handle_tool_error(e, "get_person_profile") diff --git a/main.py b/main.py index 65b86987..ac02f0e7 100644 --- a/main.py +++ b/main.py @@ -16,6 +16,14 @@ from linkedin_mcp_server.drivers.chrome import initialize_driver from linkedin_mcp_server.exceptions import LinkedInMCPError from linkedin_mcp_server.logging_config import configure_logging +from linkedin_scraper.exceptions import ( + CaptchaRequiredError, + InvalidCredentialsError, + LoginTimeoutError, + RateLimitError, + SecurityChallengeError, + TwoFactorAuthError, +) from linkedin_mcp_server.server import create_mcp_server, shutdown_handler @@ -56,11 +64,26 @@ def main() -> None: # Initialize the driver with configuration (initialize driver checks for lazy init options) try: initialize_driver() - except LinkedInMCPError as e: + except ( + LinkedInMCPError, + CaptchaRequiredError, + InvalidCredentialsError, + SecurityChallengeError, + TwoFactorAuthError, + RateLimitError, + LoginTimeoutError, + ) as e: logger.error( f"Failed to initialize driver: {str(e)}", extra={"error_type": type(e).__name__, "error_details": str(e)}, ) + + # Always terminate if login fails and we're not using lazy initialization + if not config.server.lazy_init: + print(f"\nโŒ {str(e)}") + sys.exit(1) + + # In lazy init mode with non-interactive, still exit on error if config.chrome.non_interactive: sys.exit(1) else: diff --git a/uv.lock b/uv.lock index db7a201a..48ea821e 100644 --- a/uv.lock +++ b/uv.lock @@ -702,7 +702,7 @@ dev = [ [[package]] name = "linkedin-scraper" version = "2.11.5" -source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git#88b22aa1cb15371a4460ed2f4988ac79ea4c200f" } +source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git#1d6ff82f8b0950b060529b12102a674cfabad1bb" } dependencies = [ { name = "lxml" }, { name = 
"python-dotenv" }, @@ -1377,7 +1377,7 @@ wheels = [ [[package]] name = "selenium" -version = "4.33.0" +version = "4.34.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -1387,9 +1387,9 @@ dependencies = [ { name = "urllib3", extra = ["socks"] }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5f/7e/4145666dd275760b56d0123a9439915af167932dd6caa19b5f8b281ae297/selenium-4.33.0.tar.gz", hash = "sha256:d90974db95d2cdeb34d2fb1b13f03dc904f53e6c5d228745b0635ada10cd625d", size = 882387, upload-time = "2025-05-23T17:45:22.046Z" } +sdist = { url = "https://files.pythonhosted.org/packages/13/44/a6df7eae7fe929f18ffe08221fb05215ce991adc718bbe693a8d46ff09b7/selenium-4.34.0.tar.gz", hash = "sha256:8b7eb05a0ed22f9bb2187fd256c28630824ad01d8397b4e68bc0af7dabf26c80", size = 895790, upload-time = "2025-06-29T07:30:09.263Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/c0/092fde36918574e144613de73ba43c36ab8d31e7d36bb44c35261909452d/selenium-4.33.0-py3-none-any.whl", hash = "sha256:af9ea757813918bddfe05cc677bf63c8a0cd277ebf8474b3dd79caa5727fca85", size = 9370835, upload-time = "2025-05-23T17:45:19.448Z" }, + { url = "https://files.pythonhosted.org/packages/11/b3/6a043a6968f263e90537b48870f7366f91a6d4c5cc67e5b656311c98d0f5/selenium-4.34.0-py3-none-any.whl", hash = "sha256:fc3535cfd99a073c21bf9091519b48ed31b34bf2cbd132f62e8c732b2e815b2d", size = 9403599, upload-time = "2025-06-29T07:30:07.012Z" }, ] [[package]] @@ -1526,11 +1526,11 @@ wheels = [ [[package]] name = "typing-extensions" -version = "4.13.2" +version = "4.14.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967, upload-time = "2025-04-10T14:19:05.416Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806, upload-time = "2025-04-10T14:19:03.967Z" }, + { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, ] [[package]] From 6de5d1511838f020909be809dc8dd64a25b2284f Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 5 Jul 2025 13:33:57 -0400 Subject: [PATCH 117/565] fix(main): clarify setup mode condition for Claude config --- main.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/main.py b/main.py index ac02f0e7..50de5518 100644 --- a/main.py +++ b/main.py @@ -96,8 +96,8 @@ def main() -> None: if config.server.setup: transport = choose_transport_interactive() - # Print configuration for Claude if in setup mode - if config.server.setup: + # Print configuration for Claude if in setup mode and using stdio transport + if config.server.setup and transport == "stdio": print_claude_config() # Create and run the MCP server From ab08b722a4c46154381e7003a43771a99ea749be Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 5 Jul 2025 13:38:49 -0400 Subject: [PATCH 118/565] docs(README): update tool status and known issues --- README.md | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 2848174b..0b237b95 100644 
--- a/README.md +++ b/README.md @@ -24,18 +24,17 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c ## Features & Tool Status -**Working Tools:** +**Current Status: All Tools Working** > [!TIP] > - **Profile Scraping** (`get_person_profile`): Get detailed information from LinkedIn profiles including work history, education, skills, and connections > - **Company Analysis** (`get_company_profile`): Extract company information with comprehensive details > - **Job Details** (`get_job_details`): Retrieve specific job posting details using direct LinkedIn job URLs +> - **Job Search** (`search_jobs`): Search for jobs with filters like location, keywords, and experience level +> - **Recommended Jobs** (`get_recommended_jobs`): Get personalized job recommendations based on your profile > - **Session Management** (`close_session`): Properly close browser session and clean up resources -**Known Issues: (should be fixed after this [PR](https://github.com/joeyism/linkedin_scraper/pull/252) is merged)** -> [!WARNING] -> - **Job Search** (`search_jobs`): Compatibility issues with LinkedIn's search interface -> - **Recommended Jobs** (`get_recommended_jobs`): Selenium method compatibility issues -> - **Company Profiles** (`get_company_profile`): Some companies can't be accessed / may return empty results (need further investigation) +> [!NOTE] +> All tools are currently functional and actively maintained. If you encounter any issues, please report them in the [GitHub issues](https://github.com/stickerdaniel/linkedin-mcp-server/issues). 
--- From b8e13c6aeeed7f2effc2622f108711829a8c185b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 5 Jul 2025 14:28:14 -0400 Subject: [PATCH 119/565] refactor(logging): implement structured logging across modules --- .vscode/settings.json | 9 +++ linkedin_mcp_server/config/secrets.py | 10 +-- linkedin_mcp_server/drivers/chrome.py | 92 +++++++++++++++------------ linkedin_mcp_server/server.py | 12 +++- linkedin_mcp_server/tools/company.py | 7 +- linkedin_mcp_server/tools/job.py | 9 ++- linkedin_mcp_server/tools/person.py | 5 +- main.py | 23 ++++--- 8 files changed, 107 insertions(+), 60 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index d4683ead..585aad76 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -13,7 +13,16 @@ "source.organizeImports.ruff": "explicit" } }, + "python.defaultInterpreterPath": ".venv/bin/python", + "python.terminal.activateEnvironment": true, "yaml.schemas": { "https://www.schemastore.org/github-issue-config.json": "file:///Users/daniel/Documents/development/python/linkedin-mcp-server/.github/ISSUE_TEMPLATE/config.yml" }, + "cursorpyright.analysis.autoImportCompletions": true, + "cursorpyright.analysis.diagnosticMode": "workspace", + "cursorpyright.analysis.extraPaths": [ + "./linkedin_mcp_server" + ], + "cursorpyright.analysis.stubPath": "./linkedin_mcp_server", + "cursorpyright.analysis.typeCheckingMode": "off" } diff --git a/linkedin_mcp_server/config/secrets.py b/linkedin_mcp_server/config/secrets.py index 2ce5fddc..bafe01c9 100644 --- a/linkedin_mcp_server/config/secrets.py +++ b/linkedin_mcp_server/config/secrets.py @@ -22,14 +22,14 @@ def get_credentials() -> Dict[str, str]: # First, try configuration (includes environment variables) if config.linkedin.email and config.linkedin.password: - print("Using LinkedIn credentials from configuration") + logger.info("Using LinkedIn credentials from configuration") return {"email": config.linkedin.email, "password": config.linkedin.password} # 
Second, try keyring if enabled if config.linkedin.use_keyring: credentials = get_credentials_from_keyring() if credentials["email"] and credentials["password"]: - print(f"Using LinkedIn credentials from {get_keyring_name()}") + logger.info(f"Using LinkedIn credentials from {get_keyring_name()}") return {"email": credentials["email"], "password": credentials["password"]} # If in non-interactive mode and no credentials found, raise error @@ -57,9 +57,9 @@ def prompt_for_credentials() -> Dict[str, str]: # Store credentials securely in keyring if save_credentials_to_keyring(credentials["email"], credentials["password"]): - print(f"โœ… Credentials stored securely in {get_keyring_name()}") + logger.info(f"Credentials stored securely in {get_keyring_name()}") else: - print("โš ๏ธ Warning: Could not store credentials in system keyring.") - print(" Your credentials will only be used for this session.") + logger.warning("Could not store credentials in system keyring.") + logger.info("Your credentials will only be used for this session.") return credentials diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index b0f14cad..3c21db2d 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -5,6 +5,7 @@ This module handles the creation and management of Chrome WebDriver instances. 
""" +import logging import os import sys from typing import Dict, Optional @@ -34,6 +35,8 @@ # Global driver storage to reuse sessions active_drivers: Dict[str, webdriver.Chrome] = {} +logger = logging.getLogger(__name__) + def get_or_create_driver() -> Optional[webdriver.Chrome]: """ @@ -55,8 +58,8 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: # Set up Chrome options chrome_options = Options() - print( - f"๐ŸŒ Running browser in {'headless' if config.chrome.headless else 'visible'} mode" + logger.info( + f"Running browser in {'headless' if config.chrome.headless else 'visible'} mode" ) if config.chrome.headless: chrome_options.add_argument("--headless=new") @@ -78,7 +81,7 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: # Initialize Chrome driver try: - print("๐ŸŒ Initializing Chrome WebDriver...") + logger.info("Initializing Chrome WebDriver...") # Use ChromeDriver path from environment or config chromedriver_path = ( @@ -86,14 +89,14 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: ) if chromedriver_path: - print(f"๐ŸŒ Using ChromeDriver at path: {chromedriver_path}") + logger.info(f"Using ChromeDriver at path: {chromedriver_path}") service = Service(executable_path=chromedriver_path) driver = webdriver.Chrome(service=service, options=chrome_options) else: - print("๐ŸŒ Using auto-detected ChromeDriver") + logger.info("Using auto-detected ChromeDriver") driver = webdriver.Chrome(options=chrome_options) - print("โœ… Chrome WebDriver initialized successfully") + logger.info("Chrome WebDriver initialized successfully") # Add a page load timeout for safety driver.set_page_load_timeout(60) @@ -103,7 +106,7 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: for attempt in range(max_retries): try: if login_to_linkedin(driver): - print("Successfully logged in to LinkedIn") + logger.info("Successfully logged in to LinkedIn") active_drivers[session_id] = driver return driver except ( @@ -123,15 +126,18 @@ def 
get_or_create_driver() -> Optional[webdriver.Chrome]: # In interactive mode, handle the error and potentially retry should_retry = handle_login_error(e) if should_retry and attempt < max_retries - 1: - print(f"๐Ÿ”„ Retry attempt {attempt + 2}/{max_retries}") + logger.info(f"Retry attempt {attempt + 2}/{max_retries}") continue else: # Clean up driver on final failure driver.quit() return None except Exception as e: - error_msg = f"๐Ÿ›‘ Error creating web driver: {e}" - print(error_msg) + error_msg = f"Error creating web driver: {e}" + logger.error( + error_msg, + extra={"exception_type": type(e).__name__, "exception_message": str(e)}, + ) if config.chrome.non_interactive: raise DriverInitializationError(error_msg) @@ -169,7 +175,7 @@ def login_to_linkedin(driver: webdriver.Chrome) -> bool: raise CredentialsNotFoundError("No credentials available") # Login to LinkedIn using enhanced linkedin-scraper - print("๐Ÿ”‘ Logging in to LinkedIn...") + logger.info("Logging in to LinkedIn...") from linkedin_scraper import actions # type: ignore @@ -182,7 +188,7 @@ def login_to_linkedin(driver: webdriver.Chrome) -> bool: interactive=not config.chrome.non_interactive, ) - print("โœ… Successfully logged in to LinkedIn") + logger.info("Successfully logged in to LinkedIn") return True except Exception: @@ -203,7 +209,7 @@ def login_to_linkedin(driver: webdriver.Chrome) -> bool: elif "feed" in current_url or "mynetwork" in current_url: # Actually logged in successfully despite the exception - print("โœ… Successfully logged in to LinkedIn") + logger.info("Successfully logged in to LinkedIn") return True else: @@ -232,10 +238,10 @@ def handle_login_error(error: Exception) -> bool: """ config = get_config() - print(f"\nโŒ {str(error)}") + logger.error(f"\nโŒ {str(error)}") if config.chrome.headless: - print( + logger.info( "๐Ÿ” Try running with visible browser window: uv run main.py --no-headless" ) @@ -252,8 +258,8 @@ def handle_login_error(error: Exception) -> bool: ) if retry and 
retry.get("retry", False): clear_credentials_from_keyring() - print("โœ… Credentials cleared from keyring.") - print("๐Ÿ”„ Retrying with new credentials...") + logger.info("โœ… Credentials cleared from keyring.") + logger.info("๐Ÿ”„ Retrying with new credentials...") return True return False @@ -266,23 +272,25 @@ def initialize_driver() -> None: config = get_config() if config.server.lazy_init: - print("Using lazy initialization - driver will be created on first tool call") + logger.info( + "Using lazy initialization - driver will be created on first tool call" + ) if config.linkedin.email and config.linkedin.password: - print("LinkedIn credentials found in configuration") + logger.info("LinkedIn credentials found in configuration") else: - print( + logger.info( "No LinkedIn credentials found - will look for stored credentials on first use" ) return # Validate chromedriver can be found if config.chrome.chromedriver_path: - print(f"โœ… ChromeDriver found at: {config.chrome.chromedriver_path}") + logger.info(f"โœ… ChromeDriver found at: {config.chrome.chromedriver_path}") os.environ["CHROMEDRIVER"] = config.chrome.chromedriver_path else: - print("โš ๏ธ ChromeDriver not found in common locations.") - print("โšก Continuing with automatic detection...") - print( + logger.info("โš ๏ธ ChromeDriver not found in common locations.") + logger.info("โšก Continuing with automatic detection...") + logger.info( "๐Ÿ’ก Tip: install ChromeDriver and set the CHROMEDRIVER environment variable" ) @@ -290,7 +298,7 @@ def initialize_driver() -> None: try: driver = get_or_create_driver() if driver: - print("โœ… Web driver initialized successfully") + logger.info("โœ… Web driver initialized successfully") else: # Driver creation failed - always raise an error raise DriverInitializationError("Failed to initialize web driver") @@ -310,7 +318,7 @@ def initialize_driver() -> None: raise DriverInitializationError( f"Failed to initialize web driver: {str(e)}" ) - print(f"โŒ Failed to 
initialize web driver: {str(e)}") + logger.error(f"โŒ Failed to initialize web driver: {str(e)}") handle_driver_error() @@ -322,7 +330,9 @@ def handle_driver_error() -> None: # Skip interactive handling in non-interactive mode if config.chrome.non_interactive: - print("โŒ ChromeDriver is required for this application to work properly.") + logger.error( + "โŒ ChromeDriver is required for this application to work properly." + ) sys.exit(1) questions = [ @@ -347,24 +357,28 @@ def handle_driver_error() -> None: # Update config with the new path config.chrome.chromedriver_path = path os.environ["CHROMEDRIVER"] = path - print(f"โœ… ChromeDriver path set to: {path}") - print("๐Ÿ’ก Please restart the application to use the new ChromeDriver path.") - print(" Example: uv run main.py") + logger.info(f"โœ… ChromeDriver path set to: {path}") + logger.info( + "๐Ÿ’ก Please restart the application to use the new ChromeDriver path." + ) + logger.info(" Example: uv run main.py") sys.exit(0) else: - print(f"โš ๏ธ Warning: The specified path does not exist: {path}") - print("๐Ÿ’ก Please check the path and restart the application.") + logger.warning(f"โš ๏ธ Warning: The specified path does not exist: {path}") + logger.info("๐Ÿ’ก Please check the path and restart the application.") sys.exit(1) elif answers["chromedriver_action"] == "help": - print("\n๐Ÿ“‹ ChromeDriver Installation Guide:") - print("1. Find your Chrome version: Chrome menu > Help > About Google Chrome") - print( + logger.info("\n๐Ÿ“‹ ChromeDriver Installation Guide:") + logger.info( + "1. Find your Chrome version: Chrome menu > Help > About Google Chrome" + ) + logger.info( "2. Download matching ChromeDriver: https://chromedriver.chromium.org/downloads" ) - print("3. Place ChromeDriver in a location on your PATH") - print(" - macOS/Linux: /usr/local/bin/ is recommended") - print( + logger.info("3. 
Place ChromeDriver in a location on your PATH") + logger.info(" - macOS/Linux: /usr/local/bin/ is recommended") + logger.info( " - Windows: Add to a directory in your PATH or specify the full path\n" ) @@ -373,5 +387,5 @@ def handle_driver_error() -> None: )["try_again"]: initialize_driver() - print("โŒ ChromeDriver is required for this application to work properly.") + logger.error("โŒ ChromeDriver is required for this application to work properly.") sys.exit(1) diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index 3e746cd2..c99afdda 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -5,6 +5,7 @@ This module creates the MCP server and registers all the LinkedIn tools. """ +import logging from typing import Any, Dict from fastmcp import FastMCP @@ -14,6 +15,8 @@ from linkedin_mcp_server.tools.job import register_job_tools from linkedin_mcp_server.tools.person import register_person_tools +logger = logging.getLogger(__name__) + def create_mcp_server() -> FastMCP: """Create and configure the MCP server with all LinkedIn tools.""" @@ -59,4 +62,11 @@ def shutdown_handler() -> None: driver.quit() del active_drivers[session_id] except Exception as e: - print(f"โŒ Error closing driver during shutdown: {e}") + logger.error( + f"Error closing driver during shutdown: {e}", + extra={ + "session_id": session_id, + "exception_type": type(e).__name__, + "exception_message": str(e), + }, + ) diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 65645497..f64879be 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -5,6 +5,7 @@ This module provides tools for scraping LinkedIn company profiles. 
""" +import logging from typing import Any, Dict, List from fastmcp import FastMCP @@ -12,6 +13,8 @@ from linkedin_mcp_server.error_handler import handle_tool_error, safe_get_driver +logger = logging.getLogger(__name__) + def register_company_tools(mcp: FastMCP) -> None: """ @@ -38,9 +41,9 @@ async def get_company_profile( try: driver = safe_get_driver() - print(f"๐Ÿข Scraping company: {linkedin_url}") + logger.info(f"Scraping company: {linkedin_url}") if get_employees: - print("โš ๏ธ Fetching employees may take a while...") + logger.info("Fetching employees may take a while...") company = Company( linkedin_url, diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index b0e41709..05a8f310 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -5,6 +5,7 @@ This module provides tools for scraping LinkedIn job postings and searches. """ +import logging from typing import Any, Dict, List from fastmcp import FastMCP @@ -16,6 +17,8 @@ safe_get_driver, ) +logger = logging.getLogger(__name__) + def register_job_tools(mcp: FastMCP) -> None: """ @@ -46,7 +49,7 @@ async def get_job_details(job_url: str) -> Dict[str, Any]: try: driver = safe_get_driver() - print(f"๐Ÿ’ผ Scraping job: {job_url}") + logger.info(f"Scraping job: {job_url}") job = Job(job_url, driver=driver, close_on_complete=False) # Convert job object to a dictionary @@ -68,7 +71,7 @@ async def search_jobs(search_term: str) -> List[Dict[str, Any]]: try: driver = safe_get_driver() - print(f"๐Ÿ” Searching jobs: {search_term}") + logger.info(f"Searching jobs: {search_term}") job_search = JobSearch(driver=driver, close_on_complete=False, scrape=False) jobs = job_search.search(search_term) @@ -88,7 +91,7 @@ async def get_recommended_jobs() -> List[Dict[str, Any]]: try: driver = safe_get_driver() - print("๐Ÿ“‹ Getting recommended jobs") + logger.info("Getting recommended jobs") job_search = JobSearch( driver=driver, close_on_complete=False, diff --git 
a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index beea1a10..f1f6d706 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -5,6 +5,7 @@ This module provides tools for scraping LinkedIn person profiles. """ +import logging from typing import Any, Dict, List from fastmcp import FastMCP @@ -12,6 +13,8 @@ from linkedin_mcp_server.error_handler import handle_tool_error, safe_get_driver +logger = logging.getLogger(__name__) + def register_person_tools(mcp: FastMCP) -> None: """ @@ -35,7 +38,7 @@ async def get_person_profile(linkedin_url: str) -> Dict[str, Any]: try: driver = safe_get_driver() - print(f"๐Ÿ” Scraping profile: {linkedin_url}") + logger.info(f"Scraping profile: {linkedin_url}") person = Person(linkedin_url, driver=driver, close_on_complete=False) # Convert experiences to structured dictionaries diff --git a/main.py b/main.py index 50de5518..d0703257 100644 --- a/main.py +++ b/main.py @@ -8,14 +8,6 @@ from typing import Literal import inquirer # type: ignore - -from linkedin_mcp_server.cli import print_claude_config - -# Import the new centralized configuration -from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.drivers.chrome import initialize_driver -from linkedin_mcp_server.exceptions import LinkedInMCPError -from linkedin_mcp_server.logging_config import configure_logging from linkedin_scraper.exceptions import ( CaptchaRequiredError, InvalidCredentialsError, @@ -24,8 +16,18 @@ SecurityChallengeError, TwoFactorAuthError, ) + +from linkedin_mcp_server.cli import print_claude_config + +# Import the new centralized configuration +from linkedin_mcp_server.config import get_config +from linkedin_mcp_server.drivers.chrome import initialize_driver +from linkedin_mcp_server.exceptions import LinkedInMCPError +from linkedin_mcp_server.logging_config import configure_logging from linkedin_mcp_server.server import create_mcp_server, shutdown_handler +logger = 
logging.getLogger(__name__) + def choose_transport_interactive() -> Literal["stdio", "streamable-http"]: """Prompt user for transport mode using inquirer.""" @@ -58,7 +60,6 @@ def main() -> None: json_format=config.chrome.non_interactive, # Use JSON format in non-interactive mode ) - logger = logging.getLogger("linkedin_mcp_server") logger.debug(f"Server configuration: {config}") # Initialize the driver with configuration (initialize driver checks for lazy init options) @@ -132,5 +133,9 @@ def exit_gracefully(exit_code: int = 0) -> None: except KeyboardInterrupt: exit_gracefully(0) except Exception as e: + logger.error( + f"Error running MCP server: {e}", + extra={"exception_type": type(e).__name__, "exception_message": str(e)}, + ) print(f"โŒ Error running MCP server: {e}") exit_gracefully(1) From 4595155d58f0f7a93ce590d5bd39623126fe179e Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 5 Jul 2025 14:37:54 -0400 Subject: [PATCH 120/565] feat(logging): add compact formatter for log records --- linkedin_mcp_server/logging_config.py | 39 ++++++++++++++++++++++++--- 1 file changed, 36 insertions(+), 3 deletions(-) diff --git a/linkedin_mcp_server/logging_config.py b/linkedin_mcp_server/logging_config.py index d54dcb61..98616ede 100644 --- a/linkedin_mcp_server/logging_config.py +++ b/linkedin_mcp_server/logging_config.py @@ -42,6 +42,41 @@ def format(self, record: logging.LogRecord) -> str: return json.dumps(log_data) +class CompactFormatter(logging.Formatter): + """Compact formatter that shortens logger names and uses shorter timestamps.""" + + def format(self, record: logging.LogRecord) -> str: + """Format log record with compact formatting. 
+ + Args: + record: The log record to format + + Returns: + Compact-formatted log string + """ + # Create a copy of the record to avoid modifying the original + record_copy = logging.LogRecord( + name=record.name, + level=record.levelno, + pathname=record.pathname, + lineno=record.lineno, + msg=record.msg, + args=record.args, + exc_info=record.exc_info, + func=record.funcName, + ) + record_copy.stack_info = record.stack_info + + # Shorten the logger name by removing the linkedin_mcp_server prefix + if record_copy.name.startswith("linkedin_mcp_server."): + record_copy.name = record_copy.name[len("linkedin_mcp_server.") :] + + # Format the time as HH:MM:SS only + record_copy.asctime = self.formatTime(record_copy, datefmt="%H:%M:%S") + + return f"{record_copy.asctime} - {record_copy.name} - {record.levelname} - {record.getMessage()}" + + def configure_logging(debug: bool = False, json_format: bool = False) -> None: """Configure logging for the LinkedIn MCP Server. @@ -54,9 +89,7 @@ def configure_logging(debug: bool = False, json_format: bool = False) -> None: if json_format: formatter = MCPJSONFormatter() else: - formatter = logging.Formatter( - "%(asctime)s - %(name)s - %(levelname)s - %(message)s" - ) + formatter = CompactFormatter() # Configure root logger root_logger = logging.getLogger() From 811554166857ede5b1c49fdbdaa10bda6c6f9fdd Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 5 Jul 2025 14:40:08 -0400 Subject: [PATCH 121/565] chore(version): bump version to 1.0.8 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 72572f6b..f0f049b3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.0.7" +version = "1.0.8" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." 
readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 48ea821e..527cc634 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.0.7" +version = "1.0.8" source = { virtual = "." } dependencies = [ { name = "fastmcp" }, From f66bfd1c53708057c563df67f0a1d38b0ded3b4a Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 5 Jul 2025 18:40:33 +0000 Subject: [PATCH 122/565] chore(dxt): update manifest.json version to v1.0.8 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 5f45a5e2..7f49b0d0 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.0.7", + "version": "1.0.8", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From 7878e0bf46b898a02fb56149c3ea85bcf3a61d75 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Sat, 5 Jul 2025 14:47:42 -0400 Subject: [PATCH 123/565] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0b237b95..6c130639 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,7 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c > - **Session Management** (`close_session`): Properly close browser session and clean up resources > [!NOTE] -> All tools are currently functional and actively maintained. If you encounter any issues, please report them in the [GitHub issues](https://github.com/stickerdaniel/linkedin-mcp-server/issues). +> July 2025: All tools are currently functional and actively maintained. If you encounter any issues, please report them in the [GitHub issues](https://github.com/stickerdaniel/linkedin-mcp-server/issues). 
--- From 84cf29f0da8eb548d19ea79ecf8b91d14b26229b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 5 Jul 2025 16:34:34 -0400 Subject: [PATCH 124/565] feat(cookie): implement LinkedIn cookie extraction and usage Add functionality to extract LinkedIn session cookies for Docker setup. Introduce a new command-line flag `--get-cookie` to facilitate cookie retrieval. Update configuration to support cookie management via environment variables and keyring. Enhance login process to prioritize cookie authentication, falling back to credentials if necessary. Update manifest and schema to reflect changes in cookie handling. --- linkedin_mcp_server/config/loaders.py | 22 +++ linkedin_mcp_server/config/providers.py | 22 +++ linkedin_mcp_server/config/schema.py | 2 + linkedin_mcp_server/config/secrets.py | 91 +++++++++++- linkedin_mcp_server/drivers/chrome.py | 181 ++++++++++++++++++++++-- main.py | 60 ++++++++ manifest.json | 16 +-- 7 files changed, 370 insertions(+), 24 deletions(-) diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index ca1ea276..87735cc5 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -33,6 +33,9 @@ def load_from_env(config: AppConfig) -> AppConfig: if password := os.environ.get("LINKEDIN_PASSWORD"): config.linkedin.password = password + if cookie := os.environ.get("LINKEDIN_COOKIE"): + config.linkedin.cookie = cookie + # ChromeDriver configuration if chromedriver := os.environ.get("CHROMEDRIVER"): config.chrome.chromedriver_path = chromedriver @@ -124,6 +127,18 @@ def load_from_args(config: AppConfig) -> AppConfig: help="Specify the path to the ChromeDriver executable", ) + parser.add_argument( + "--get-cookie", + action="store_true", + help="Login with credentials and display cookie for Docker setup", + ) + + parser.add_argument( + "--cookie", + type=str, + help="Specify LinkedIn cookie directly", + ) + args = parser.parse_args() # Update configuration with 
parsed arguments @@ -157,6 +172,13 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.chromedriver: config.chrome.chromedriver_path = args.chromedriver + if hasattr(args, "get_cookie") and args.get_cookie: + config.server.get_cookie = True + config.chrome.non_interactive = True + + if args.cookie: + config.linkedin.cookie = args.cookie + return config diff --git a/linkedin_mcp_server/config/providers.py b/linkedin_mcp_server/config/providers.py index 31a39779..aab30543 100644 --- a/linkedin_mcp_server/config/providers.py +++ b/linkedin_mcp_server/config/providers.py @@ -10,6 +10,7 @@ SERVICE_NAME = "linkedin_mcp_server" EMAIL_KEY = "linkedin_email" PASSWORD_KEY = "linkedin_password" +COOKIE_KEY = "linkedin_cookie" logger = logging.getLogger(__name__) @@ -74,6 +75,27 @@ def clear_credentials_from_keyring() -> bool: return False +def get_cookie_from_keyring() -> Optional[str]: + """Retrieve LinkedIn cookie from system keyring.""" + return get_secret_from_keyring(COOKIE_KEY) + + +def save_cookie_to_keyring(cookie: str) -> bool: + """Save LinkedIn cookie to system keyring.""" + return set_secret_in_keyring(COOKIE_KEY, cookie) + + +def clear_cookie_from_keyring() -> bool: + """Clear stored cookie from the keyring.""" + try: + keyring.delete_password(SERVICE_NAME, COOKIE_KEY) + logger.info(f"Cookie removed from {get_keyring_name()}") + return True + except KeyringError as e: + logger.error(f"Error clearing cookie: {e}") + return False + + def get_chromedriver_paths() -> List[str]: """Get possible ChromeDriver paths based on the platform.""" paths = [ diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index 55d912f5..cf3e0129 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -19,6 +19,7 @@ class LinkedInConfig: email: Optional[str] = None password: Optional[str] = None + cookie: Optional[str] = None use_keyring: bool = True @@ -30,6 +31,7 @@ class ServerConfig: lazy_init: bool 
= True debug: bool = False setup: bool = True + get_cookie: bool = False # HTTP transport configuration host: str = "127.0.0.1" port: int = 8000 diff --git a/linkedin_mcp_server/config/secrets.py b/linkedin_mcp_server/config/secrets.py index bafe01c9..8857e40a 100644 --- a/linkedin_mcp_server/config/secrets.py +++ b/linkedin_mcp_server/config/secrets.py @@ -11,13 +11,59 @@ get_credentials_from_keyring, get_keyring_name, save_credentials_to_keyring, + get_cookie_from_keyring, + save_cookie_to_keyring, ) logger = logging.getLogger(__name__) +def has_authentication() -> bool: + """Check if authentication is available without triggering interactive setup.""" + config = get_config() + + # Check environment variable + if config.linkedin.cookie: + return True + + # Check keyring if enabled + if config.linkedin.use_keyring: + cookie = get_cookie_from_keyring() + if cookie: + return True + + return False + + +def get_authentication() -> str: + """Get LinkedIn cookie from keyring, environment, or interactive setup.""" + config = get_config() + + # First, try environment variable + if config.linkedin.cookie: + logger.info("Using LinkedIn cookie from environment") + return config.linkedin.cookie + + # Second, try keyring if enabled + if config.linkedin.use_keyring: + cookie = get_cookie_from_keyring() + if cookie: + logger.info(f"Using LinkedIn cookie from {get_keyring_name()}") + return cookie + + # If in non-interactive mode and no cookie found, raise error + if config.chrome.non_interactive: + raise CredentialsNotFoundError( + "No LinkedIn cookie found. Please provide cookie via " + "environment variable (LINKEDIN_COOKIE) or run with --get-cookie to obtain one." 
+ ) + + # Otherwise, prompt for cookie or setup + return prompt_for_authentication() + + def get_credentials() -> Dict[str, str]: - """Get LinkedIn credentials from config, keyring, or prompt.""" + """Get LinkedIn credentials from config, keyring, or prompt (legacy for --get-cookie).""" config = get_config() # First, try configuration (includes environment variables) @@ -43,6 +89,49 @@ def get_credentials() -> Dict[str, str]: return prompt_for_credentials() +def prompt_for_authentication() -> str: + """Prompt user for LinkedIn cookie or setup via login.""" + print("๐Ÿ”— LinkedIn MCP Server Setup") + + # Ask if user has a cookie + has_cookie = inquirer.confirm("Do you have a LinkedIn cookie?", default=False) + + if has_cookie: + cookie = inquirer.text("LinkedIn Cookie", validate=lambda _, x: len(x) > 10) + if save_cookie_to_keyring(cookie): + logger.info(f"Cookie stored securely in {get_keyring_name()}") + else: + logger.warning("Could not store cookie in system keyring.") + logger.info("Your cookie will only be used for this session.") + return cookie + else: + # Login flow to get cookie + return setup_cookie_from_login() + + +def setup_cookie_from_login() -> str: + """Login with credentials and capture cookie.""" + from linkedin_mcp_server.drivers.chrome import setup_driver_for_cookie_capture + + print("๐Ÿ”‘ LinkedIn login required to obtain cookie") + credentials = prompt_for_credentials() + + # Use special driver setup for cookie capture + cookie = setup_driver_for_cookie_capture( + credentials["email"], credentials["password"] + ) + + if cookie: + if save_cookie_to_keyring(cookie): + logger.info(f"Cookie stored securely in {get_keyring_name()}") + else: + logger.warning("Could not store cookie in system keyring.") + logger.info("Your cookie will only be used for this session.") + return cookie + else: + raise CredentialsNotFoundError("Failed to obtain LinkedIn cookie") + + def prompt_for_credentials() -> Dict[str, str]: """Prompt user for LinkedIn credentials 
and store them securely.""" print(f"๐Ÿ”‘ LinkedIn credentials required (will be stored in {get_keyring_name()})") diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 3c21db2d..2842638d 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -25,8 +25,15 @@ from selenium.webdriver.chrome.service import Service from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.config.providers import clear_credentials_from_keyring -from linkedin_mcp_server.config.secrets import get_credentials +from linkedin_mcp_server.config.providers import ( + clear_credentials_from_keyring, + clear_cookie_from_keyring, +) +from linkedin_mcp_server.config.secrets import ( + get_authentication, + get_credentials, + has_authentication, +) from linkedin_mcp_server.exceptions import ( CredentialsNotFoundError, DriverInitializationError, @@ -145,9 +152,116 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: raise WebDriverException(error_msg) +def login_with_cookie(driver: webdriver.Chrome, cookie: str) -> bool: + """ + Log in to LinkedIn using session cookie. + + Args: + driver: Chrome WebDriver instance + cookie: LinkedIn session cookie + + Returns: + bool: True if login was successful, False otherwise + """ + try: + from linkedin_scraper import actions # type: ignore + + # Use linkedin-scraper cookie login + actions.login(driver, cookie=cookie) + + # Verify login by checking current URL + current_url = driver.current_url + if ( + "feed" in current_url + or "mynetwork" in current_url + or "linkedin.com/in/" in current_url + ): + return True + else: + return False + except Exception as e: + logger.warning(f"Cookie authentication failed: {e}") + return False + + +def capture_session_cookie(driver: webdriver.Chrome) -> Optional[str]: + """ + Capture LinkedIn session cookie from driver. 
+ + Args: + driver: Chrome WebDriver instance + + Returns: + Optional[str]: Session cookie if found, None otherwise + """ + try: + # Get li_at cookie which is the main LinkedIn session cookie + cookie = driver.get_cookie("li_at") + if cookie and cookie.get("value"): + return f"li_at={cookie['value']}" + return None + except Exception as e: + logger.warning(f"Failed to capture session cookie: {e}") + return None + + +def setup_driver_for_cookie_capture(email: str, password: str) -> Optional[str]: + """ + Setup a temporary driver to login and capture cookie. + + Args: + email: LinkedIn email + password: LinkedIn password + + Returns: + Optional[str]: Captured cookie if successful, None otherwise + """ + config = get_config() + + # Set up Chrome options for cookie capture + chrome_options = Options() + if config.chrome.headless: + chrome_options.add_argument("--headless=new") + + # Add essential options + chrome_options.add_argument("--no-sandbox") + chrome_options.add_argument("--disable-dev-shm-usage") + chrome_options.add_argument("--disable-gpu") + chrome_options.add_argument("--window-size=1920,1080") + + try: + # Create temporary driver + driver = webdriver.Chrome(options=chrome_options) + driver.set_page_load_timeout(60) + + # Login using linkedin-scraper + from linkedin_scraper import actions # type: ignore + + actions.login( + driver, + email, + password, + interactive=not config.chrome.non_interactive, + ) + + # Capture cookie + cookie = capture_session_cookie(driver) + + # Clean up + driver.quit() + + return cookie + + except Exception as e: + logger.error(f"Failed to capture cookie: {e}") + if "driver" in locals(): + driver.quit() + return None + + def login_to_linkedin(driver: webdriver.Chrome) -> bool: """ - Log in to LinkedIn using stored or provided credentials. + Log in to LinkedIn using cookie-first authentication. 
Args: driver: Chrome WebDriver instance @@ -160,7 +274,27 @@ def login_to_linkedin(driver: webdriver.Chrome) -> bool: """ config = get_config() - # Get LinkedIn credentials from config + # Try cookie authentication first + try: + cookie = get_authentication() + if login_with_cookie(driver, cookie): + logger.info("Successfully logged in to LinkedIn using cookie") + return True + else: + # Cookie login failed - clear invalid cookie from keyring + logger.warning( + "Cookie authentication failed - cookie may be expired or invalid" + ) + clear_cookie_from_keyring() + except CredentialsNotFoundError: + # No cookie available, fall back to credentials + pass + except Exception as e: + logger.warning(f"Cookie authentication failed: {e}") + # Clear invalid cookie from keyring + clear_cookie_from_keyring() + + # Fallback to credential-based login try: credentials = get_credentials() except CredentialsNotFoundError as e: @@ -172,10 +306,10 @@ def login_to_linkedin(driver: webdriver.Chrome) -> bool: credentials = prompt_for_credentials() if not credentials: - raise CredentialsNotFoundError("No credentials available") + raise CredentialsNotFoundError("No authentication method available") # Login to LinkedIn using enhanced linkedin-scraper - logger.info("Logging in to LinkedIn...") + logger.info("Logging in to LinkedIn with credentials...") from linkedin_scraper import actions # type: ignore @@ -189,6 +323,15 @@ def login_to_linkedin(driver: webdriver.Chrome) -> bool: ) logger.info("Successfully logged in to LinkedIn") + + # Capture cookie for future use + cookie = capture_session_cookie(driver) + if cookie: + from linkedin_mcp_server.config.providers import save_cookie_to_keyring + + save_cookie_to_keyring(cookie) + logger.info("Session cookie captured and stored") + return True except Exception: @@ -210,6 +353,15 @@ def login_to_linkedin(driver: webdriver.Chrome) -> bool: elif "feed" in current_url or "mynetwork" in current_url: # Actually logged in successfully despite the 
exception logger.info("Successfully logged in to LinkedIn") + + # Capture cookie for future use + cookie = capture_session_cookie(driver) + if cookie: + from linkedin_mcp_server.config.providers import save_cookie_to_keyring + + save_cookie_to_keyring(cookie) + logger.info("Session cookie captured and stored") + return True else: @@ -275,14 +427,21 @@ def initialize_driver() -> None: logger.info( "Using lazy initialization - driver will be created on first tool call" ) - if config.linkedin.email and config.linkedin.password: - logger.info("LinkedIn credentials found in configuration") + if has_authentication(): + logger.info("LinkedIn authentication found in configuration") else: - logger.info( - "No LinkedIn credentials found - will look for stored credentials on first use" - ) + logger.info("No LinkedIn authentication found - will set up on first use") return + # Pre-check authentication availability to trigger setup if needed + if not config.chrome.non_interactive and not has_authentication(): + # In interactive mode without authentication, trigger setup first + logger.info("Setting up LinkedIn authentication...") + try: + get_authentication() # This will trigger the interactive setup + except CredentialsNotFoundError: + pass # Setup was cancelled or failed, continue to driver creation + # Validate chromedriver can be found if config.chrome.chromedriver_path: logger.info(f"โœ… ChromeDriver found at: {config.chrome.chromedriver_path}") diff --git a/main.py b/main.py index d0703257..747eadc4 100644 --- a/main.py +++ b/main.py @@ -46,6 +46,62 @@ def choose_transport_interactive() -> Literal["stdio", "streamable-http"]: return answers["transport"] +def get_cookie_and_exit() -> None: + """Get LinkedIn cookie and exit (for Docker setup).""" + print("๐Ÿ”— LinkedIn MCP Server - Cookie Extraction ๐Ÿ”—") + print("=" * 50) + + config = get_config() + + # Configure logging + configure_logging( + debug=config.server.debug, + json_format=config.chrome.non_interactive, + ) + + 
try: + from linkedin_mcp_server.config.secrets import get_credentials + from linkedin_mcp_server.drivers.chrome import setup_driver_for_cookie_capture + + # Get credentials + credentials = get_credentials() + + print("๐Ÿ”‘ Logging in to LinkedIn...") + cookie = setup_driver_for_cookie_capture( + credentials["email"], credentials["password"] + ) + + if cookie: + print("โœ… Login successful!") + print(f"๐Ÿช LinkedIn Cookie: {cookie}") + + # Try to copy to clipboard + try: + import pyperclip + + pyperclip.copy(cookie) + print("๐Ÿ“‹ Cookie copied to clipboard!") + except Exception as e: + logger.warning(f"Could not copy to clipboard: {e}") + print("โš ๏ธ Copy the cookie above manually") + + print("\n๐Ÿ“ Usage:") + print("1. Copy the cookie above") + print("2. Set LINKEDIN_COOKIE environment variable in your Docker setup") + print("3. Or paste into Claude Desktop configuration") + + else: + print("โŒ Failed to obtain cookie") + sys.exit(1) + + except Exception as e: + logger.error(f"Error getting cookie: {e}") + print(f"โŒ Error getting cookie: {e}") + sys.exit(1) + + sys.exit(0) + + def main() -> None: """Initialize and run the LinkedIn MCP server.""" print("๐Ÿ”— LinkedIn MCP Server ๐Ÿ”—") @@ -54,6 +110,10 @@ def main() -> None: # Get configuration using the new centralized system config = get_config() + # Handle --get-cookie flag + if config.server.get_cookie: + get_cookie_and_exit() + # Configure logging configure_logging( debug=config.server.debug, diff --git a/manifest.json b/manifest.json index 7f49b0d0..0311c434 100644 --- a/manifest.json +++ b/manifest.json @@ -24,8 +24,7 @@ "command": "docker", "args": [ "run", "-i", "--rm", - "-e", "LINKEDIN_EMAIL=${user_config.linkedin_email}", - "-e", "LINKEDIN_PASSWORD=${user_config.linkedin_password}", + "-e", "LINKEDIN_COOKIE=${user_config.linkedin_cookie}", "stickerdaniel/linkedin-mcp-server" ] } @@ -84,16 +83,9 @@ } ], "user_config": { - "linkedin_email": { - "title": "LinkedIn Email", - "description": "Your 
LinkedIn account email address", - "type": "string", - "required": true, - "sensitive": false - }, - "linkedin_password": { - "title": "LinkedIn Password", - "description": "Your LinkedIn account password", + "linkedin_cookie": { + "title": "LinkedIn Cookie", + "description": "LinkedIn session cookie. Run 'docker run -it --rm -e LINKEDIN_EMAIL=your@email.com -e LINKEDIN_PASSWORD=yourpass stickerdaniel/linkedin-mcp-server --get-cookie' to obtain", "type": "string", "required": true, "sensitive": true From 5a3471fedc5e242bc3f360e92f4d704fa4bb9536 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 04:56:53 -0400 Subject: [PATCH 125/565] feat(authentication): implement phased authentication setup Introduce a structured approach to LinkedIn authentication with clear phase separation: Authentication Setup, Driver Management, and Server Runtime. Enhance error handling and logging during the authentication process. Update driver initialization to utilize session cookies effectively, improving overall user experience and reliability. Refactor existing code to streamline driver management and ensure proper cleanup of resources. Update error messages for clarity and consistency. 
--- linkedin_mcp_server/authentication.py | 171 ++++++++ linkedin_mcp_server/config/__init__.py | 9 +- linkedin_mcp_server/config/loaders.py | 5 +- linkedin_mcp_server/config/secrets.py | 6 +- linkedin_mcp_server/drivers/chrome.py | 552 +++++++------------------ linkedin_mcp_server/error_handler.py | 17 +- linkedin_mcp_server/logging_config.py | 10 +- linkedin_mcp_server/setup.py | 337 +++++++++++++++ main.py | 305 ++++++++++---- uv.lock | 2 +- 10 files changed, 900 insertions(+), 514 deletions(-) create mode 100644 linkedin_mcp_server/authentication.py create mode 100644 linkedin_mcp_server/setup.py diff --git a/linkedin_mcp_server/authentication.py b/linkedin_mcp_server/authentication.py new file mode 100644 index 00000000..fb3ea828 --- /dev/null +++ b/linkedin_mcp_server/authentication.py @@ -0,0 +1,171 @@ +# linkedin_mcp_server/authentication.py +""" +Pure authentication module for LinkedIn MCP Server. + +This module handles authentication without any driver dependencies. +""" + +import logging + +from linkedin_mcp_server.config import get_config +from linkedin_mcp_server.config.providers import ( + get_cookie_from_keyring, + save_cookie_to_keyring, + clear_cookie_from_keyring, +) +from linkedin_mcp_server.exceptions import CredentialsNotFoundError + +# Constants for cookie validation +MIN_COOKIE_LENGTH = 20 +MIN_RAW_COOKIE_LENGTH = 10 + +logger = logging.getLogger(__name__) + + +def has_authentication() -> bool: + """ + Check if authentication is available without triggering setup. + + Returns: + bool: True if authentication (cookie) is available, False otherwise + """ + config = get_config() + + # Check environment variable + if config.linkedin.cookie: + return True + + # Check keyring if enabled + if config.linkedin.use_keyring: + cookie = get_cookie_from_keyring() + if cookie: + return True + + return False + + +def get_authentication() -> str: + """ + Get LinkedIn cookie from available sources. 
+ + Returns: + str: LinkedIn session cookie + + Raises: + CredentialsNotFoundError: If no authentication is available + """ + config = get_config() + + # First, try environment variable + if config.linkedin.cookie: + logger.info("Using LinkedIn cookie from environment") + return config.linkedin.cookie + + # Second, try keyring if enabled + if config.linkedin.use_keyring: + cookie = get_cookie_from_keyring() + if cookie: + logger.info("Using LinkedIn cookie from keyring") + return cookie + + # No authentication available + raise CredentialsNotFoundError("No LinkedIn cookie found") + + +def store_authentication(cookie: str) -> bool: + """ + Store LinkedIn cookie securely. + + Args: + cookie: LinkedIn session cookie to store + + Returns: + bool: True if storage was successful, False otherwise + """ + config = get_config() + + if config.linkedin.use_keyring: + success = save_cookie_to_keyring(cookie) + if success: + logger.info("Cookie stored securely in keyring") + else: + logger.warning("Could not store cookie in system keyring") + return success + else: + logger.info("Keyring disabled, cookie not stored") + return False + + +def clear_authentication() -> bool: + """ + Clear stored authentication. + + Returns: + bool: True if clearing was successful, False otherwise + """ + config = get_config() + + if config.linkedin.use_keyring: + success = clear_cookie_from_keyring() + if success: + logger.info("Authentication cleared from keyring") + else: + logger.warning("Could not clear authentication from keyring") + return success + else: + logger.info("Keyring disabled, nothing to clear") + return True + + +def validate_cookie_format(cookie: str) -> bool: + """ + Validate that the cookie has the expected format. 
+ + Args: + cookie: Cookie string to validate + + Returns: + bool: True if cookie format is valid, False otherwise + """ + if not cookie: + return False + + # LinkedIn session cookies typically start with "li_at=" + if cookie.startswith("li_at=") and len(cookie) > MIN_COOKIE_LENGTH: + return True + + # Also accept raw cookie values (without li_at= prefix) + if ( + not cookie.startswith("li_at=") + and len(cookie) > MIN_RAW_COOKIE_LENGTH + and "=" not in cookie + ): + return True + + return False + + +def ensure_authentication() -> str: + """ + Ensure authentication is available, raising clear error if not. + + Returns: + str: LinkedIn session cookie + + Raises: + CredentialsNotFoundError: If no authentication is available with clear instructions + """ + try: + return get_authentication() + except CredentialsNotFoundError: + config = get_config() + + if config.chrome.non_interactive: + raise CredentialsNotFoundError( + "No LinkedIn cookie found. Please provide cookie via " + "environment variable (LINKEDIN_COOKIE) or run with --get-cookie to obtain one." + ) + else: + raise CredentialsNotFoundError( + "No LinkedIn authentication found. Please run setup to configure authentication." 
+ ) diff --git a/linkedin_mcp_server/config/__init__.py b/linkedin_mcp_server/config/__init__.py index caa61510..a79d7eef 100644 --- a/linkedin_mcp_server/config/__init__.py +++ b/linkedin_mcp_server/config/__init__.py @@ -1,14 +1,15 @@ # src/linkedin_mcp_server/config/__init__.py -from typing import Optional import logging -from .schema import AppConfig, ChromeConfig, LinkedInConfig, ServerConfig +from typing import Optional + from .loaders import load_config from .providers import ( - get_credentials_from_keyring, - save_credentials_to_keyring, clear_credentials_from_keyring, + get_credentials_from_keyring, get_keyring_name, + save_credentials_to_keyring, ) +from .schema import AppConfig, ChromeConfig, LinkedInConfig, ServerConfig logger = logging.getLogger(__name__) diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index 87735cc5..2143ae71 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -1,10 +1,11 @@ # src/linkedin_mcp_server/config/loaders.py -import os import argparse import logging +import os from typing import Optional -from .schema import AppConfig + from .providers import get_chromedriver_paths +from .schema import AppConfig logger = logging.getLogger(__name__) diff --git a/linkedin_mcp_server/config/secrets.py b/linkedin_mcp_server/config/secrets.py index 8857e40a..a5071aab 100644 --- a/linkedin_mcp_server/config/secrets.py +++ b/linkedin_mcp_server/config/secrets.py @@ -111,13 +111,13 @@ def prompt_for_authentication() -> str: def setup_cookie_from_login() -> str: """Login with credentials and capture cookie.""" - from linkedin_mcp_server.drivers.chrome import setup_driver_for_cookie_capture + from linkedin_mcp_server.setup import capture_cookie_from_credentials print("๐Ÿ”‘ LinkedIn login required to obtain cookie") credentials = prompt_for_credentials() - # Use special driver setup for cookie capture - cookie = setup_driver_for_cookie_capture( + # Use existing 
cookie capture functionality + cookie = capture_cookie_from_credentials( credentials["email"], credentials["password"] ) diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 2842638d..7b6243cd 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -1,16 +1,15 @@ -# src/linkedin_mcp_server/drivers/chrome.py +# linkedin_mcp_server/drivers/chrome.py """ Chrome driver management for LinkedIn scraping. This module handles the creation and management of Chrome WebDriver instances. +Simplified to focus only on driver management without authentication setup. """ import logging import os -import sys from typing import Dict, Optional -import inquirer # type: ignore from linkedin_scraper.exceptions import ( CaptchaRequiredError, InvalidCredentialsError, @@ -25,19 +24,10 @@ from selenium.webdriver.chrome.service import Service from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.config.providers import ( - clear_credentials_from_keyring, - clear_cookie_from_keyring, -) -from linkedin_mcp_server.config.secrets import ( - get_authentication, - get_credentials, - has_authentication, -) -from linkedin_mcp_server.exceptions import ( - CredentialsNotFoundError, - DriverInitializationError, -) +from linkedin_mcp_server.exceptions import DriverInitializationError + +# Constants +DEFAULT_USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36" # Global driver storage to reuse sessions active_drivers: Dict[str, webdriver.Chrome] = {} @@ -45,23 +35,17 @@ logger = logging.getLogger(__name__) -def get_or_create_driver() -> Optional[webdriver.Chrome]: +def create_chrome_driver() -> webdriver.Chrome: """ - Get existing driver or create a new one using the configured settings. + Create a new Chrome WebDriver instance with proper configuration. 
Returns: - Optional[webdriver.Chrome]: Chrome WebDriver instance or None if initialization fails - in non-interactive mode + webdriver.Chrome: Configured Chrome WebDriver instance Raises: - WebDriverException: If the driver cannot be created and not in non-interactive mode + WebDriverException: If driver creation fails """ config = get_config() - session_id = "default" # We use a single session for simplicity - - # Return existing driver if available - if session_id in active_drivers: - return active_drivers[session_id] # Set up Chrome options chrome_options = Options() @@ -71,85 +55,47 @@ def get_or_create_driver() -> Optional[webdriver.Chrome]: if config.chrome.headless: chrome_options.add_argument("--headless=new") - # Add essential options for stability (compatible with both Grid and direct) + # Add essential options for stability chrome_options.add_argument("--no-sandbox") chrome_options.add_argument("--disable-dev-shm-usage") chrome_options.add_argument("--disable-gpu") chrome_options.add_argument("--window-size=1920,1080") chrome_options.add_argument("--disable-extensions") chrome_options.add_argument("--disable-background-timer-throttling") - chrome_options.add_argument( - "--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36" - ) + + # Set user agent (configurable with sensible default) + user_agent = getattr(config.chrome, "user_agent", DEFAULT_USER_AGENT) + chrome_options.add_argument(f"--user-agent={user_agent}") # Add any custom browser arguments from config for arg in config.chrome.browser_args: chrome_options.add_argument(arg) # Initialize Chrome driver - try: - logger.info("Initializing Chrome WebDriver...") + logger.info("Initializing Chrome WebDriver...") - # Use ChromeDriver path from environment or config - chromedriver_path = ( - os.environ.get("CHROMEDRIVER_PATH") or config.chrome.chromedriver_path - ) + # Use ChromeDriver path from environment or config + 
chromedriver_path = ( + os.environ.get("CHROMEDRIVER_PATH") or config.chrome.chromedriver_path + ) - if chromedriver_path: - logger.info(f"Using ChromeDriver at path: {chromedriver_path}") - service = Service(executable_path=chromedriver_path) - driver = webdriver.Chrome(service=service, options=chrome_options) - else: - logger.info("Using auto-detected ChromeDriver") - driver = webdriver.Chrome(options=chrome_options) + if chromedriver_path: + logger.info(f"Using ChromeDriver at path: {chromedriver_path}") + service = Service(executable_path=chromedriver_path) + driver = webdriver.Chrome(service=service, options=chrome_options) + else: + logger.info("Using auto-detected ChromeDriver") + driver = webdriver.Chrome(options=chrome_options) - logger.info("Chrome WebDriver initialized successfully") + logger.info("Chrome WebDriver initialized successfully") - # Add a page load timeout for safety - driver.set_page_load_timeout(60) + # Add a page load timeout for safety + driver.set_page_load_timeout(60) - # Try to log in with retry loop - max_retries = 3 - for attempt in range(max_retries): - try: - if login_to_linkedin(driver): - logger.info("Successfully logged in to LinkedIn") - active_drivers[session_id] = driver - return driver - except ( - CaptchaRequiredError, - InvalidCredentialsError, - SecurityChallengeError, - TwoFactorAuthError, - RateLimitError, - LoginTimeoutError, - CredentialsNotFoundError, - ) as e: - if config.chrome.non_interactive: - # In non-interactive mode, propagate the error - driver.quit() - raise e - else: - # In interactive mode, handle the error and potentially retry - should_retry = handle_login_error(e) - if should_retry and attempt < max_retries - 1: - logger.info(f"Retry attempt {attempt + 2}/{max_retries}") - continue - else: - # Clean up driver on final failure - driver.quit() - return None - except Exception as e: - error_msg = f"Error creating web driver: {e}" - logger.error( - error_msg, - extra={"exception_type": type(e).__name__, 
"exception_message": str(e)}, - ) + # Set shorter implicit wait for faster cookie validation + driver.implicitly_wait(10) - if config.chrome.non_interactive: - raise DriverInitializationError(error_msg) - else: - raise WebDriverException(error_msg) + return driver def login_with_cookie(driver: webdriver.Chrome, cookie: str) -> bool: @@ -166,301 +112,122 @@ def login_with_cookie(driver: webdriver.Chrome, cookie: str) -> bool: try: from linkedin_scraper import actions # type: ignore - # Use linkedin-scraper cookie login + logger.info("Attempting cookie authentication...") + + # Set shorter timeout for faster failure detection + driver.set_page_load_timeout(15) + actions.login(driver, cookie=cookie) - # Verify login by checking current URL + # Quick check - if we're on login page, cookie is invalid current_url = driver.current_url - if ( + if "login" in current_url or "uas/login" in current_url: + logger.warning("Cookie authentication failed - redirected to login page") + return False + elif ( "feed" in current_url or "mynetwork" in current_url or "linkedin.com/in/" in current_url ): + logger.info("Cookie authentication successful") return True else: + logger.warning("Cookie authentication failed - unexpected page") return False + except Exception as e: logger.warning(f"Cookie authentication failed: {e}") return False - - -def capture_session_cookie(driver: webdriver.Chrome) -> Optional[str]: - """ - Capture LinkedIn session cookie from driver. 
- - Args: - driver: Chrome WebDriver instance - - Returns: - Optional[str]: Session cookie if found, None otherwise - """ - try: - # Get li_at cookie which is the main LinkedIn session cookie - cookie = driver.get_cookie("li_at") - if cookie and cookie.get("value"): - return f"li_at={cookie['value']}" - return None - except Exception as e: - logger.warning(f"Failed to capture session cookie: {e}") - return None - - -def setup_driver_for_cookie_capture(email: str, password: str) -> Optional[str]: - """ - Setup a temporary driver to login and capture cookie. - - Args: - email: LinkedIn email - password: LinkedIn password - - Returns: - Optional[str]: Captured cookie if successful, None otherwise - """ - config = get_config() - - # Set up Chrome options for cookie capture - chrome_options = Options() - if config.chrome.headless: - chrome_options.add_argument("--headless=new") - - # Add essential options - chrome_options.add_argument("--no-sandbox") - chrome_options.add_argument("--disable-dev-shm-usage") - chrome_options.add_argument("--disable-gpu") - chrome_options.add_argument("--window-size=1920,1080") - - try: - # Create temporary driver - driver = webdriver.Chrome(options=chrome_options) + finally: + # Restore normal timeout driver.set_page_load_timeout(60) - # Login using linkedin-scraper - from linkedin_scraper import actions # type: ignore - - actions.login( - driver, - email, - password, - interactive=not config.chrome.non_interactive, - ) - - # Capture cookie - cookie = capture_session_cookie(driver) - - # Clean up - driver.quit() - - return cookie - - except Exception as e: - logger.error(f"Failed to capture cookie: {e}") - if "driver" in locals(): - driver.quit() - return None - -def login_to_linkedin(driver: webdriver.Chrome) -> bool: +def login_to_linkedin(driver: webdriver.Chrome, authentication: str) -> None: """ - Log in to LinkedIn using cookie-first authentication. + Log in to LinkedIn using provided authentication. 
Args: driver: Chrome WebDriver instance - - Returns: - bool: True if login was successful, False otherwise + authentication: LinkedIn session cookie Raises: - Various login-related errors from linkedin-scraper + Various login-related errors from linkedin-scraper or this module """ - config = get_config() - - # Try cookie authentication first - try: - cookie = get_authentication() - if login_with_cookie(driver, cookie): - logger.info("Successfully logged in to LinkedIn using cookie") - return True - else: - # Cookie login failed - clear invalid cookie from keyring - logger.warning( - "Cookie authentication failed - cookie may be expired or invalid" - ) - clear_cookie_from_keyring() - except CredentialsNotFoundError: - # No cookie available, fall back to credentials - pass - except Exception as e: - logger.warning(f"Cookie authentication failed: {e}") - # Clear invalid cookie from keyring - clear_cookie_from_keyring() - - # Fallback to credential-based login - try: - credentials = get_credentials() - except CredentialsNotFoundError as e: - if config.chrome.non_interactive: - raise e - # Only prompt if not in non-interactive mode - from linkedin_mcp_server.config.secrets import prompt_for_credentials - - credentials = prompt_for_credentials() + # Try cookie authentication + if login_with_cookie(driver, authentication): + logger.info("Successfully logged in to LinkedIn using cookie") + return - if not credentials: - raise CredentialsNotFoundError("No authentication method available") + # If we get here, cookie authentication failed + logger.error("Cookie authentication failed") - # Login to LinkedIn using enhanced linkedin-scraper - logger.info("Logging in to LinkedIn with credentials...") + # Clear invalid cookie from keyring + from linkedin_mcp_server.authentication import clear_authentication - from linkedin_scraper import actions # type: ignore + clear_authentication() + logger.info("Cleared invalid cookie from authentication storage") - # Use linkedin-scraper 
login but with simplified error handling + # Check current page to determine the issue try: - actions.login( - driver, - credentials["email"], - credentials["password"], - interactive=not config.chrome.non_interactive, - ) - - logger.info("Successfully logged in to LinkedIn") - - # Capture cookie for future use - cookie = capture_session_cookie(driver) - if cookie: - from linkedin_mcp_server.config.providers import save_cookie_to_keyring - - save_cookie_to_keyring(cookie) - logger.info("Session cookie captured and stored") - - return True - - except Exception: - # Check current page to determine the real issue - current_url = driver.current_url + current_url: str = driver.current_url if "checkpoint/challenge" in current_url: - # We're on a challenge page - this is the real issue, not credentials if "security check" in driver.page_source.lower(): raise SecurityChallengeError( challenge_url=current_url, message="LinkedIn requires a security challenge. Please complete it manually and restart the application.", ) else: - raise CaptchaRequiredError( - captcha_url=current_url, - ) - - elif "feed" in current_url or "mynetwork" in current_url: - # Actually logged in successfully despite the exception - logger.info("Successfully logged in to LinkedIn") - - # Capture cookie for future use - cookie = capture_session_cookie(driver) - if cookie: - from linkedin_mcp_server.config.providers import save_cookie_to_keyring - - save_cookie_to_keyring(cookie) - logger.info("Session cookie captured and stored") - - return True - + raise CaptchaRequiredError(captcha_url=current_url) else: - # Check for actual credential issues - page_source = driver.page_source.lower() - if any( - pattern in page_source - for pattern in ["wrong email", "wrong password", "incorrect", "invalid"] - ): - raise InvalidCredentialsError("Invalid LinkedIn email or password.") - elif "too many" in page_source: - raise RateLimitError( - "Too many login attempts. Please wait and try again later." 
- ) - else: - raise LoginTimeoutError( - "Login failed. Please check your credentials and network connection." - ) + raise InvalidCredentialsError( + "Cookie authentication failed - cookie may be expired or invalid" + ) + except Exception as e: + # If we can't determine the specific error, raise a generic one + raise LoginTimeoutError(f"Login failed: {str(e)}") -def handle_login_error(error: Exception) -> bool: - """Handle login errors in interactive mode. - Returns: - bool: True if user wants to retry, False if they want to exit +def get_or_create_driver(authentication: str) -> webdriver.Chrome: """ - config = get_config() + Get existing driver or create a new one and login. - logger.error(f"\nโŒ {str(error)}") + Args: + authentication: LinkedIn session cookie for login - if config.chrome.headless: - logger.info( - "๐Ÿ” Try running with visible browser window: uv run main.py --no-headless" - ) - - # Only allow retry for credential errors - if isinstance(error, InvalidCredentialsError): - retry = inquirer.prompt( - [ - inquirer.Confirm( - "retry", - message="Would you like to try with different credentials?", - default=True, - ), - ] - ) - if retry and retry.get("retry", False): - clear_credentials_from_keyring() - logger.info("โœ… Credentials cleared from keyring.") - logger.info("๐Ÿ”„ Retrying with new credentials...") - return True + Returns: + webdriver.Chrome: Chrome WebDriver instance, logged in and ready - return False + Raises: + DriverInitializationError: If driver creation fails + Various login-related errors: If login fails + """ + session_id = "default" # We use a single session for simplicity + # Return existing driver if available + if session_id in active_drivers: + logger.info("Using existing Chrome WebDriver session") + return active_drivers[session_id] -def initialize_driver() -> None: - """ - Initialize the driver based on global configuration. 
- """ - config = get_config() + try: + # Create new driver + driver = create_chrome_driver() - if config.server.lazy_init: - logger.info( - "Using lazy initialization - driver will be created on first tool call" - ) - if has_authentication(): - logger.info("LinkedIn authentication found in configuration") - else: - logger.info("No LinkedIn authentication found - will set up on first use") - return + # Login to LinkedIn + login_to_linkedin(driver, authentication) - # Pre-check authentication availability to trigger setup if needed - if not config.chrome.non_interactive and not has_authentication(): - # In interactive mode without authentication, trigger setup first - logger.info("Setting up LinkedIn authentication...") - try: - get_authentication() # This will trigger the interactive setup - except CredentialsNotFoundError: - pass # Setup was cancelled or failed, continue to driver creation - - # Validate chromedriver can be found - if config.chrome.chromedriver_path: - logger.info(f"โœ… ChromeDriver found at: {config.chrome.chromedriver_path}") - os.environ["CHROMEDRIVER"] = config.chrome.chromedriver_path - else: - logger.info("โš ๏ธ ChromeDriver not found in common locations.") - logger.info("โšก Continuing with automatic detection...") - logger.info( - "๐Ÿ’ก Tip: install ChromeDriver and set the CHROMEDRIVER environment variable" - ) + # Store successful driver + active_drivers[session_id] = driver + logger.info("Chrome WebDriver session created and authenticated successfully") - # Create driver and log in - try: - driver = get_or_create_driver() - if driver: - logger.info("โœ… Web driver initialized successfully") - else: - # Driver creation failed - always raise an error - raise DriverInitializationError("Failed to initialize web driver") + return driver + + except WebDriverException as e: + error_msg = f"Error creating web driver: {e}" + logger.error(error_msg) + raise DriverInitializationError(error_msg) except ( CaptchaRequiredError, 
InvalidCredentialsError, @@ -468,83 +235,56 @@ def initialize_driver() -> None: TwoFactorAuthError, RateLimitError, LoginTimeoutError, - CredentialsNotFoundError, ) as e: - # Always re-raise login-related errors so main.py can handle them + # Login-related errors - clean up driver if it was created + if session_id in active_drivers: + active_drivers[session_id].quit() + del active_drivers[session_id] raise e - except WebDriverException as e: - if config.chrome.non_interactive: - raise DriverInitializationError( - f"Failed to initialize web driver: {str(e)}" - ) - logger.error(f"โŒ Failed to initialize web driver: {str(e)}") - handle_driver_error() -def handle_driver_error() -> None: +def close_all_drivers() -> None: + """Close all active drivers and clean up resources.""" + global active_drivers + + for session_id, driver in active_drivers.items(): + try: + logger.info(f"Closing Chrome WebDriver session: {session_id}") + driver.quit() + except Exception as e: + logger.warning(f"Error closing driver {session_id}: {e}") + + active_drivers.clear() + logger.info("All Chrome WebDriver sessions closed") + + +def get_active_driver() -> Optional[webdriver.Chrome]: """ - Handle ChromeDriver initialization errors by providing helpful options. + Get the currently active driver without creating a new one. + + Returns: + Optional[webdriver.Chrome]: Active driver if available, None otherwise """ - config = get_config() + session_id = "default" + return active_drivers.get(session_id) - # Skip interactive handling in non-interactive mode - if config.chrome.non_interactive: - logger.error( - "โŒ ChromeDriver is required for this application to work properly." 
- ) - sys.exit(1) - - questions = [ - inquirer.List( - "chromedriver_action", - message="What would you like to do?", - choices=[ - ("Specify ChromeDriver path manually", "specify"), - ("Get help installing ChromeDriver", "help"), - ("Exit", "exit"), - ], - ), - ] - answers = inquirer.prompt(questions) - - if answers["chromedriver_action"] == "specify": - path = inquirer.prompt( - [inquirer.Text("custom_path", message="Enter ChromeDriver path")] - )["custom_path"] - - if os.path.exists(path): - # Update config with the new path - config.chrome.chromedriver_path = path - os.environ["CHROMEDRIVER"] = path - logger.info(f"โœ… ChromeDriver path set to: {path}") - logger.info( - "๐Ÿ’ก Please restart the application to use the new ChromeDriver path." - ) - logger.info(" Example: uv run main.py") - sys.exit(0) - else: - logger.warning(f"โš ๏ธ Warning: The specified path does not exist: {path}") - logger.info("๐Ÿ’ก Please check the path and restart the application.") - sys.exit(1) - - elif answers["chromedriver_action"] == "help": - logger.info("\n๐Ÿ“‹ ChromeDriver Installation Guide:") - logger.info( - "1. Find your Chrome version: Chrome menu > Help > About Google Chrome" - ) - logger.info( - "2. Download matching ChromeDriver: https://chromedriver.chromium.org/downloads" - ) - logger.info("3. Place ChromeDriver in a location on your PATH") - logger.info(" - macOS/Linux: /usr/local/bin/ is recommended") - logger.info( - " - Windows: Add to a directory in your PATH or specify the full path\n" - ) - - if inquirer.prompt( - [inquirer.Confirm("try_again", message="Try again?", default=True)] - )["try_again"]: - initialize_driver() - - logger.error("โŒ ChromeDriver is required for this application to work properly.") - sys.exit(1) + +def capture_session_cookie(driver: webdriver.Chrome) -> Optional[str]: + """ + Capture LinkedIn session cookie from driver. 
+ + Args: + driver: Chrome WebDriver instance + + Returns: + Optional[str]: Session cookie if found, None otherwise + """ + try: + # Get li_at cookie which is the main LinkedIn session cookie + cookie = driver.get_cookie("li_at") + if cookie and cookie.get("value"): + return f"li_at={cookie['value']}" + return None + except Exception as e: + logger.warning(f"Failed to capture session cookie: {e}") + return None diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index 1e9d3d29..745f306e 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -68,9 +68,9 @@ def convert_exception_to_response( """ if isinstance(exception, CredentialsNotFoundError): return { - "error": "credentials_not_found", + "error": "authentication_not_found", "message": str(exception), - "resolution": "Provide LinkedIn credentials via environment variables", + "resolution": "Provide LinkedIn cookie via LINKEDIN_COOKIE environment variable or run setup", } elif isinstance(exception, InvalidCredentialsError): @@ -161,17 +161,18 @@ def safe_get_driver(): Safely get or create a driver with proper error handling. 
Returns: - Driver instance or None if initialization fails + Driver instance Raises: - LinkedInMCPError: If driver initialization fails in non-interactive mode + LinkedInMCPError: If driver initialization fails """ + from linkedin_mcp_server.authentication import ensure_authentication from linkedin_mcp_server.drivers.chrome import get_or_create_driver - driver = get_or_create_driver() - if not driver: - from linkedin_mcp_server.exceptions import DriverInitializationError + # Get authentication first + authentication = ensure_authentication() - raise DriverInitializationError("Failed to initialize Chrome driver") + # Create driver with authentication + driver = get_or_create_driver(authentication) return driver diff --git a/linkedin_mcp_server/logging_config.py b/linkedin_mcp_server/logging_config.py index 98616ede..c5871a24 100644 --- a/linkedin_mcp_server/logging_config.py +++ b/linkedin_mcp_server/logging_config.py @@ -84,7 +84,8 @@ def configure_logging(debug: bool = False, json_format: bool = False) -> None: debug: Whether to enable debug logging json_format: Whether to use JSON formatting for logs """ - log_level = logging.DEBUG if debug else logging.INFO + # Set end-user appropriate logging level: WARNING for production, DEBUG for debug mode + log_level = logging.DEBUG if debug else logging.WARNING if json_format: formatter = MCPJSONFormatter() @@ -104,6 +105,7 @@ def configure_logging(debug: bool = False, json_format: bool = False) -> None: console_handler.setFormatter(formatter) root_logger.addHandler(console_handler) - # Set specific loggers - logging.getLogger("selenium").setLevel(logging.WARNING) - logging.getLogger("urllib3").setLevel(logging.WARNING) + # Set specific loggers to reduce noise + logging.getLogger("selenium").setLevel(logging.ERROR) + logging.getLogger("urllib3").setLevel(logging.ERROR) + logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR) diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py new file 
mode 100644 index 00000000..5208cb1e --- /dev/null +++ b/linkedin_mcp_server/setup.py @@ -0,0 +1,337 @@ +# linkedin_mcp_server/setup.py +""" +Interactive setup module for LinkedIn MCP Server. + +This module handles interactive setup flows and authentication configuration. +""" + +import logging +import os +from contextlib import contextmanager +from typing import Dict, Iterator + +import inquirer +from selenium import webdriver +from selenium.webdriver.chrome.options import Options + +from linkedin_mcp_server.authentication import store_authentication +from linkedin_mcp_server.config import get_config +from linkedin_mcp_server.config.providers import ( + get_credentials_from_keyring, + save_credentials_to_keyring, +) +from linkedin_mcp_server.config.schema import AppConfig +from linkedin_mcp_server.exceptions import CredentialsNotFoundError + +logger = logging.getLogger(__name__) + + +def get_credentials_for_setup() -> Dict[str, str]: + """ + Get LinkedIn credentials for setup purposes. + + Returns: + Dict[str, str]: Dictionary with email and password + + Raises: + CredentialsNotFoundError: If credentials cannot be obtained + """ + config = get_config() + + # First, try configuration (includes environment variables) + if config.linkedin.email and config.linkedin.password: + logger.info("Using LinkedIn credentials from configuration") + return {"email": config.linkedin.email, "password": config.linkedin.password} + + # Second, try keyring if enabled + if config.linkedin.use_keyring: + credentials = get_credentials_from_keyring() + if credentials["email"] and credentials["password"]: + logger.info("Using LinkedIn credentials from keyring") + return {"email": credentials["email"], "password": credentials["password"]} + + # If in non-interactive mode and no credentials found, raise error + if config.chrome.non_interactive: + raise CredentialsNotFoundError( + "No LinkedIn credentials found. 
Please provide credentials via " + "environment variables (LINKEDIN_EMAIL, LINKEDIN_PASSWORD) for setup." + ) + + # Otherwise, prompt for credentials + return prompt_for_credentials() + + +def prompt_for_credentials() -> Dict[str, str]: + """ + Prompt user for LinkedIn credentials. + + Returns: + Dict[str, str]: Dictionary with email and password + + Raises: + KeyboardInterrupt: If user cancels input + """ + config: AppConfig = get_config() + + print("๐Ÿ”‘ LinkedIn credentials required for setup") + questions = [ + inquirer.Text("email", message="LinkedIn Email"), + inquirer.Password("password", message="LinkedIn Password"), + ] + credentials: dict[str, str] = inquirer.prompt(questions) + + if not credentials: + raise KeyboardInterrupt("Credential input was cancelled") + + # Store credentials securely in keyring if enabled + if config.linkedin.use_keyring: + if save_credentials_to_keyring(credentials["email"], credentials["password"]): + logger.info("Credentials stored securely in keyring") + else: + logger.warning("Could not store credentials in system keyring") + + return credentials + + +@contextmanager +def temporary_chrome_driver() -> Iterator[webdriver.Chrome]: + """ + Context manager for creating temporary Chrome driver with automatic cleanup. 
+ + Yields: + webdriver.Chrome: Configured Chrome WebDriver instance + + Raises: + Exception: If driver creation fails + """ + config: AppConfig = get_config() + + logger.info("Creating temporary browser for cookie capture...") + + # Set up Chrome options for cookie capture + chrome_options = Options() + if config.chrome.headless: + chrome_options.add_argument("--headless=new") + + # Add essential options + # chrome_options.add_argument("--no-sandbox") + # chrome_options.add_argument("--disable-dev-shm-usage") + # chrome_options.add_argument("--disable-gpu") + # chrome_options.add_argument("--window-size=3456,2234") + + driver = None + try: + # Create temporary driver + chromedriver_path = ( + os.environ.get("CHROMEDRIVER_PATH") or config.chrome.chromedriver_path + ) + + if chromedriver_path: + from selenium.webdriver.chrome.service import Service + + service = Service(executable_path=chromedriver_path) + driver = webdriver.Chrome(service=service, options=chrome_options) + else: + driver = webdriver.Chrome(options=chrome_options) + + driver.set_page_load_timeout(60) + yield driver + + finally: + if driver: + driver.quit() + + +def capture_cookie_from_credentials(email: str, password: str) -> str: + """ + Login with credentials and capture session cookie using temporary driver. + + Args: + email: LinkedIn email + password: LinkedIn password + + Returns: + str: Captured session cookie + + Raises: + Exception: If login or cookie capture fails + """ + with temporary_chrome_driver() as driver: + # Login using linkedin-scraper + from linkedin_scraper import actions + + config: AppConfig = get_config() + interactive: bool = not config.chrome.non_interactive + logger.info(f"Logging in to LinkedIn... Interactive: {interactive}") + actions.login( + driver, + email, + password, + timeout=60, # longer timeout for login (captcha, mobile verification, etc.) 
+ interactive=interactive, # Respect configuration setting + ) + + # Capture cookie + cookie_obj: dict[str, str] = driver.get_cookie("li_at") + if cookie_obj and cookie_obj.get("value"): + cookie: str = cookie_obj["value"] + logger.info("Successfully captured session cookie") + return cookie + else: + raise Exception("Failed to capture session cookie from browser") + + +def test_cookie_validity(cookie: str) -> bool: + """ + Test if a cookie is valid by attempting to use it with a temporary driver. + + Args: + cookie: LinkedIn session cookie to test + + Returns: + bool: True if cookie is valid, False otherwise + """ + try: + with temporary_chrome_driver() as driver: + from linkedin_mcp_server.drivers.chrome import login_with_cookie + + return login_with_cookie(driver, cookie) + except Exception as e: + logger.warning(f"Cookie validation failed: {e}") + return False + + +def prompt_for_cookie() -> str: + """ + Prompt user to input LinkedIn cookie directly. + + Returns: + str: LinkedIn session cookie + + Raises: + KeyboardInterrupt: If user cancels input + ValueError: If cookie format is invalid + """ + print("๐Ÿช Please provide your LinkedIn session cookie") + cookie = inquirer.text("LinkedIn Cookie") + + if not cookie: + raise KeyboardInterrupt("Cookie input was cancelled") + + # Normalize cookie format + if cookie.startswith("li_at="): + cookie: str = cookie.split("li_at=")[1] + + return cookie + + +def run_interactive_setup() -> str: + """ + Run interactive setup to configure authentication. 
+ + Returns: + str: Configured LinkedIn session cookie + + Raises: + Exception: If setup fails + """ + print("๐Ÿ”— LinkedIn MCP Server Setup") + print("Choose how you'd like to authenticate:") + + # Ask user for setup method + setup_method = inquirer.list_input( + "Setup method", + choices=[ + ("I have a LinkedIn cookie", "cookie"), + ("Login with email/password to get cookie", "credentials"), + ], + default="cookie", + ) + + if setup_method == "cookie": + # User provides cookie directly + cookie = prompt_for_cookie() + + # Test the cookie with a temporary driver + print("๐Ÿ” Testing provided cookie...") + if test_cookie_validity(cookie): + # Store the valid cookie + store_authentication(cookie) + logger.info("โœ… Authentication configured successfully") + return cookie + else: + print("โŒ The provided cookie is invalid or expired") + retry = inquirer.confirm( + "Would you like to try with email/password instead?", default=True + ) + if not retry: + raise Exception("Setup cancelled - invalid cookie provided") + + # Fall through to credentials flow + setup_method = "credentials" + + if setup_method == "credentials": + # Get credentials and attempt login with retry + max_retries = 3 + for attempt in range(max_retries): + try: + credentials = get_credentials_for_setup() + + print("๐Ÿ”‘ Logging in to capture session cookie...") + cookie = capture_cookie_from_credentials( + credentials["email"], credentials["password"] + ) + + # Store the captured cookie + store_authentication(cookie) + logger.info("โœ… Authentication configured successfully") + return cookie + + except Exception as e: + logger.error(f"Login failed: {e}") + print(f"โŒ Login failed: {e}") + + if attempt < max_retries - 1: + retry = inquirer.confirm( + "Would you like to try with different credentials?", + default=True, + ) + if not retry: + break + # Clear stored credentials to prompt for new ones + from linkedin_mcp_server.config.providers import ( + clear_credentials_from_keyring, + ) + + 
clear_credentials_from_keyring() + else: + raise Exception(f"Setup failed after {max_retries} attempts") + + raise Exception("Setup cancelled by user") + + # This should never be reached, but ensures type checker knows all paths are covered + raise Exception("Unexpected setup flow completion") + + +def run_cookie_extraction_setup() -> str: + """ + Run setup specifically for cookie extraction (--get-cookie mode). + + Returns: + str: Captured LinkedIn session cookie for display + + Raises: + Exception: If setup fails + """ + logger.info("๐Ÿ”— LinkedIn MCP Server - Cookie Extraction mode started") + print("๐Ÿ”— LinkedIn MCP Server - Cookie Extraction") + + # Get credentials + credentials: dict[str, str] = get_credentials_for_setup() + + # Capture cookie + cookie: str = capture_cookie_from_credentials( + credentials["email"], credentials["password"] + ) + + return cookie diff --git a/main.py b/main.py index 747eadc4..eda54fb7 100644 --- a/main.py +++ b/main.py @@ -1,6 +1,11 @@ # main.py """ LinkedIn MCP Server - A Model Context Protocol server for LinkedIn integration. + +Clean architecture with clear phase separation: +1. Authentication Setup Phase +2. Driver Management Phase +3. 
Server Runtime Phase """ import logging @@ -17,14 +22,17 @@ TwoFactorAuthError, ) +from linkedin_mcp_server.authentication import ( + ensure_authentication, + has_authentication, +) from linkedin_mcp_server.cli import print_claude_config - -# Import the new centralized configuration from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.drivers.chrome import initialize_driver -from linkedin_mcp_server.exceptions import LinkedInMCPError +from linkedin_mcp_server.drivers.chrome import close_all_drivers, get_or_create_driver +from linkedin_mcp_server.exceptions import CredentialsNotFoundError, LinkedInMCPError from linkedin_mcp_server.logging_config import configure_logging from linkedin_mcp_server.server import create_mcp_server, shutdown_handler +from linkedin_mcp_server.setup import run_cookie_extraction_setup, run_interactive_setup logger = logging.getLogger(__name__) @@ -43,147 +51,272 @@ def choose_transport_interactive() -> Literal["stdio", "streamable-http"]: ) ] answers = inquirer.prompt(questions) + + if not answers: + raise KeyboardInterrupt("Transport selection cancelled by user") + return answers["transport"] def get_cookie_and_exit() -> None: """Get LinkedIn cookie and exit (for Docker setup).""" - print("๐Ÿ”— LinkedIn MCP Server - Cookie Extraction ๐Ÿ”—") - print("=" * 50) - config = get_config() - # Configure logging + # Configure logging - prioritize debug mode over non_interactive configure_logging( debug=config.server.debug, - json_format=config.chrome.non_interactive, + json_format=config.chrome.non_interactive and not config.server.debug, ) - try: - from linkedin_mcp_server.config.secrets import get_credentials - from linkedin_mcp_server.drivers.chrome import setup_driver_for_cookie_capture - - # Get credentials - credentials = get_credentials() - - print("๐Ÿ”‘ Logging in to LinkedIn...") - cookie = setup_driver_for_cookie_capture( - credentials["email"], credentials["password"] - ) + logger.info("LinkedIn MCP Server - Cookie 
Extraction mode started") - if cookie: - print("โœ… Login successful!") - print(f"๐Ÿช LinkedIn Cookie: {cookie}") + try: + # Run cookie extraction setup + cookie = run_cookie_extraction_setup() - # Try to copy to clipboard - try: - import pyperclip + logger.info("Cookie extraction successful") + print("โœ… Login successful!") + print(f"๐Ÿช LinkedIn Cookie: {cookie}") - pyperclip.copy(cookie) - print("๐Ÿ“‹ Cookie copied to clipboard!") - except Exception as e: - logger.warning(f"Could not copy to clipboard: {e}") - print("โš ๏ธ Copy the cookie above manually") + # Try to copy to clipboard + try: + import pyperclip - print("\n๐Ÿ“ Usage:") - print("1. Copy the cookie above") - print("2. Set LINKEDIN_COOKIE environment variable in your Docker setup") - print("3. Or paste into Claude Desktop configuration") + pyperclip.copy(cookie) + print("๐Ÿ“‹ Cookie copied to clipboard!") + except Exception as e: + logger.warning(f"Could not copy to clipboard: {e}") + print("โš ๏ธ Copy the cookie above manually") - else: - print("โŒ Failed to obtain cookie") - sys.exit(1) + print("\n๐Ÿ“ Usage:") + print("1. Copy the cookie above") + print("2. Set LINKEDIN_COOKIE environment variable in your Docker setup") + print("3. Or paste into Claude Desktop configuration") except Exception as e: logger.error(f"Error getting cookie: {e}") - print(f"โŒ Error getting cookie: {e}") + + # Provide specific guidance for security challenges + error_msg = str(e).lower() + if "security challenge" in error_msg or "captcha" in error_msg: + print("โŒ LinkedIn security challenge detected") + print("๐Ÿ’ก Try one of these solutions:") + print( + " 1. Use an existing LinkedIn cookie instead (see instructions below)" + ) + print(" 2. Login to LinkedIn in your browser first, then retry") + print( + " 3. Use --no-headless to see and complete the security challenge manually" + ) + print("\n๐Ÿช To get your LinkedIn cookie manually:") + print(" 1. Login to LinkedIn in your browser") + print(" 2. 
Open Developer Tools (F12)") + print(" 3. Go to Application/Storage > Cookies > linkedin.com") + print(" 4. Copy the 'li_at' cookie value") + print(" 5. Set LINKEDIN_COOKIE environment variable or use --cookie flag") + elif "invalid credentials" in error_msg: + print("โŒ Invalid LinkedIn credentials") + print("๐Ÿ’ก Please check your email and password") + else: + print("โŒ Failed to obtain cookie - check your credentials") sys.exit(1) sys.exit(0) +def ensure_authentication_ready() -> str: + """ + Phase 1: Ensure authentication is ready before any drivers are created. + + Returns: + str: Valid LinkedIn session cookie + + Raises: + CredentialsNotFoundError: If authentication setup fails + """ + config = get_config() + + # Check if authentication already exists + if has_authentication(): + try: + return ensure_authentication() + except CredentialsNotFoundError: + # Authentication exists but might be invalid, continue to setup + pass + + # If in non-interactive mode and no auth, fail immediately + if config.chrome.non_interactive: + raise CredentialsNotFoundError( + "No LinkedIn authentication found. Please provide cookie via " + "environment variable (LINKEDIN_COOKIE) or run with --get-cookie to obtain one." + ) + + # Run interactive setup + logger.info("Setting up LinkedIn authentication...") + return run_interactive_setup() + + +def initialize_driver_with_auth(authentication: str) -> None: + """ + Phase 2: Initialize driver using existing authentication. 
+ + Args: + authentication: LinkedIn session cookie + + Raises: + Various exceptions if driver creation or login fails + """ + config = get_config() + + if config.server.lazy_init: + logger.info( + "Using lazy initialization - driver will be created on first tool call" + ) + return + + logger.info("Initializing Chrome WebDriver and logging in...") + + try: + # Create driver and login with provided authentication + get_or_create_driver(authentication) + logger.info("โœ… Web driver initialized and authenticated successfully") + + except Exception as e: + logger.error(f"Failed to initialize driver: {e}") + raise e + + def main() -> None: - """Initialize and run the LinkedIn MCP server.""" + """Main application entry point with clear phase separation.""" + logger.info("๐Ÿ”— LinkedIn MCP Server ๐Ÿ”—") print("๐Ÿ”— LinkedIn MCP Server ๐Ÿ”—") print("=" * 40) - # Get configuration using the new centralized system + # Get configuration config = get_config() - # Handle --get-cookie flag + # Handle --get-cookie flag immediately if config.server.get_cookie: get_cookie_and_exit() - # Configure logging + # Configure logging - prioritize debug mode over non_interactive configure_logging( debug=config.server.debug, - json_format=config.chrome.non_interactive, # Use JSON format in non-interactive mode + json_format=config.chrome.non_interactive and not config.server.debug, ) logger.debug(f"Server configuration: {config}") - # Initialize the driver with configuration (initialize driver checks for lazy init options) + # Phase 1: Ensure Authentication is Ready try: - initialize_driver() + authentication = ensure_authentication_ready() + print("โœ… Authentication ready") + logger.info("Authentication ready") + except CredentialsNotFoundError as e: + logger.error(f"Authentication setup failed: {e}") + print( + "\nโŒ Authentication required - please provide LinkedIn cookie or credentials" + ) + sys.exit(1) + except KeyboardInterrupt: + print("\n\n๐Ÿ‘‹ Setup cancelled by user") + 
sys.exit(0) + except Exception as e: + logger.error(f"Unexpected error during authentication setup: {e}") + print("\nโŒ Setup failed - please try again") + sys.exit(1) + + # Phase 2: Initialize Driver (if not lazy) + try: + initialize_driver_with_auth(authentication) + except InvalidCredentialsError as e: + logger.error(f"Driver initialization failed with invalid credentials: {e}") + + # Cookie was already cleared in driver layer + # In interactive mode, try setup again + if not config.chrome.non_interactive and config.server.setup: + print(f"\nโŒ {str(e)}") + print("๐Ÿ”„ Starting interactive setup for new authentication...") + try: + new_authentication = run_interactive_setup() + # Try again with new authentication + initialize_driver_with_auth(new_authentication) + logger.info("โœ… Successfully authenticated with new credentials") + except Exception as setup_error: + logger.error(f"Setup failed: {setup_error}") + print(f"\nโŒ Setup failed: {setup_error}") + sys.exit(1) + else: + print(f"\nโŒ {str(e)}") + if not config.server.lazy_init: + sys.exit(1) except ( LinkedInMCPError, CaptchaRequiredError, - InvalidCredentialsError, SecurityChallengeError, TwoFactorAuthError, RateLimitError, LoginTimeoutError, ) as e: - logger.error( - f"Failed to initialize driver: {str(e)}", - extra={"error_type": type(e).__name__, "error_details": str(e)}, - ) - - # Always terminate if login fails and we're not using lazy initialization + logger.error(f"Driver initialization failed: {e}") + print(f"\nโŒ {str(e)}") if not config.server.lazy_init: - print(f"\nโŒ {str(e)}") - sys.exit(1) - - # In lazy init mode with non-interactive, still exit on error - if config.chrome.non_interactive: sys.exit(1) - else: - print(f"\nโŒ Error: {str(e)}") - print("๐Ÿ’ก Tip: Check your credentials and try again.") + except Exception as e: + logger.error(f"Unexpected error during driver initialization: {e}") + print(f"\nโŒ Driver initialization failed: {e}") + if not config.server.lazy_init: 
sys.exit(1) - # Decide transport - transport = config.server.transport - if config.server.setup: - transport = choose_transport_interactive() - - # Print configuration for Claude if in setup mode and using stdio transport - if config.server.setup and transport == "stdio": - print_claude_config() - - # Create and run the MCP server - mcp = create_mcp_server() + # Phase 3: Server Runtime + try: + # Decide transport + transport = config.server.transport + if config.server.setup: + print("\n๐Ÿš€ Server ready! Choose transport mode:") + transport = choose_transport_interactive() + + # Print configuration for Claude if in setup mode and using stdio transport + if config.server.setup and transport == "stdio": + print_claude_config() + + # Create and run the MCP server + mcp = create_mcp_server() + + # Start server + print(f"\n๐Ÿš€ Running LinkedIn MCP server ({transport.upper()} mode)...") + if transport == "streamable-http": + print( + f"๐Ÿ“ก HTTP server will be available at http://{config.server.host}:{config.server.port}{config.server.path}" + ) + mcp.run( + transport=transport, + host=config.server.host, + port=config.server.port, + path=config.server.path, + ) + else: + mcp.run(transport=transport) - # Start server - print(f"\n๐Ÿš€ Running LinkedIn MCP server ({transport.upper()} mode)...") - if transport == "streamable-http": - print( - f"๐Ÿ“ก HTTP server will be available at http://{config.server.host}:{config.server.port}{config.server.path}" - ) - mcp.run( - transport=transport, - host=config.server.host, - port=config.server.port, - path=config.server.path, - ) - else: - mcp.run(transport=transport) + except KeyboardInterrupt: + print("\n\n๐Ÿ‘‹ Server stopped by user") + exit_gracefully(0) + except Exception as e: + logger.error(f"Server runtime error: {e}") + print(f"\nโŒ Server error: {e}") + exit_gracefully(1) def exit_gracefully(exit_code: int = 0) -> None: """Exit the application gracefully, cleaning up resources.""" print("\n๐Ÿ‘‹ Shutting down LinkedIn 
MCP server...") + + # Clean up drivers + close_all_drivers() + + # Clean up server shutdown_handler() + sys.exit(exit_code) diff --git a/uv.lock b/uv.lock index 527cc634..6a1638f1 100644 --- a/uv.lock +++ b/uv.lock @@ -702,7 +702,7 @@ dev = [ [[package]] name = "linkedin-scraper" version = "2.11.5" -source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git#1d6ff82f8b0950b060529b12102a674cfabad1bb" } +source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git#30f448df90af834bafb7d9e4caebfd0032605163" } dependencies = [ { name = "lxml" }, { name = "python-dotenv" }, From 79ca250bfa1f1c7c1e1639b602ea834ae15c22c1 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 05:17:25 -0400 Subject: [PATCH 126/565] fix(cookie): improve cookie extraction messages --- linkedin_mcp_server/config/loaders.py | 2 -- main.py | 17 +++++++---------- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index 2143ae71..771d9635 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -175,8 +175,6 @@ def load_from_args(config: AppConfig) -> AppConfig: if hasattr(args, "get_cookie") and args.get_cookie: config.server.get_cookie = True - config.chrome.non_interactive = True - if args.cookie: config.linkedin.cookie = args.cookie diff --git a/main.py b/main.py index eda54fb7..10a32ef9 100644 --- a/main.py +++ b/main.py @@ -76,23 +76,21 @@ def get_cookie_and_exit() -> None: logger.info("Cookie extraction successful") print("โœ… Login successful!") - print(f"๐Ÿช LinkedIn Cookie: {cookie}") + print("๐Ÿช LinkedIn Cookie extracted:") + print(cookie) # Try to copy to clipboard try: import pyperclip pyperclip.copy(cookie) - print("๐Ÿ“‹ Cookie copied to clipboard!") + print( + "๐Ÿ“‹ Cookie copied to clipboard! 
Now you can set the LINKEDIN_COOKIE environment variable in your configuration" + ) except Exception as e: logger.warning(f"Could not copy to clipboard: {e}") print("โš ๏ธ Copy the cookie above manually") - print("\n๐Ÿ“ Usage:") - print("1. Copy the cookie above") - print("2. Set LINKEDIN_COOKIE environment variable in your Docker setup") - print("3. Or paste into Claude Desktop configuration") - except Exception as e: logger.error(f"Error getting cookie: {e}") @@ -102,11 +100,10 @@ def get_cookie_and_exit() -> None: print("โŒ LinkedIn security challenge detected") print("๐Ÿ’ก Try one of these solutions:") print( - " 1. Use an existing LinkedIn cookie instead (see instructions below)" + " 1. Use an existing LinkedIn cookie from your browser instead (see instructions below)" ) - print(" 2. Login to LinkedIn in your browser first, then retry") print( - " 3. Use --no-headless to see and complete the security challenge manually" + " 2. Use --no-headless flag (manual installation required, does not work with Docker) and solve the security challenge manually" ) print("\n๐Ÿช To get your LinkedIn cookie manually:") print(" 1. 
Login to LinkedIn in your browser") From 0ccdd5ec9f760d7e0794514cbbc120cb55741ee6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 05:21:43 -0400 Subject: [PATCH 127/565] chore(vscode): add task to pack DXT package --- .vscode/tasks.json | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 4c613079..c541f611 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -133,5 +133,22 @@ }, "problemMatcher": [] }, + { + "label": "bunx @anthropic-ai/dxt pack", + "detail": "Pack the DXT package", + "type": "shell", + "command": "bunx", + "args": ["@anthropic-ai/dxt", "pack"], + "group": { + "kind": "build", + "isDefault": false + }, + "presentation": { + "reveal": "always", + "panel": "new", + "focus": true + }, + "problemMatcher": [] + } ] } From a27f5d0bc106fe28d53b4c4c26b6816dce6081a0 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 12:07:38 -0400 Subject: [PATCH 128/565] fix(manifest): update LinkedIn cookie description --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 0311c434..69e74f41 100644 --- a/manifest.json +++ b/manifest.json @@ -85,7 +85,7 @@ "user_config": { "linkedin_cookie": { "title": "LinkedIn Cookie", - "description": "LinkedIn session cookie. Run 'docker run -it --rm -e LINKEDIN_EMAIL=your@email.com -e LINKEDIN_PASSWORD=yourpass stickerdaniel/linkedin-mcp-server --get-cookie' to obtain", + "description": "LinkedIn li_at session cookie. 
Follow the instructions in the README to get it.", "type": "string", "required": true, "sensitive": true From 282e77f1d979164552c0c4b725eecdcae1e5cae0 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 12:53:35 -0400 Subject: [PATCH 129/565] docs(readme): enhance LinkedIn cookie retrieval instructions --- README.md | 103 ++++++++++++++++++++++++++++++++++++++++++++------ manifest.json | 2 +- 2 files changed, 92 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index 6c130639..1d83e85c 100644 --- a/README.md +++ b/README.md @@ -36,17 +36,16 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c > [!NOTE] > July 2025: All tools are currently functional and actively maintained. If you encounter any issues, please report them in the [GitHub issues](https://github.com/stickerdaniel/linkedin-mcp-server/issues). ---- +
+
## ๐Ÿณ Docker Setup (Recommended - Universal) **Prerequisites:** Make sure you have [Docker](https://www.docker.com/get-started/) installed and running. -**Zero setup required** - just add the mcp server to your client config and replace email and password with your linkedin credentials. - ### Installation -**Claude Desktop:** +**Client Configuration:** ```json { "mcpServers": { @@ -54,20 +53,52 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c "command": "docker", "args": [ "run", "-i", "--rm", - "-e", "LINKEDIN_EMAIL", - "-e", "LINKEDIN_PASSWORD", + "-e", "LINKEDIN_COOKIE", "stickerdaniel/linkedin-mcp-server", "--no-setup" ], "env": { - "LINKEDIN_EMAIL": "your.email@example.com", - "LINKEDIN_PASSWORD": "your_password" + "LINKEDIN_COOKIE": "XXXXXX...", } } } } ``` +### Getting the LinkedIn Cookie +
+๐Ÿณ Docker get-cookie method + +**Run the server with the `--get-cookie` flag:** +```bash +docker run -i --rm \ + -e LINKEDIN_EMAIL="your.email@example.com" \ + -e LINKEDIN_PASSWORD="your_password" \ + stickerdaniel/linkedin-mcp-server \ + --get-cookie +``` +Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method below. +
+
+๐ŸŒ Chrome DevTools Guide + +1. Open LinkedIn and login +2. Open Chrome DevTools (F12 or right-click โ†’ Inspect) +3. Go to **Application** > **Storage** > **Cookies** > **https://www.linkedin.com** +4. Find the cookie named `li_at` +5. Copy the **Value** field (this is your LinkedIn session cookie) +6. Use this value as your `LINKEDIN_COOKIE` in the configuration + +
+
+ +> [!NOTE] +> The cookie will expire during the next 30 days. Just get the new cookie and update your config. + +> [!TIP] +> There are also many cookie manager extensions that you can use to easily get the cookie. + +### Docker Setup Help
๐Ÿ”ง Configuration @@ -83,6 +114,8 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) +- `--get-cookie` - Attempt to login with email and password and extract the LinkedIn cookie +- `--cookie {cookie}` - Pass a specific LinkedIn cookie for login **HTTP Mode Example (for web-based MCP clients):** ```bash @@ -116,6 +149,9 @@ docker run -i --rm \ - You might get a captcha challenge if you logged in a lot of times in a short period of time, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode where you can debug the login process (solve captcha manually)
+
+
+ ## ๐Ÿ“ฆ Claude Desktop (DXT Extension) **Prerequisites:** [Claude Desktop](https://claude.ai/download) and [Docker](https://www.docker.com/get-started/) installed @@ -126,6 +162,40 @@ docker run -i --rm \ 3. Configure your LinkedIn credentials when prompted 4. Start using LinkedIn tools immediately +### Getting the LinkedIn Cookie +
+๐Ÿณ Docker get-cookie method + +**Run the server with the `--get-cookie` flag:** +```bash +docker run -i --rm \ + -e LINKEDIN_EMAIL="your.email@example.com" \ + -e LINKEDIN_PASSWORD="your_password" \ + stickerdaniel/linkedin-mcp-server \ + --get-cookie +``` +Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method below. +
+
+๐ŸŒ Chrome DevTools Guide + +1. Open LinkedIn and login +2. Open Chrome DevTools (F12 or right-click โ†’ Inspect) +3. Go to **Application** > **Storage** > **Cookies** > **https://www.linkedin.com** +4. Find the cookie named `li_at` +5. Copy the **Value** field (this is your LinkedIn session cookie) +6. Use this value as your `LINKEDIN_COOKIE` in the configuration + +
+
+ +> [!NOTE] +> The cookie will expire during the next 30 days. Just get the new cookie and update your config. + +> [!TIP] +> There are also many cookie manager extensions that you can use to easily get the cookie. + +### DXT Extension Setup Help
โ— Troubleshooting @@ -139,6 +209,9 @@ docker run -i --rm \ - You might get a captcha challenge if you logged in a lot of times in a short period of time, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode where you can debug the login process (solve captcha manually)
+
+
+ ## ๐Ÿ Local Setup (Develop & Contribute) **Prerequisites:** [Chrome browser](https://www.google.com/chrome/) and [Git](https://git-scm.com/downloads) installed @@ -170,21 +243,25 @@ uv sync --group dev uv run pre-commit install # 5. Start the server once manually -# (you will be prompted to enter your LinkedIn credentials, and they are securely stored in your OS keychain) +# You will be prompted to enter your LinkedIn credentials, and they will be securely stored in your OS keychain +# Once logged in, your cookie will be stored in your OS keychain and used for subsequent runs until it expires uv run main.py --no-headless --no-lazy-init ``` +### Local Setup Help
๐Ÿ”ง Configuration **CLI Options:** - `--no-headless` - Show browser window (debugging) - `--debug` - Enable detailed logging -- `--no-setup` - Skip credential prompts (make sure to set `LINKEDIN_EMAIL` and `LINKEDIN_PASSWORD` in env or or run the server once manualy, then it will be stored in your OS keychain and you can run the server without credentials) +- `--no-setup` - Skip credential prompts (make sure to set `LINKEDIN_COOKIE` or `LINKEDIN_EMAIL` and `LINKEDIN_PASSWORD` in env or that you run the server once manually, so the authentication is stored in your OS keychain and you can run the server without credentials) - `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call +- `--get-cookie` - Login with email and password and extract the LinkedIn cookie +- `--cookie {cookie}` - Pass a specific LinkedIn cookie for login **Claude Desktop:** -```json +```**json** { "mcpServers": { "linkedin": { @@ -217,7 +294,9 @@ uv run main.py --no-headless --no-lazy-init Feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) or [PR](https://github.com/stickerdaniel/linkedin-mcp-server/pulls)! ---- + +
+
## License diff --git a/manifest.json b/manifest.json index 69e74f41..950a5ba9 100644 --- a/manifest.json +++ b/manifest.json @@ -52,7 +52,7 @@ "properties": { "company_url": { "type": "string", - "description": "LinkedIn company URL (e.g., https://www.linkedin.com/company/company-name/)" + "description": "LinkedIn company URL (e.g., https://www.linkedin.com/company/docker/)" } }, "required": ["company_url"] From 0436424040825bc2546d7f0788322a666d0d7823 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 13:03:57 -0400 Subject: [PATCH 130/565] refactor(authentication): update cookie length constants --- linkedin_mcp_server/authentication.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/linkedin_mcp_server/authentication.py b/linkedin_mcp_server/authentication.py index fb3ea828..7d54b528 100644 --- a/linkedin_mcp_server/authentication.py +++ b/linkedin_mcp_server/authentication.py @@ -9,15 +9,15 @@ from linkedin_mcp_server.config import get_config from linkedin_mcp_server.config.providers import ( + clear_cookie_from_keyring, get_cookie_from_keyring, save_cookie_to_keyring, - clear_cookie_from_keyring, ) from linkedin_mcp_server.exceptions import CredentialsNotFoundError # Constants for cookie validation -MIN_COOKIE_LENGTH = 20 -MIN_RAW_COOKIE_LENGTH = 10 +MIN_RAW_COOKIE_LENGTH = 110 +MIN_COOKIE_LENGTH = MIN_RAW_COOKIE_LENGTH + len("li_at=") logger = logging.getLogger(__name__) From 9668111b14dc4943f6ceda8878c3501987ac4da9 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 13:04:01 -0400 Subject: [PATCH 131/565] refactor(setup): update type hints for credentials and cookies --- linkedin_mcp_server/config/secrets.py | 4 ++-- linkedin_mcp_server/setup.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/linkedin_mcp_server/config/secrets.py b/linkedin_mcp_server/config/secrets.py index a5071aab..25b8dd98 100644 --- a/linkedin_mcp_server/config/secrets.py +++ 
b/linkedin_mcp_server/config/secrets.py @@ -8,11 +8,11 @@ from linkedin_mcp_server.exceptions import CredentialsNotFoundError from .providers import ( + get_cookie_from_keyring, get_credentials_from_keyring, get_keyring_name, - save_credentials_to_keyring, - get_cookie_from_keyring, save_cookie_to_keyring, + save_credentials_to_keyring, ) logger = logging.getLogger(__name__) diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index 5208cb1e..793a1fdc 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -78,7 +78,7 @@ def prompt_for_credentials() -> Dict[str, str]: inquirer.Text("email", message="LinkedIn Email"), inquirer.Password("password", message="LinkedIn Password"), ] - credentials: dict[str, str] = inquirer.prompt(questions) + credentials: Dict[str, str] = inquirer.prompt(questions) if not credentials: raise KeyboardInterrupt("Credential input was cancelled") @@ -172,7 +172,7 @@ def capture_cookie_from_credentials(email: str, password: str) -> str: ) # Capture cookie - cookie_obj: dict[str, str] = driver.get_cookie("li_at") + cookie_obj: Dict[str, str] = driver.get_cookie("li_at") if cookie_obj and cookie_obj.get("value"): cookie: str = cookie_obj["value"] logger.info("Successfully captured session cookie") @@ -327,7 +327,7 @@ def run_cookie_extraction_setup() -> str: print("๐Ÿ”— LinkedIn MCP Server - Cookie Extraction") # Get credentials - credentials: dict[str, str] = get_credentials_for_setup() + credentials: Dict[str, str] = get_credentials_for_setup() # Capture cookie cookie: str = capture_cookie_from_credentials( From ce057357028f7f80cee8d08f62da2b375b797d5c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 13:08:07 -0400 Subject: [PATCH 132/565] refactor(authentication): remove legacy duplicated cookie handling functions --- README.md | 1 + linkedin_mcp_server/config/secrets.py | 89 --------------------------- 2 files changed, 1 insertion(+), 89 deletions(-) diff --git a/README.md 
b/README.md index 1d83e85c..21cca9cc 100644 --- a/README.md +++ b/README.md @@ -259,6 +259,7 @@ uv run main.py --no-headless --no-lazy-init - `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call - `--get-cookie` - Login with email and password and extract the LinkedIn cookie - `--cookie {cookie}` - Pass a specific LinkedIn cookie for login +- `--help` - Show help **Claude Desktop:** ```**json** diff --git a/linkedin_mcp_server/config/secrets.py b/linkedin_mcp_server/config/secrets.py index 25b8dd98..c01dc1ba 100644 --- a/linkedin_mcp_server/config/secrets.py +++ b/linkedin_mcp_server/config/secrets.py @@ -8,60 +8,14 @@ from linkedin_mcp_server.exceptions import CredentialsNotFoundError from .providers import ( - get_cookie_from_keyring, get_credentials_from_keyring, get_keyring_name, - save_cookie_to_keyring, save_credentials_to_keyring, ) logger = logging.getLogger(__name__) -def has_authentication() -> bool: - """Check if authentication is available without triggering interactive setup.""" - config = get_config() - - # Check environment variable - if config.linkedin.cookie: - return True - - # Check keyring if enabled - if config.linkedin.use_keyring: - cookie = get_cookie_from_keyring() - if cookie: - return True - - return False - - -def get_authentication() -> str: - """Get LinkedIn cookie from keyring, environment, or interactive setup.""" - config = get_config() - - # First, try environment variable - if config.linkedin.cookie: - logger.info("Using LinkedIn cookie from environment") - return config.linkedin.cookie - - # Second, try keyring if enabled - if config.linkedin.use_keyring: - cookie = get_cookie_from_keyring() - if cookie: - logger.info(f"Using LinkedIn cookie from {get_keyring_name()}") - return cookie - - # If in non-interactive mode and no cookie found, raise error - if config.chrome.non_interactive: - raise CredentialsNotFoundError( - "No LinkedIn cookie found. 
Please provide cookie via " - "environment variable (LINKEDIN_COOKIE) or run with --get-cookie to obtain one." - ) - - # Otherwise, prompt for cookie or setup - return prompt_for_authentication() - - def get_credentials() -> Dict[str, str]: """Get LinkedIn credentials from config, keyring, or prompt (legacy for --get-cookie).""" config = get_config() @@ -89,49 +43,6 @@ def get_credentials() -> Dict[str, str]: return prompt_for_credentials() -def prompt_for_authentication() -> str: - """Prompt user for LinkedIn cookie or setup via login.""" - print("๐Ÿ”— LinkedIn MCP Server Setup") - - # Ask if user has a cookie - has_cookie = inquirer.confirm("Do you have a LinkedIn cookie?", default=False) - - if has_cookie: - cookie = inquirer.text("LinkedIn Cookie", validate=lambda _, x: len(x) > 10) - if save_cookie_to_keyring(cookie): - logger.info(f"Cookie stored securely in {get_keyring_name()}") - else: - logger.warning("Could not store cookie in system keyring.") - logger.info("Your cookie will only be used for this session.") - return cookie - else: - # Login flow to get cookie - return setup_cookie_from_login() - - -def setup_cookie_from_login() -> str: - """Login with credentials and capture cookie.""" - from linkedin_mcp_server.setup import capture_cookie_from_credentials - - print("๐Ÿ”‘ LinkedIn login required to obtain cookie") - credentials = prompt_for_credentials() - - # Use existing cookie capture functionality - cookie = capture_cookie_from_credentials( - credentials["email"], credentials["password"] - ) - - if cookie: - if save_cookie_to_keyring(cookie): - logger.info(f"Cookie stored securely in {get_keyring_name()}") - else: - logger.warning("Could not store cookie in system keyring.") - logger.info("Your cookie will only be used for this session.") - return cookie - else: - raise CredentialsNotFoundError("Failed to obtain LinkedIn cookie") - - def prompt_for_credentials() -> Dict[str, str]: """Prompt user for LinkedIn credentials and store them securely.""" 
print(f"๐Ÿ”‘ LinkedIn credentials required (will be stored in {get_keyring_name()})") From 0aec6a72e4770679986197e18e5d76fabd63778c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 13:11:14 -0400 Subject: [PATCH 133/565] chore(version): bump version to 1.1.0 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f0f049b3..f99eaf6a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.0.8" +version = "1.1.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 6a1638f1..ddb27de6 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.0.8" +version = "1.1.0" source = { virtual = "." } dependencies = [ { name = "fastmcp" }, From 2cfddc324f43905b2fe248fde4c485b149851061 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 6 Jul 2025 17:13:24 +0000 Subject: [PATCH 134/565] chore(dxt): update manifest.json version to v1.1.0 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 950a5ba9..83b9f496 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.0.8", + "version": "1.1.0", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From 4597d99570f529127efaedec9e2d995a84d61a3f Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Sun, 6 Jul 2025 13:14:44 -0400 Subject: [PATCH 135/565] Update README.md --- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index 21cca9cc..8f68b751 100644 --- a/README.md +++ b/README.md @@ -90,7 +90,6 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c 6. Use this value as your `LINKEDIN_COOKIE` in the configuration
-
> [!NOTE] > The cookie will expire during the next 30 days. Just get the new cookie and update your config. @@ -187,7 +186,6 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c 6. Use this value as your `LINKEDIN_COOKIE` in the configuration
-
> [!NOTE] > The cookie will expire during the next 30 days. Just get the new cookie and update your config. From e5dd94de04609895b97d3c137c4021c9d9152a44 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 13:59:26 -0400 Subject: [PATCH 136/565] chore(manifest): update Docker image tag to latest --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 83b9f496..e8481f2a 100644 --- a/manifest.json +++ b/manifest.json @@ -25,7 +25,7 @@ "args": [ "run", "-i", "--rm", "-e", "LINKEDIN_COOKIE=${user_config.linkedin_cookie}", - "stickerdaniel/linkedin-mcp-server" + "stickerdaniel/linkedin-mcp-server:latest" ] } }, From a6b3147f3dddf6d96c8395000476a391724648de Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 13:59:36 -0400 Subject: [PATCH 137/565] refactor(drivers): improve session management and cleanup --- linkedin_mcp_server/drivers/chrome.py | 37 +++++++++++++++++++--- linkedin_mcp_server/server.py | 44 ++++++++------------------- linkedin_mcp_server/setup.py | 6 ++++ 3 files changed, 52 insertions(+), 35 deletions(-) diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 7b6243cd..4150a6d2 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -8,6 +8,8 @@ import logging import os +import shutil +import tempfile from typing import Dict, Optional from linkedin_scraper.exceptions import ( @@ -32,13 +34,19 @@ # Global driver storage to reuse sessions active_drivers: Dict[str, webdriver.Chrome] = {} +# Store user data directories for cleanup +user_data_dirs: Dict[str, str] = {} + logger = logging.getLogger(__name__) -def create_chrome_driver() -> webdriver.Chrome: +def create_chrome_driver(session_id: str = "default") -> webdriver.Chrome: """ Create a new Chrome WebDriver instance with proper configuration. 
+ Args: + session_id: Unique identifier for the session (used for cleanup) + Returns: webdriver.Chrome: Configured Chrome WebDriver instance @@ -63,6 +71,14 @@ def create_chrome_driver() -> webdriver.Chrome: chrome_options.add_argument("--disable-extensions") chrome_options.add_argument("--disable-background-timer-throttling") + # Create a unique user data directory to avoid conflicts + user_data_dir = tempfile.mkdtemp(prefix="linkedin_mcp_chrome_") + chrome_options.add_argument(f"--user-data-dir={user_data_dir}") + logger.debug(f"Using Chrome user data directory: {user_data_dir}") + + # Store the user data directory for cleanup + user_data_dirs[session_id] = user_data_dir + # Set user agent (configurable with sensible default) user_agent = getattr(config.chrome, "user_agent", DEFAULT_USER_AGENT) chrome_options.add_argument(f"--user-agent={user_agent}") @@ -213,7 +229,7 @@ def get_or_create_driver(authentication: str) -> webdriver.Chrome: try: # Create new driver - driver = create_chrome_driver() + driver = create_chrome_driver(session_id) # Login to LinkedIn login_to_linkedin(driver, authentication) @@ -245,7 +261,7 @@ def get_or_create_driver(authentication: str) -> webdriver.Chrome: def close_all_drivers() -> None: """Close all active drivers and clean up resources.""" - global active_drivers + global active_drivers, user_data_dirs for session_id, driver in active_drivers.items(): try: @@ -254,8 +270,21 @@ def close_all_drivers() -> None: except Exception as e: logger.warning(f"Error closing driver {session_id}: {e}") + # Clean up user data directory + if session_id in user_data_dirs: + try: + user_data_dir = user_data_dirs[session_id] + if os.path.exists(user_data_dir): + shutil.rmtree(user_data_dir) + logger.debug(f"Cleaned up user data directory: {user_data_dir}") + except Exception as e: + logger.warning( + f"Error cleaning up user data directory for session {session_id}: {e}" + ) + active_drivers.clear() - logger.info("All Chrome WebDriver sessions closed") 
+ user_data_dirs.clear() + logger.info("All Chrome WebDriver sessions closed and cleaned up") def get_active_driver() -> Optional[webdriver.Chrome]: diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index c99afdda..601ba322 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -10,7 +10,6 @@ from fastmcp import FastMCP -from linkedin_mcp_server.drivers.chrome import active_drivers from linkedin_mcp_server.tools.company import register_company_tools from linkedin_mcp_server.tools.job import register_job_tools from linkedin_mcp_server.tools.person import register_person_tools @@ -31,25 +30,18 @@ def create_mcp_server() -> FastMCP: @mcp.tool() async def close_session() -> Dict[str, Any]: """Close the current browser session and clean up resources.""" - session_id = "default" # Using the same default session + from linkedin_mcp_server.drivers.chrome import close_all_drivers - if session_id in active_drivers: - try: - active_drivers[session_id].quit() - del active_drivers[session_id] - return { - "status": "success", - "message": "Successfully closed the browser session", - } - except Exception as e: - return { - "status": "error", - "message": f"Error closing browser session: {str(e)}", - } - else: + try: + close_all_drivers() + return { + "status": "success", + "message": "Successfully closed the browser session and cleaned up resources", + } + except Exception as e: return { - "status": "warning", - "message": "No active browser session to close", + "status": "error", + "message": f"Error closing browser session: {str(e)}", } return mcp @@ -57,16 +49,6 @@ async def close_session() -> Dict[str, Any]: def shutdown_handler() -> None: """Clean up resources on shutdown.""" - for session_id, driver in list(active_drivers.items()): - try: - driver.quit() - del active_drivers[session_id] - except Exception as e: - logger.error( - f"Error closing driver during shutdown: {e}", - extra={ - "session_id": session_id, - 
"exception_type": type(e).__name__, - "exception_message": str(e), - }, - ) + from linkedin_mcp_server.drivers.chrome import close_all_drivers + + close_all_drivers() diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index 793a1fdc..9ce0c8f2 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -7,6 +7,7 @@ import logging import os +import tempfile from contextlib import contextmanager from typing import Dict, Iterator @@ -119,6 +120,11 @@ def temporary_chrome_driver() -> Iterator[webdriver.Chrome]: # chrome_options.add_argument("--disable-gpu") # chrome_options.add_argument("--window-size=3456,2234") + # Create a unique user data directory to avoid conflicts + user_data_dir = tempfile.mkdtemp(prefix="linkedin_mcp_setup_") + chrome_options.add_argument(f"--user-data-dir={user_data_dir}") + logger.debug(f"Using Chrome user data directory for setup: {user_data_dir}") + driver = None try: # Create temporary driver From 533e43d65db0419f64c2b71d2b768688598b920a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 14:00:10 -0400 Subject: [PATCH 138/565] chore(version): bump version to 1.1.1 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f99eaf6a..13126955 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.1.0" +version = "1.1.1" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index ddb27de6..09c2b438 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.1.0" +version = "1.1.1" source = { virtual = "." 
} dependencies = [ { name = "fastmcp" }, From f2f2e28981f591350b04a02d7fe35739380d199f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 6 Jul 2025 18:00:38 +0000 Subject: [PATCH 139/565] chore(dxt): update manifest.json version to v1.1.1 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index e8481f2a..2fbde991 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.1.0", + "version": "1.1.1", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From 84295f3ab48572463c4c86b9123977d8b3314e22 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 14:47:28 -0400 Subject: [PATCH 140/565] refactor(setup): streamline Chrome driver creation and options --- linkedin_mcp_server/drivers/chrome.py | 139 ++++++++++++++++++-------- linkedin_mcp_server/setup.py | 42 +------- main.py | 28 ++++-- 3 
files changed, 119 insertions(+), 90 deletions(-) diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 4150a6d2..902ecb0b 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -8,8 +8,6 @@ import logging import os -import shutil -import tempfile from typing import Dict, Optional from linkedin_scraper.exceptions import ( @@ -34,29 +32,22 @@ # Global driver storage to reuse sessions active_drivers: Dict[str, webdriver.Chrome] = {} -# Store user data directories for cleanup -user_data_dirs: Dict[str, str] = {} logger = logging.getLogger(__name__) -def create_chrome_driver(session_id: str = "default") -> webdriver.Chrome: +def create_chrome_options(config) -> Options: """ - Create a new Chrome WebDriver instance with proper configuration. + Create Chrome options with all necessary configuration for LinkedIn scraping. Args: - session_id: Unique identifier for the session (used for cleanup) + config: AppConfig instance with Chrome configuration Returns: - webdriver.Chrome: Configured Chrome WebDriver instance - - Raises: - WebDriverException: If driver creation fails + Options: Configured Chrome options object """ - config = get_config() - - # Set up Chrome options chrome_options = Options() + logger.info( f"Running browser in {'headless' if config.chrome.headless else 'visible'} mode" ) @@ -70,14 +61,15 @@ def create_chrome_driver(session_id: str = "default") -> webdriver.Chrome: chrome_options.add_argument("--window-size=1920,1080") chrome_options.add_argument("--disable-extensions") chrome_options.add_argument("--disable-background-timer-throttling") - - # Create a unique user data directory to avoid conflicts - user_data_dir = tempfile.mkdtemp(prefix="linkedin_mcp_chrome_") - chrome_options.add_argument(f"--user-data-dir={user_data_dir}") - logger.debug(f"Using Chrome user data directory: {user_data_dir}") - - # Store the user data directory for cleanup - 
user_data_dirs[session_id] = user_data_dir + chrome_options.add_argument("--disable-background-networking") + chrome_options.add_argument("--disable-default-apps") + chrome_options.add_argument("--disable-sync") + chrome_options.add_argument("--metrics-recording-only") + chrome_options.add_argument("--no-default-browser-check") + chrome_options.add_argument("--no-first-run") + chrome_options.add_argument("--disable-features=TranslateUI,BlinkGenPropertyTrees") + chrome_options.add_argument("--aggressive-cache-discard") + chrome_options.add_argument("--disable-ipc-flooding-protection") # Set user agent (configurable with sensible default) user_agent = getattr(config.chrome, "user_agent", DEFAULT_USER_AGENT) @@ -87,9 +79,19 @@ def create_chrome_driver(session_id: str = "default") -> webdriver.Chrome: for arg in config.chrome.browser_args: chrome_options.add_argument(arg) - # Initialize Chrome driver - logger.info("Initializing Chrome WebDriver...") + return chrome_options + +def create_chrome_service(config): + """ + Create Chrome service with ChromeDriver path resolution. + + Args: + config: AppConfig instance with Chrome configuration + + Returns: + Service or None: Chrome service if path is configured, None for auto-detection + """ # Use ChromeDriver path from environment or config chromedriver_path = ( os.environ.get("CHROMEDRIVER_PATH") or config.chrome.chromedriver_path @@ -97,10 +99,76 @@ def create_chrome_driver(session_id: str = "default") -> webdriver.Chrome: if chromedriver_path: logger.info(f"Using ChromeDriver at path: {chromedriver_path}") - service = Service(executable_path=chromedriver_path) - driver = webdriver.Chrome(service=service, options=chrome_options) + return Service(executable_path=chromedriver_path) else: logger.info("Using auto-detected ChromeDriver") + return None + + +def create_temporary_chrome_driver() -> webdriver.Chrome: + """ + Create a temporary Chrome WebDriver instance for one-off operations. 
+ + This driver is NOT stored in the global active_drivers dict and should be + manually cleaned up by the caller. + + Returns: + webdriver.Chrome: Configured Chrome WebDriver instance + + Raises: + WebDriverException: If driver creation fails + """ + config = get_config() + + logger.info("Creating temporary Chrome WebDriver...") + + # Create Chrome options using shared function + chrome_options = create_chrome_options(config) + + # Create Chrome service using shared function + service = create_chrome_service(config) + + # Initialize Chrome driver + if service: + driver = webdriver.Chrome(service=service, options=chrome_options) + else: + driver = webdriver.Chrome(options=chrome_options) + + logger.info("Temporary Chrome WebDriver created successfully") + + # Add a page load timeout for safety + driver.set_page_load_timeout(60) + + # Set shorter implicit wait for faster operations + driver.implicitly_wait(10) + + return driver + + +def create_chrome_driver() -> webdriver.Chrome: + """ + Create a new Chrome WebDriver instance with proper configuration. 
+ + Returns: + webdriver.Chrome: Configured Chrome WebDriver instance + + Raises: + WebDriverException: If driver creation fails + """ + config = get_config() + + logger.info("Initializing Chrome WebDriver...") + + # Create Chrome options using shared function + chrome_options = create_chrome_options(config) + + # Create Chrome service using shared function + service = create_chrome_service(config) + + # Initialize Chrome driver + if service: + driver = webdriver.Chrome(service=service, options=chrome_options) + else: driver = webdriver.Chrome(options=chrome_options) logger.info("Chrome WebDriver initialized successfully") @@ -229,7 +297,7 @@ def get_or_create_driver(authentication: str) -> webdriver.Chrome: try: # Create new driver - driver = create_chrome_driver(session_id) + driver = create_chrome_driver() # Login to LinkedIn login_to_linkedin(driver, authentication) @@ -261,7 +329,7 @@ def get_or_create_driver(authentication: str) -> webdriver.Chrome: def close_all_drivers() -> None: """Close all active drivers and clean up resources.""" - global active_drivers, user_data_dirs + global active_drivers for session_id, driver in active_drivers.items(): try: @@ -270,21 +338,8 @@ def close_all_drivers() -> None: except Exception as e: logger.warning(f"Error closing driver {session_id}: {e}") - # Clean up user data directory - if session_id in user_data_dirs: - try: - user_data_dir = user_data_dirs[session_id] - if os.path.exists(user_data_dir): - shutil.rmtree(user_data_dir) - logger.debug(f"Cleaned up user data directory: {user_data_dir}") - except Exception as e: - logger.warning( - f"Error cleaning up user data directory for session {session_id}: {e}" - ) - active_drivers.clear() - user_data_dirs.clear() - logger.info("All Chrome WebDriver sessions closed and cleaned up") + logger.info("All Chrome WebDriver sessions closed") def get_active_driver() -> Optional[webdriver.Chrome]: diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index 
9ce0c8f2..7a7ec3e4 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -6,14 +6,11 @@ """ import logging -import os -import tempfile from contextlib import contextmanager from typing import Dict, Iterator import inquirer from selenium import webdriver -from selenium.webdriver.chrome.options import Options from linkedin_mcp_server.authentication import store_authentication from linkedin_mcp_server.config import get_config @@ -105,44 +102,13 @@ def temporary_chrome_driver() -> Iterator[webdriver.Chrome]: Raises: Exception: If driver creation fails """ - config: AppConfig = get_config() - - logger.info("Creating temporary browser for cookie capture...") - - # Set up Chrome options for cookie capture - chrome_options = Options() - if config.chrome.headless: - chrome_options.add_argument("--headless=new") - - # Add essential options - # chrome_options.add_argument("--no-sandbox") - # chrome_options.add_argument("--disable-dev-shm-usage") - # chrome_options.add_argument("--disable-gpu") - # chrome_options.add_argument("--window-size=3456,2234") - - # Create a unique user data directory to avoid conflicts - user_data_dir = tempfile.mkdtemp(prefix="linkedin_mcp_setup_") - chrome_options.add_argument(f"--user-data-dir={user_data_dir}") - logger.debug(f"Using Chrome user data directory for setup: {user_data_dir}") + from linkedin_mcp_server.drivers.chrome import create_temporary_chrome_driver driver = None try: - # Create temporary driver - chromedriver_path = ( - os.environ.get("CHROMEDRIVER_PATH") or config.chrome.chromedriver_path - ) - - if chromedriver_path: - from selenium.webdriver.chrome.service import Service - - service = Service(executable_path=chromedriver_path) - driver = webdriver.Chrome(service=service, options=chrome_options) - else: - driver = webdriver.Chrome(options=chrome_options) - - driver.set_page_load_timeout(60) + # Create temporary driver using shared function + driver = create_temporary_chrome_driver() yield driver - 
finally: if driver: driver.quit() @@ -174,7 +140,7 @@ def capture_cookie_from_credentials(email: str, password: str) -> str: email, password, timeout=60, # longer timeout for login (captcha, mobile verification, etc.) - interactive=interactive, # Respect configuration setting + interactive=interactive, # type: ignore # Respect configuration setting ) # Capture cookie diff --git a/main.py b/main.py index 10a32ef9..8932d58c 100644 --- a/main.py +++ b/main.py @@ -80,16 +80,20 @@ def get_cookie_and_exit() -> None: print(cookie) # Try to copy to clipboard + clipboard_success = False try: import pyperclip pyperclip.copy(cookie) + clipboard_success = True + print("๐Ÿ“‹ Cookie copied to clipboard!") + except Exception as e: + logger.debug(f"pyperclip clipboard failed: {e}") + + if not clipboard_success: print( - "๐Ÿ“‹ Cookie copied to clipboard! Now you can set the LINKEDIN_COOKIE environment variable in your configuration" + "๐Ÿ’ก Set this cookie as an environment variable in your config or pass it with --cookie flag" ) - except Exception as e: - logger.warning(f"Could not copy to clipboard: {e}") - print("โš ๏ธ Copy the cookie above manually") except Exception as e: logger.error(f"Error getting cookie: {e}") @@ -108,7 +112,7 @@ def get_cookie_and_exit() -> None: print("\n๐Ÿช To get your LinkedIn cookie manually:") print(" 1. Login to LinkedIn in your browser") print(" 2. Open Developer Tools (F12)") - print(" 3. Go to Application/Storage > Cookies > linkedin.com") + print(" 3. Go to Application/Storage > Cookies > www.linkedin.com") print(" 4. Copy the 'li_at' cookie value") print(" 5. Set LINKEDIN_COOKIE environment variable or use --cookie flag") elif "invalid credentials" in error_msg: @@ -144,8 +148,9 @@ def ensure_authentication_ready() -> str: # If in non-interactive mode and no auth, fail immediately if config.chrome.non_interactive: raise CredentialsNotFoundError( - "No LinkedIn authentication found. 
Please provide cookie via " - "environment variable (LINKEDIN_COOKIE) or run with --get-cookie to obtain one." + "No LinkedIn cookie found for non-interactive mode. You can:\n" + " 1. Set LINKEDIN_COOKIE environment variable with a valid LinkedIn session cookie\n" + " 2. Run with --get-cookie to extract a cookie using email/password" ) # Run interactive setup @@ -211,9 +216,12 @@ def main() -> None: logger.info("Authentication ready") except CredentialsNotFoundError as e: logger.error(f"Authentication setup failed: {e}") - print( - "\nโŒ Authentication required - please provide LinkedIn cookie or credentials" - ) + if config.chrome.non_interactive: + print("\nโŒ LinkedIn cookie required for Docker/non-interactive mode") + else: + print( + "\nโŒ Authentication required - please provide LinkedIn authentication" + ) sys.exit(1) except KeyboardInterrupt: print("\n\n๐Ÿ‘‹ Setup cancelled by user") From 211a987f284e936ed9abbc4937096f6e2f34e215 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 14:47:46 -0400 Subject: [PATCH 141/565] chore(version): bump version to 1.1.2 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 13126955..03b45c01 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.1.1" +version = "1.1.2" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 09c2b438..b3c5e494 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.1.1" +version = "1.1.2" source = { virtual = "." 
} dependencies = [ { name = "fastmcp" }, From 6fa91ec0e22540b9a49a97adc539f7447313f43a Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 6 Jul 2025 18:48:16 +0000 Subject: [PATCH 142/565] chore(dxt): update manifest.json version to v1.1.2 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 2fbde991..8bc43410 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.1.1", + "version": "1.1.2", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From 3f3d13a2a47a41b583763ca2a0b0d321a372782a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 17:09:46 -0400 Subject: [PATCH 143/565] feat(logging): include version in startup logs --- main.py | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/main.py b/main.py index 8932d58c..c7dc4d5f 100644 --- 
a/main.py +++ b/main.py @@ -68,7 +68,15 @@ def get_cookie_and_exit() -> None: json_format=config.chrome.non_interactive and not config.server.debug, ) - logger.info("LinkedIn MCP Server - Cookie Extraction mode started") + # Get version for logging + try: + import importlib.metadata + + version = importlib.metadata.version("linkedin-mcp-server") + except Exception: + version = "unknown" + + logger.info(f"LinkedIn MCP Server v{version} - Cookie Extraction mode started") try: # Run cookie extraction setup @@ -190,8 +198,16 @@ def initialize_driver_with_auth(authentication: str) -> None: def main() -> None: """Main application entry point with clear phase separation.""" - logger.info("๐Ÿ”— LinkedIn MCP Server ๐Ÿ”—") - print("๐Ÿ”— LinkedIn MCP Server ๐Ÿ”—") + # Get version from package metadata + try: + import importlib.metadata + + version = importlib.metadata.version("linkedin-mcp-server") + except Exception: + version = "unknown" + + logger.info(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") + print(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") print("=" * 40) # Get configuration From 022444e9fd1cda056a9440e37a9994f17a2ec4bf Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 17:10:00 -0400 Subject: [PATCH 144/565] chore(version): bump version to 1.1.3 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 03b45c01..08b6fb38 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.1.2" +version = "1.1.3" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." 
readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index b3c5e494..3610ee8c 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.1.2" +version = "1.1.3" source = { virtual = "." } dependencies = [ { name = "fastmcp" }, From 641457cc718b5b0b85fc29e533d53a03a01abc06 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 6 Jul 2025 21:10:20 +0000 Subject: [PATCH 145/565] chore(dxt): update manifest.json version to v1.1.3 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 8bc43410..c1d23542 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.1.2", + "version": "1.1.3", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From 7de5bbb359b30085b2ccb9b4965bf25fb11ca5bf Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 17:51:25 -0400 Subject: [PATCH 146/565] refactor(person): rename parameter and construct URL to accept username instea --- linkedin_mcp_server/tools/person.py | 7 +++++-- manifest.json | 6 +++--- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index f1f6d706..6a24f8c5 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -25,17 +25,20 @@ def register_person_tools(mcp: FastMCP) -> None: """ @mcp.tool() - async def get_person_profile(linkedin_url: str) -> Dict[str, Any]: + async def get_person_profile(linkedin_username: str) -> Dict[str, Any]: """ Scrape a person's LinkedIn profile. 
Args: - linkedin_url (str): The LinkedIn URL of the person's profile + linkedin_username (str): LinkedIn username (e.g., "john-doe-123456", "sarah-smith", "stickerdaniel") Returns: Dict[str, Any]: Structured data from the person's profile """ try: + # Construct clean LinkedIn URL from username + linkedin_url = f"https://www.linkedin.com/in/{linkedin_username}/" + driver = safe_get_driver() logger.info(f"Scraping profile: {linkedin_url}") diff --git a/manifest.json b/manifest.json index c1d23542..eddc21cd 100644 --- a/manifest.json +++ b/manifest.json @@ -36,12 +36,12 @@ "parameters": { "type": "object", "properties": { - "profile_url": { + "linkedin_username": { "type": "string", - "description": "LinkedIn profile URL (e.g., https://www.linkedin.com/in/stickerdaniel/)" + "description": "LinkedIn username (e.g., \"stickerdaniel\", \"john-doe-123456\")" } }, - "required": ["profile_url"] + "required": ["linkedin_username"] } }, { From 73dcc69a2ea282c82738a7885d46b52c5fefc835 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 17:54:47 -0400 Subject: [PATCH 147/565] refactor(company): rename parameter and update URL construction --- linkedin_mcp_server/tools/company.py | 7 +++++-- manifest.json | 11 ++++++++--- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index f64879be..f3a3810c 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -26,19 +26,22 @@ def register_company_tools(mcp: FastMCP) -> None: @mcp.tool() async def get_company_profile( - linkedin_url: str, get_employees: bool = False + company_name: str, get_employees: bool = False ) -> Dict[str, Any]: """ Scrape a company's LinkedIn profile. 
Args: - linkedin_url (str): The LinkedIn URL of the company's profile + company_name (str): LinkedIn company name (e.g., "docker", "anthropic", "microsoft") get_employees (bool): Whether to scrape the company's employees (slower) Returns: Dict[str, Any]: Structured data from the company's profile """ try: + # Construct clean LinkedIn URL from company name + linkedin_url = f"https://www.linkedin.com/company/{company_name}/" + driver = safe_get_driver() logger.info(f"Scraping company: {linkedin_url}") diff --git a/manifest.json b/manifest.json index eddc21cd..7d3b28b4 100644 --- a/manifest.json +++ b/manifest.json @@ -50,12 +50,17 @@ "parameters": { "type": "object", "properties": { - "company_url": { + "company_name": { "type": "string", - "description": "LinkedIn company URL (e.g., https://www.linkedin.com/company/docker/)" + "description": "LinkedIn company name (e.g., \"docker\", \"anthropic\", \"microsoft\")" + }, + "get_employees": { + "type": "boolean", + "description": "Whether to scrape the company's employees (slower)", + "default": false } }, - "required": ["company_url"] + "required": ["company_name"] } }, { From 2a5ef0b5d0541887f645a6201dcafafdc0409c47 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 17:55:45 -0400 Subject: [PATCH 148/565] refactor(job): rename parameter to job_id in get_job_details --- linkedin_mcp_server/tools/job.py | 10 +++++----- manifest.json | 8 ++++---- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 05a8f310..1ad04e12 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -29,24 +29,24 @@ def register_job_tools(mcp: FastMCP) -> None: """ @mcp.tool() - async def get_job_details(job_url: str) -> Dict[str, Any]: + async def get_job_details(job_id: str) -> Dict[str, Any]: """ Scrape job details from a LinkedIn job posting. 
- IMPORTANT: Only use direct LinkedIn job URLs in the format: - https://www.linkedin.com/jobs/view/XXXXXXXX/ where XXXXXXXX is the job ID. - This tool extracts comprehensive job information including title, company, location, posting date, application count, and full job description. Args: - job_url (str): The LinkedIn job posting URL to scrape + job_id (str): LinkedIn job ID (e.g., "4252026496", "3856789012") Returns: Dict[str, Any]: Structured job data including title, company, location, posting date, application count, and job description (may be empty if content is protected) """ try: + # Construct clean LinkedIn URL from job ID + job_url = f"https://www.linkedin.com/jobs/view/{job_id}/" + driver = safe_get_driver() logger.info(f"Scraping job: {job_url}") diff --git a/manifest.json b/manifest.json index 7d3b28b4..a0a6e407 100644 --- a/manifest.json +++ b/manifest.json @@ -65,16 +65,16 @@ }, { "name": "get_job_details", - "description": "Retrieve specific job posting details using LinkedIn job URLs", + "description": "Retrieve specific job posting details using LinkedIn job IDs", "parameters": { "type": "object", "properties": { - "job_url": { + "job_id": { "type": "string", - "description": "LinkedIn job URL (e.g., https://www.linkedin.com/jobs/view/123456789)" + "description": "LinkedIn job ID (e.g., \"4252026496\", \"3856789012\")" } }, - "required": ["job_url"] + "required": ["job_id"] } }, { From ecb902784b3c2875d6a48e87f7ee800eb9f8b181 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 17:56:00 -0400 Subject: [PATCH 149/565] chore(version): bump version to 1.1.4 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 08b6fb38..a04bedd2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.1.3" +version = "1.1.4" description = "MCP server for LinkedIn profile, company, and job scraping with 
Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 3610ee8c..d3bb4d4a 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.1.3" +version = "1.1.4" source = { virtual = "." } dependencies = [ { name = "fastmcp" }, From 9a2415bbb09802c1738129976fc292eb4997240e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 6 Jul 2025 21:56:24 +0000 Subject: [PATCH 150/565] chore(dxt): update manifest.json version to v1.1.4 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index a0a6e407..d9157ad2 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.1.3", + "version": "1.1.4", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From a8052c9f88c49437e9a40dd1ca045014f41f7af6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 17:57:40 -0400 Subject: [PATCH 151/565] docs(readme): update LinkedIn setup instructions --- README.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README.md b/README.md index 8f68b751..6f76b66a 100644 --- a/README.md +++ b/README.md @@ -158,8 +158,7 @@ docker run -i --rm \ **One-click installation** for Claude Desktop users: 1. Download the [DXT extension](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest/download/linkedin-mcp-server.dxt) 2. Double-click to install into Claude Desktop -3. Configure your LinkedIn credentials when prompted -4. Start using LinkedIn tools immediately +3. Set your LinkedIn cookie in the extension settings ### Getting the LinkedIn Cookie
From 3bbe92eedbca15d85dde68cbcb3ce72626dbf01a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 18:19:55 -0400 Subject: [PATCH 152/565] feat(stdout): suppress stdout in MCP stdio mode if --no-setup flag is set --- main.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/main.py b/main.py index c7dc4d5f..a05e24fb 100644 --- a/main.py +++ b/main.py @@ -9,6 +9,7 @@ """ import logging +import os import sys from typing import Literal @@ -28,6 +29,7 @@ ) from linkedin_mcp_server.cli import print_claude_config from linkedin_mcp_server.config import get_config +from linkedin_mcp_server.config.schema import AppConfig from linkedin_mcp_server.drivers.chrome import close_all_drivers, get_or_create_driver from linkedin_mcp_server.exceptions import CredentialsNotFoundError, LinkedInMCPError from linkedin_mcp_server.logging_config import configure_logging @@ -37,6 +39,11 @@ logger = logging.getLogger(__name__) +def should_suppress_stdout(config: AppConfig) -> bool: + """Check if stdout should be suppressed to avoid interfering with MCP stdio protocol.""" + return not config.server.setup and config.server.transport == "stdio" + + def choose_transport_interactive() -> Literal["stdio", "streamable-http"]: """Prompt user for transport mode using inquirer.""" questions = [ @@ -213,6 +220,10 @@ def main() -> None: # Get configuration config = get_config() + # Suppress stdout if running in MCP stdio mode to avoid interfering with JSON-RPC protocol + if should_suppress_stdout(config): + sys.stdout = open(os.devnull, "w") + # Handle --get-cookie flag immediately if config.server.get_cookie: get_cookie_and_exit() From 41512f1ed8b02484dd29373dd532832afea818b7 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 18:20:17 -0400 Subject: [PATCH 153/565] chore(version): bump version to 1.1.5 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 
a04bedd2..6bbb47c9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.1.4" +version = "1.1.5" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index d3bb4d4a..f1f5e08c 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.1.4" +version = "1.1.5" source = { virtual = "." } dependencies = [ { name = "fastmcp" }, From 8755670c79d19d127100f605e7cd11b9bade9766 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 6 Jul 2025 22:20:42 +0000 Subject: [PATCH 154/565] chore(dxt): update manifest.json version to v1.1.5 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index d9157ad2..8827d349 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.1.4", + "version": "1.1.5", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From 1ee581ac4a626daad77ceb5256117762ba67b5fc Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 18:57:16 -0400 Subject: [PATCH 155/565] fix(manifest): add search and recommended jobs features --- README.md | 18 ++++++------------ manifest.json | 23 +++++++++++++++++++++++ 2 files changed, 29 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 6f76b66a..bbec3891 100644 --- a/README.md +++ b/README.md @@ -26,10 +26,10 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c **Current Status: All Tools Working** > [!TIP] -> - **Profile Scraping** (`get_person_profile`): Get detailed information from LinkedIn profiles including work history, education, skills, and connections -> - **Company Analysis** (`get_company_profile`): Extract company information with comprehensive details -> - **Job Details** (`get_job_details`): Retrieve specific job posting details using direct LinkedIn job URLs -> - **Job Search** (`search_jobs`): Search for jobs with filters like location, keywords, and experience level +> - **Profile Scraping** (`get_person_profile`): Get detailed information from a LinkedIn profile including work history, education, skills, and connections +> - **Company Analysis** 
(`get_company_profile`): Extract comprehensive company information from a LinkedIn company profile name +> - **Job Details** (`get_job_details`): Retrieve specific job posting details using LinkedIn job IDs +> - **Job Search** (`search_jobs`): Search for jobs with filters like keywords and location > - **Recommended Jobs** (`get_recommended_jobs`): Get personalized job recommendations based on your profile > - **Session Management** (`close_session`): Properly close browser session and clean up resources @@ -92,10 +92,7 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c
> [!NOTE] -> The cookie will expire during the next 30 days. Just get the new cookie and update your config. - -> [!TIP] -> There are also many cookie manager extensions that you can use to easily get the cookie. +> The cookie will expire during the next 30 days. Just get the new cookie and update your config. There are also many cookie manager extensions that you can use to easily get the cookie. ### Docker Setup Help
@@ -187,10 +184,7 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c
> [!NOTE] -> The cookie will expire during the next 30 days. Just get the new cookie and update your config. - -> [!TIP] -> There are also many cookie manager extensions that you can use to easily get the cookie. +> The cookie will expire during the next 30 days. Just get the new cookie and update your config. There are also many cookie manager extensions that you can use to easily get the cookie. ### DXT Extension Setup Help
diff --git a/manifest.json b/manifest.json index 8827d349..aa9f397d 100644 --- a/manifest.json +++ b/manifest.json @@ -77,6 +77,29 @@ "required": ["job_id"] } }, + { + "name": "search_jobs", + "description": "Search for jobs with filters like keywords and location", + "parameters": { + "type": "object", + "properties": { + "search_term": { + "type": "string", + "description": "Search term for job search (e.g., \"software engineer\", \"product manager\")" + } + }, + "required": ["search_term"] + } + }, + { + "name": "get_recommended_jobs", + "description": "Get personalized job recommendations based on your profile", + "parameters": { + "type": "object", + "properties": {}, + "required": [] + } + }, { "name": "close_session", "description": "Properly close browser session and clean up resources", From bbac67e0c2f47aa48009c44b0ca006127bbdf116 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 6 Jul 2025 18:58:32 -0400 Subject: [PATCH 156/565] chore(version): bump version to 1.1.6 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 6bbb47c9..9e28f25a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.1.5" +version = "1.1.6" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index f1f5e08c..85618af2 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.1.5" +version = "1.1.6" source = { virtual = "." 
} dependencies = [ { name = "fastmcp" }, From 24a5faffe43054cdc7e69c5e839335ba04fc950c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 6 Jul 2025 22:58:53 +0000 Subject: [PATCH 157/565] chore(dxt): update manifest.json version to v1.1.6 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index aa9f397d..cab700b5 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.1.5", + "version": "1.1.6", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From 204956eaec9d46d76b03e95b709839a7e2c04477 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 01:34:22 -0400 Subject: [PATCH 158/565] docs(readme): update LinkedIn cookie instructions --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index bbec3891..7d3e57c8 100644 --- 
a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # LinkedIn MCP Server -A Model Context Protocol (MCP) server that enables interaction with LinkedIn through Claude and other AI assistants. This server allows you to scrape LinkedIn profiles, companies, jobs, and perform job searches. +Through this LinkedIn MCP server, AI assistants like ChatGPT and Claude can connect to your LinkedIn. Give access to profiles and companies, get your recommended jobs, or search for keywords. All from a Docker container on your local machine. ## Installation Methods @@ -92,7 +92,7 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c
> [!NOTE] -> The cookie will expire during the next 30 days. Just get the new cookie and update your config. There are also many cookie manager extensions that you can use to easily get the cookie. +> The cookie will expire during the next 30 days. Just get the new cookie and update your client config. There are also many cookie manager extensions that you can use to quickly copy the cookie. ### Docker Setup Help
@@ -184,7 +184,7 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c
> [!NOTE] -> The cookie will expire during the next 30 days. Just get the new cookie and update your config. There are also many cookie manager extensions that you can use to easily get the cookie. +> The cookie will expire during the next 30 days. Just get the new cookie and update your client config. There are also many cookie manager extensions that you can use to quickly copy the cookie. ### DXT Extension Setup Help
From 901d492912f4d3d1938a416bc74ad8b68da71d43 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 01:34:58 -0400 Subject: [PATCH 159/565] docs(readme): add HTTP transport mode instructions --- README.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/README.md b/README.md index 7d3e57c8..1b565413 100644 --- a/README.md +++ b/README.md @@ -250,8 +250,17 @@ uv run main.py --no-headless --no-lazy-init - `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call - `--get-cookie` - Login with email and password and extract the LinkedIn cookie - `--cookie {cookie}` - Pass a specific LinkedIn cookie for login +- `--transport {stdio,streamable-http}` - Set transport mode +- `--host HOST` - HTTP server host (default: 127.0.0.1) +- `--port PORT` - HTTP server port (default: 8000) +- `--path PATH` - HTTP server path (default: /mcp) - `--help` - Show help +**HTTP Mode Example (for web-based MCP clients):** +```bash +uv run main.py --no-setup --transport streamable-http --host 127.0.0.1 --port 8000 --path /mcp +``` + **Claude Desktop:** ```**json** { From 2492ce1892733f75850335fad3e2a791d3199587 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Mon, 7 Jul 2025 01:41:50 -0400 Subject: [PATCH 160/565] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1b565413..b3b1bceb 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # LinkedIn MCP Server -Through this LinkedIn MCP server, AI assistants like ChatGPT and Claude can connect to your LinkedIn. Give access to profiles and companies, get your recommended jobs, or search for keywords. All from a Docker container on your local machine. +Through this LinkedIn MCP server, AI assistants like Claude can connect to your LinkedIn. Give access to profiles and companies, get your recommended jobs, or search for keywords. 
All from a Docker container on your local machine. ## Installation Methods From b041df7fe7b1b65bfef8251b430a29fcfd056e01 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 11:50:30 -0400 Subject: [PATCH 161/565] docs(release): clarify LinkedIn cookie setup instructions --- .github/workflows/release.yml | 2 +- README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 8001cd68..eaef23a4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -152,7 +152,7 @@ jobs: ## Update Claude Desktop DXT Extension 1. Download the `.dxt` file below 2. Double-click to open in Claude Desktop - 3. Configure with your LinkedIn credentials + 3. Set your LinkedIn cookie - name: Summary run: | diff --git a/README.md b/README.md index b3b1bceb..d6319aff 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # LinkedIn MCP Server -Through this LinkedIn MCP server, AI assistants like Claude can connect to your LinkedIn. Give access to profiles and companies, get your recommended jobs, or search for keywords. All from a Docker container on your local machine. +Through this LinkedIn MCP server, AI assistants like Claude can connect to your LinkedIn. Give access to profiles and companies, get your recommended jobs, or search for keywords. All from a Docker container on your machine. ## Installation Methods From 2bde3c749f9c68df8697341a8fec3206555f01fa Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 13:01:49 -0400 Subject: [PATCH 162/565] docs(readme): add star history section --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index d6319aff..8d765cbf 100644 --- a/README.md +++ b/README.md @@ -307,3 +307,7 @@ MIT License Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@joeyism](https://github.com/joeyism) and [Model Context Protocol](https://modelcontextprotocol.io/). 
โš ๏ธ Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. + +## Star History + +[![Star History Chart](https://api.star-history.com/svg?repos=stickerdaniel/linkedin-mcp-server&type=Date)](https://www.star-history.com/#stickerdaniel/linkedin-mcp-server&Date) From bdebb1872d7ace4c41e511c77924e4b09d3142bc Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 13:03:02 -0400 Subject: [PATCH 163/565] docs(readme): enhance star history display with dark theme support --- README.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 8d765cbf..f91aa077 100644 --- a/README.md +++ b/README.md @@ -310,4 +310,10 @@ Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@ ## Star History -[![Star History Chart](https://api.star-history.com/svg?repos=stickerdaniel/linkedin-mcp-server&type=Date)](https://www.star-history.com/#stickerdaniel/linkedin-mcp-server&Date) + + + + + Star History Chart + + From c04f5b81b4e5f36d6a348da445b26550030a8487 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 13:05:07 -0400 Subject: [PATCH 164/565] docs(readme): update acknowledgements and license section --- README.md | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index f91aa077..ac80434e 100644 --- a/README.md +++ b/README.md @@ -299,14 +299,9 @@ Feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-serve

-## License - -MIT License ## Acknowledgements -Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@joeyism](https://github.com/joeyism) and [Model Context Protocol](https://modelcontextprotocol.io/). - -โš ๏ธ Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. +Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@joeyism](https://github.com/joeyism) and [FastMCP](https://gofastmcp.com/). ## Star History @@ -317,3 +312,13 @@ Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@ Star History Chart + + +## License + +MIT License + +
+
+ +โš ๏ธ Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. From 83932a715ab987e5387650c3dcebe61a04121707 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 13:05:52 -0400 Subject: [PATCH 165/565] docs(readme): add usage warning for LinkedIn scraping --- README.md | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index ac80434e..ed5056a2 100644 --- a/README.md +++ b/README.md @@ -303,6 +303,8 @@ Feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-serve ## Acknowledgements Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@joeyism](https://github.com/joeyism) and [FastMCP](https://gofastmcp.com/). +โš ๏ธ Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. + ## Star History @@ -318,7 +320,4 @@ Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@ MIT License -
-
- -โš ๏ธ Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. +
From 6ab121dfa011b9314794f1ee9a3fe116301769dc Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 13:18:55 -0400 Subject: [PATCH 166/565] docs(readme): simplify star history section markup --- README.md | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index ed5056a2..af4c7b00 100644 --- a/README.md +++ b/README.md @@ -307,13 +307,11 @@ Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@ ## Star History -
- - - - Star History Chart - - + + + + Star History Chart + ## License From 2ddecdc676a9520044df37d6b1a15372237be818 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 15:23:45 -0400 Subject: [PATCH 167/565] docs(readme): clarify license statement --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index af4c7b00..4046cb13 100644 --- a/README.md +++ b/README.md @@ -316,6 +316,6 @@ Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@ ## License -MIT License +This project is licensed under the MIT License
From ce2e0c797726ac3da7c2f9e5214ab5c32d3675ee Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 17:23:32 -0400 Subject: [PATCH 168/565] feat(config): improve transport mode configuration --- .vscode/tasks.json | 13 ++++--------- linkedin_mcp_server/config/loaders.py | 10 ++++++++++ linkedin_mcp_server/config/schema.py | 1 + main.py | 11 +++++++++-- 4 files changed, 24 insertions(+), 11 deletions(-) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index c541f611..1ed815b1 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -85,22 +85,17 @@ "problemMatcher": [] }, { - "label": "uv run main.py --transport streamable-http --no-setup", + "label": "uv run main.py --no-headless --no-lazy-init --transport streamable-http", "detail": "Start HTTP MCP server on localhost:8000/mcp", "type": "shell", "command": "uv", "args": [ "run", "main.py", + "--no-headless", + "--no-lazy-init", "--transport", - "streamable-http", - "--host", - "127.0.0.1", - "--port", - "8000", - "--path", - "/mcp", - "--no-setup" + "streamable-http" ], "isBackground": true, "group": { diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index 771d9635..c144adea 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -61,6 +61,15 @@ def load_from_env(config: AppConfig) -> AppConfig: elif os.environ.get("LAZY_INIT") in ("0", "false", "False", "no", "No"): config.server.lazy_init = False + # Transport mode + if transport_env := os.environ.get("TRANSPORT"): + if transport_env == "stdio": + config.server.transport = "stdio" + config.server.transport_explicitly_set = True + elif transport_env == "streamable-http": + config.server.transport = "streamable-http" + config.server.transport_explicitly_set = True + return config @@ -160,6 +169,7 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.transport: config.server.transport = args.transport + config.server.transport_explicitly_set = True if 
args.host: config.server.host = args.host diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index cf3e0129..20036e76 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -28,6 +28,7 @@ class ServerConfig: """MCP server configuration.""" transport: Literal["stdio", "streamable-http"] = "stdio" + transport_explicitly_set: bool = False # Track if transport was explicitly set lazy_init: bool = True debug: bool = False setup: bool = True diff --git a/main.py b/main.py index a05e24fb..2683f13c 100644 --- a/main.py +++ b/main.py @@ -302,11 +302,18 @@ def main() -> None: # Phase 3: Server Runtime try: - # Decide transport + # Decide transport using the new config system transport = config.server.transport - if config.server.setup: + + # Only show transport prompt if: + # a) we don't have --no-setup flag (config.server.setup is True) AND + # b) transport wasn't explicitly set via CLI/env + if config.server.setup and not config.server.transport_explicitly_set: print("\n๐Ÿš€ Server ready! 
Choose transport mode:") transport = choose_transport_interactive() + elif not config.server.setup and not config.server.transport_explicitly_set: + # If we have --no-setup and no transport explicitly set, use default (stdio) + transport = config.server.transport # Print configuration for Claude if in setup mode and using stdio transport if config.server.setup and transport == "stdio": From 0ddcd33f2d305cb07060e3cd6771c744943c507b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 17:33:10 -0400 Subject: [PATCH 169/565] feat(config): add keychain clearing functionality --- linkedin_mcp_server/config/__init__.py | 4 ++ linkedin_mcp_server/config/loaders.py | 8 +++ linkedin_mcp_server/config/providers.py | 62 ++++++++++++++++ linkedin_mcp_server/config/schema.py | 1 + main.py | 95 ++++++++++++++++++++++++- 5 files changed, 169 insertions(+), 1 deletion(-) diff --git a/linkedin_mcp_server/config/__init__.py b/linkedin_mcp_server/config/__init__.py index a79d7eef..17e4e9ef 100644 --- a/linkedin_mcp_server/config/__init__.py +++ b/linkedin_mcp_server/config/__init__.py @@ -5,6 +5,8 @@ from .loaders import load_config from .providers import ( clear_credentials_from_keyring, + clear_all_keychain_data, + check_keychain_data_exists, get_credentials_from_keyring, get_keyring_name, save_credentials_to_keyring, @@ -45,5 +47,7 @@ def reset_config() -> None: "get_credentials_from_keyring", "save_credentials_to_keyring", "clear_credentials_from_keyring", + "clear_all_keychain_data", + "check_keychain_data_exists", "get_keyring_name", ] diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index c144adea..3f3f2414 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -143,6 +143,12 @@ def load_from_args(config: AppConfig) -> AppConfig: help="Login with credentials and display cookie for Docker setup", ) + parser.add_argument( + "--clear-keychain", + action="store_true", + help="Clear 
all stored LinkedIn credentials and cookies from system keychain", + ) + parser.add_argument( "--cookie", type=str, @@ -185,6 +191,8 @@ def load_from_args(config: AppConfig) -> AppConfig: if hasattr(args, "get_cookie") and args.get_cookie: config.server.get_cookie = True + if hasattr(args, "clear_keychain") and args.clear_keychain: + config.server.clear_keychain = True if args.cookie: config.linkedin.cookie = args.cookie diff --git a/linkedin_mcp_server/config/providers.py b/linkedin_mcp_server/config/providers.py index aab30543..8de2e1e8 100644 --- a/linkedin_mcp_server/config/providers.py +++ b/linkedin_mcp_server/config/providers.py @@ -96,6 +96,68 @@ def clear_cookie_from_keyring() -> bool: return False +def check_keychain_data_exists() -> Dict[str, bool]: + """Check what LinkedIn data exists in the keyring.""" + credentials = get_credentials_from_keyring() + cookie = get_cookie_from_keyring() + + return { + "has_email": credentials["email"] is not None, + "has_password": credentials["password"] is not None, + "has_cookie": cookie is not None, + "has_credentials": credentials["email"] is not None + or credentials["password"] is not None, + "has_any": credentials["email"] is not None + or credentials["password"] is not None + or cookie is not None, + } + + +def clear_existing_keychain_data() -> Dict[str, bool]: + """Clear only existing LinkedIn data from the keyring.""" + existing = check_keychain_data_exists() + results = {"credentials_cleared": False, "cookie_cleared": False} + + # Only try to clear credentials if they exist + if existing["has_credentials"]: + try: + if existing["has_email"]: + keyring.delete_password(SERVICE_NAME, EMAIL_KEY) + if existing["has_password"]: + keyring.delete_password(SERVICE_NAME, PASSWORD_KEY) + results["credentials_cleared"] = True + logger.info(f"Credentials removed from {get_keyring_name()}") + except KeyringError as e: + logger.error(f"Error clearing credentials: {e}") + else: + results["credentials_cleared"] = True # 
Nothing to clear = success + + # Only try to clear cookie if it exists + if existing["has_cookie"]: + try: + keyring.delete_password(SERVICE_NAME, COOKIE_KEY) + results["cookie_cleared"] = True + logger.info(f"Cookie removed from {get_keyring_name()}") + except KeyringError as e: + logger.error(f"Error clearing cookie: {e}") + else: + results["cookie_cleared"] = True # Nothing to clear = success + + return results + + +def clear_all_keychain_data() -> bool: + """Clear all stored LinkedIn data from the keyring (credentials + cookie).""" + results = clear_existing_keychain_data() + + if results["credentials_cleared"] and results["cookie_cleared"]: + logger.info(f"All LinkedIn data cleared from {get_keyring_name()}") + return True + else: + logger.error("Failed to clear some LinkedIn data from keyring") + return False + + def get_chromedriver_paths() -> List[str]: """Get possible ChromeDriver paths based on the platform.""" paths = [ diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index 20036e76..501ac96c 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -33,6 +33,7 @@ class ServerConfig: debug: bool = False setup: bool = True get_cookie: bool = False + clear_keychain: bool = False # HTTP transport configuration host: str = "127.0.0.1" port: int = 8000 diff --git a/main.py b/main.py index 2683f13c..e0a73104 100644 --- a/main.py +++ b/main.py @@ -28,7 +28,12 @@ has_authentication, ) from linkedin_mcp_server.cli import print_claude_config -from linkedin_mcp_server.config import get_config +from linkedin_mcp_server.config import ( + get_config, + clear_all_keychain_data, + check_keychain_data_exists, + get_keyring_name, +) from linkedin_mcp_server.config.schema import AppConfig from linkedin_mcp_server.drivers.chrome import close_all_drivers, get_or_create_driver from linkedin_mcp_server.exceptions import CredentialsNotFoundError, LinkedInMCPError @@ -65,6 +70,90 @@ def 
choose_transport_interactive() -> Literal["stdio", "streamable-http"]: return answers["transport"] +def clear_keychain_and_exit() -> None: + """Clear LinkedIn keychain data and exit.""" + config = get_config() + + # Configure logging - prioritize debug mode over non_interactive + configure_logging( + debug=config.server.debug, + json_format=config.chrome.non_interactive and not config.server.debug, + ) + + # Get version for logging + try: + import importlib.metadata + + version = importlib.metadata.version("linkedin-mcp-server") + except Exception: + version = "unknown" + + logger.info(f"LinkedIn MCP Server v{version} - Keychain Clear mode started") + + # Check what exists in keychain + existing = check_keychain_data_exists() + + # If nothing exists, inform user and exit + if not existing["has_any"]: + print("โ„น๏ธ No LinkedIn data found in keychain") + print("Nothing to clear.") + sys.exit(0) + + # Show confirmation prompt for existing items only + keyring_name = get_keyring_name() + print(f"๐Ÿ”‘ Clear LinkedIn data from {keyring_name}?") + print("This will remove:") + + items_to_remove = [] + if existing["has_credentials"]: + credential_parts = [] + if existing["has_email"]: + credential_parts.append("email") + if existing["has_password"]: + credential_parts.append("password") + items_to_remove.append(f" โ€ข LinkedIn {' and '.join(credential_parts)}") + + if existing["has_cookie"]: + items_to_remove.append(" โ€ข LinkedIn session cookie") + + for item in items_to_remove: + print(item) + print() + + # Get user confirmation + try: + confirmation = ( + input("Are you sure you want to clear this keychain data? 
(y/N): ") + .strip() + .lower() + ) + if confirmation not in ("y", "yes"): + print("โŒ Operation cancelled") + sys.exit(0) + except KeyboardInterrupt: + print("\nโŒ Operation cancelled") + sys.exit(0) + + try: + # Clear all keychain data + success = clear_all_keychain_data() + + if success: + logger.info("Keychain data cleared successfully") + print("โœ… LinkedIn keychain data cleared successfully!") + else: + logger.error("Failed to clear keychain data") + print("โŒ Failed to clear some keychain data - check logs for details") + sys.exit(1) + + except Exception as e: + logger.error(f"Error clearing keychain: {e}") + print(f"โŒ Error clearing keychain: {e}") + sys.exit(1) + + sys.exit(0) + + def get_cookie_and_exit() -> None: """Get LinkedIn cookie and exit (for Docker setup).""" config = get_config() @@ -224,6 +313,10 @@ def main() -> None: if should_suppress_stdout(config): sys.stdout = open(os.devnull, "w") + # Handle --clear-keychain flag immediately + if config.server.clear_keychain: + clear_keychain_and_exit() + # Handle --get-cookie flag immediately if config.server.get_cookie: get_cookie_and_exit() From 16e5d6a85eb18ee738e1d933bad8dde5bed41bd5 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 17:33:57 -0400 Subject: [PATCH 170/565] docs(readme): document keychain clearing option --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 4046cb13..d699a65e 100644 --- a/README.md +++ b/README.md @@ -249,6 +249,7 @@ uv run main.py --no-headless --no-lazy-init - `--no-setup` - Skip credential prompts (make sure to set `LINKEDIN_COOKIE` or `LINKEDIN_EMAIL` and `LINKEDIN_PASSWORD` in env or that you run the server once manually, so the authentication is stored in your OS keychain and you can run the server without credentials) - `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call - `--get-cookie` - Login with email and password and extract the LinkedIn cookie +- 
`--clear-keychain` - Clear all stored LinkedIn credentials and cookies from system keychain - `--cookie {cookie}` - Pass a specific LinkedIn cookie for login - `--transport {stdio,streamable-http}` - Set transport mode - `--host HOST` - HTTP server host (default: 127.0.0.1) From 87367391e01e1ae792ca28baa1b49e6698e99d41 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 17:55:41 -0400 Subject: [PATCH 171/565] fix(main): version retrieval logic for logging --- main.py | 35 +++++++++++++++++------------------ 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/main.py b/main.py index e0a73104..f5e71265 100644 --- a/main.py +++ b/main.py @@ -81,12 +81,7 @@ def clear_keychain_and_exit() -> None: ) # Get version for logging - try: - import importlib.metadata - - version = importlib.metadata.version("linkedin-mcp-server") - except Exception: - version = "unknown" + version = get_version() logger.info(f"LinkedIn MCP Server v{version} - Keychain Clear mode started") @@ -165,12 +160,7 @@ def get_cookie_and_exit() -> None: ) # Get version for logging - try: - import importlib.metadata - - version = importlib.metadata.version("linkedin-mcp-server") - except Exception: - version = "unknown" + version = get_version() logger.info(f"LinkedIn MCP Server v{version} - Cookie Extraction mode started") @@ -292,15 +282,24 @@ def initialize_driver_with_auth(authentication: str) -> None: raise e -def main() -> None: - """Main application entry point with clear phase separation.""" - # Get version from package metadata +def get_version() -> str: + """Get version from pyproject.toml.""" try: - import importlib.metadata + import tomllib + import os - version = importlib.metadata.version("linkedin-mcp-server") + pyproject_path = os.path.join(os.path.dirname(__file__), "pyproject.toml") + with open(pyproject_path, "rb") as f: + data = tomllib.load(f) + return data["project"]["version"] except Exception: - version = "unknown" + return "unknown" + + +def main() -> 
None: + """Main application entry point with clear phase separation.""" + # Get version + version = get_version() logger.info(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") print(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") From 6cde44ad24281cea5d20ed61a8d3b2011ef5d43a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 18:11:32 -0400 Subject: [PATCH 172/565] docs(readme): update LinkedIn cookie login troubleshooting --- README.md | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index d699a65e..db836916 100644 --- a/README.md +++ b/README.md @@ -140,8 +140,9 @@ docker run -i --rm \ - Check if Docker is running: `docker ps` **Login issues:** -- Ensure your LinkedIn credentials are set and correct -- LinkedIn may require a login confirmation in the LinkedIn mobile app +- Ensure your LinkedIn cookie is set and correct +- Make sure you have only one active LinkedIn session per cookie at a time. Trying to open multiple sessions with the same cookie will result in a cookie invalid error. +- LinkedIn may require a login confirmation in the LinkedIn mobile app for --get-cookie - You might get a captcha challenge if you logged in a lot of times in a short period of time, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode where you can debug the login process (solve captcha manually)
@@ -195,8 +196,9 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c - Check if Docker is running: `docker ps` **Login issues:** -- Ensure your LinkedIn credentials are set and correct -- LinkedIn may require a login confirmation in the LinkedIn mobile app +- Ensure your LinkedIn cookie is set and correct +- Make sure you have only one active LinkedIn session per cookie at a time. Trying to open multiple sessions with the same cookie will result in a cookie invalid error. +- LinkedIn may require a login confirmation in the LinkedIn mobile app for --get-cookie - You might get a captcha challenge if you logged in a lot of times in a short period of time, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode where you can debug the login process (solve captcha manually) @@ -283,6 +285,7 @@ uv run main.py --no-setup --transport streamable-http --host 127.0.0.1 --port 80 - Use `--no-headless` to see browser actions (captcha challenge, LinkedIn mobile app 2fa, ...) - Add `--no-lazy-init` to attempt to login to LinkedIn immediately instead of waiting for the first tool call - Add `--debug` to see more detailed logging +- Make sure you have only one active LinkedIn session per cookie at a time. Trying to open multiple sessions with the same cookie will result in a cookie invalid error. E.g. if you have a logged in browser session with a docker container, you can't use the same cookie to login with the local setup while the docker container is running / session is not closed. 
**ChromeDriver issues:** - Ensure Chrome and ChromeDriver versions match From f2041454d1fc592e24f94b15fd770f04b6876fda Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 7 Jul 2025 18:13:31 -0400 Subject: [PATCH 173/565] chore(version): bump version to 1.1.7 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9e28f25a..0640987e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.1.6" +version = "1.1.7" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 85618af2..78ee4954 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.1.6" +version = "1.1.7" source = { virtual = "." } dependencies = [ { name = "fastmcp" }, From e533c68dc2cfcb21ab8d75f6b0e5fe277d2f2dcb Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 7 Jul 2025 22:13:54 +0000 Subject: [PATCH 174/565] chore(dxt): update manifest.json version to v1.1.7 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index cab700b5..0ec9d2f7 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.1.6", + "version": "1.1.7", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\n⚠️ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From 633878147d8207b6e6781eeaea1df5074e062481 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 05:04:19 -0400 Subject: [PATCH 175/565] refactor(project): simplify entry point and improve logging Updated the Dockerfile entry point. Enhanced main.py with clearer logging and configuration handling, including adjustments for interactive and non-interactive modes. Improved documentation in README.md for command usage and options. Changes include: - Removed unnecessary setup flags in entry point. - Refined logging configuration and output. - Updated README for clarity on command-line options and usage.
--- Dockerfile | 2 +- README.md | 33 ++-- linkedin_mcp_server/__init__.py | 23 ++- linkedin_mcp_server/authentication.py | 120 ++++----------- linkedin_mcp_server/cli.py | 5 +- linkedin_mcp_server/config/__init__.py | 16 ++ linkedin_mcp_server/config/loaders.py | 193 ++++++++++++++++++------ linkedin_mcp_server/config/messages.py | 101 +++++++++++++ linkedin_mcp_server/config/providers.py | 19 ++- linkedin_mcp_server/config/schema.py | 62 +++++++- linkedin_mcp_server/config/secrets.py | 51 +++---- linkedin_mcp_server/drivers/__init__.py | 16 +- linkedin_mcp_server/drivers/chrome.py | 7 +- linkedin_mcp_server/error_handler.py | 8 +- linkedin_mcp_server/exceptions.py | 8 +- linkedin_mcp_server/logging_config.py | 18 ++- linkedin_mcp_server/server.py | 6 +- linkedin_mcp_server/setup.py | 40 +++-- linkedin_mcp_server/tools/__init__.py | 19 ++- linkedin_mcp_server/tools/company.py | 5 +- linkedin_mcp_server/tools/job.py | 5 +- linkedin_mcp_server/tools/person.py | 5 +- main.py | 91 ++++++----- manifest.json | 2 +- 24 files changed, 558 insertions(+), 297 deletions(-) create mode 100644 linkedin_mcp_server/config/messages.py diff --git a/Dockerfile b/Dockerfile index 1d1d57d9..5fcbb3a0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -25,5 +25,5 @@ RUN adduser -D -u 1000 mcpuser && chown -R mcpuser:mcpuser /app USER mcpuser # Set entrypoint and default arguments -ENTRYPOINT ["uv", "run", "python", "main.py", "--no-setup"] +ENTRYPOINT ["uv", "run", "main.py"] CMD [] diff --git a/README.md b/README.md index db836916..7ee0a3ba 100644 --- a/README.md +++ b/README.md @@ -52,13 +52,12 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c "linkedin": { "command": "docker", "args": [ - "run", "-i", "--rm", + "run", "--rm", "-e", "LINKEDIN_COOKIE", - "stickerdaniel/linkedin-mcp-server", - "--no-setup" + "stickerdaniel/linkedin-mcp-server" ], "env": { - "LINKEDIN_COOKIE": "XXXXXX...", + "LINKEDIN_COOKIE": "XXXXXX..." 
} } } @@ -71,9 +70,7 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c **Run the server with the `--get-cookie` flag:** ```bash -docker run -i --rm \ - -e LINKEDIN_EMAIL="your.email@example.com" \ - -e LINKEDIN_PASSWORD="your_password" \ +docker run -it --rm \ stickerdaniel/linkedin-mcp-server \ --get-cookie ``` @@ -103,8 +100,8 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c - **Streamable HTTP**: For a web-based MCP server **CLI Options:** -- `--no-setup` - Skip interactive prompts (required for Docker/non-interactive environments) -- `--debug` - Enable detailed logging +- `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) +- `--debug` - Enable debug logging (equivalent to --log-level DEBUG) - `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call - `--transport {stdio,streamable-http}` - Set transport mode - `--host HOST` - HTTP server host (default: 127.0.0.1) @@ -115,12 +112,12 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c **HTTP Mode Example (for web-based MCP clients):** ```bash -docker run -i --rm \ +docker run -it --rm \ -e LINKEDIN_EMAIL="your.email@example.com" \ -e LINKEDIN_PASSWORD="your_password" \ -p 8080:8080 \ stickerdaniel/linkedin-mcp-server \ - --no-setup --transport streamable-http --host 0.0.0.0 --port 8080 --path /mcp + --transport streamable-http --host 0.0.0.0 --port 8080 --path /mcp ``` **Test with mcp inspector:** 1. 
Install and run mcp inspector ```bunx @modelcontextprotocol/inspector``` @@ -164,9 +161,7 @@ docker run -i --rm \ **Run the server with the `--get-cookie` flag:** ```bash -docker run -i --rm \ - -e LINKEDIN_EMAIL="your.email@example.com" \ - -e LINKEDIN_PASSWORD="your_password" \ +docker run -it --rm \ stickerdaniel/linkedin-mcp-server \ --get-cookie ``` @@ -247,8 +242,8 @@ uv run main.py --no-headless --no-lazy-init **CLI Options:** - `--no-headless` - Show browser window (debugging) -- `--debug` - Enable detailed logging -- `--no-setup` - Skip credential prompts (make sure to set `LINKEDIN_COOKIE` or `LINKEDIN_EMAIL` and `LINKEDIN_PASSWORD` in env or that you run the server once manually, so the authentication is stored in your OS keychain and you can run the server without credentials) +- `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) +- `--debug` - Enable debug logging (equivalent to --log-level DEBUG) - `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call - `--get-cookie` - Login with email and password and extract the LinkedIn cookie - `--clear-keychain` - Clear all stored LinkedIn credentials and cookies from system keychain @@ -261,7 +256,7 @@ uv run main.py --no-headless --no-lazy-init **HTTP Mode Example (for web-based MCP clients):** ```bash -uv run main.py --no-setup --transport streamable-http --host 127.0.0.1 --port 8000 --path /mcp +uv run main.py --transport streamable-http --host 127.0.0.1 --port 8000 --path /mcp ``` **Claude Desktop:** @@ -270,7 +265,7 @@ uv run main.py --no-setup --transport streamable-http --host 127.0.0.1 --port 80 "mcpServers": { "linkedin": { "command": "uv", - "args": ["--directory", "/path/to/linkedin-mcp-server", "run", "main.py", "--no-setup"] + "args": ["--directory", "/path/to/linkedin-mcp-server", "run", "main.py"] } } } @@ -284,7 +279,7 @@ uv run main.py --no-setup --transport streamable-http --host 127.0.0.1 --port 80 **Login/Scraping issues:** - 
Use `--no-headless` to see browser actions (captcha challenge, LinkedIn mobile app 2fa, ...) - Add `--no-lazy-init` to attempt to login to LinkedIn immediately instead of waiting for the first tool call -- Add `--debug` to see more detailed logging +- Add `--log-level DEBUG` to see more detailed logging - Make sure you have only one active LinkedIn session per cookie at a time. Trying to open multiple sessions with the same cookie will result in a cookie invalid error. E.g. if you have a logged in browser session with a docker container, you can't use the same cookie to login with the local setup while the docker container is running / session is not closed. **ChromeDriver issues:** diff --git a/linkedin_mcp_server/__init__.py b/linkedin_mcp_server/__init__.py index 230ec7ec..30c9a9f1 100644 --- a/linkedin_mcp_server/__init__.py +++ b/linkedin_mcp_server/__init__.py @@ -1,4 +1,25 @@ # src/linkedin_mcp_server/__init__.py -"""LinkedIn MCP Server package.""" +""" +LinkedIn MCP Server package. + +A Model Context Protocol (MCP) server that provides LinkedIn integration capabilities +for AI assistants. This package enables secure LinkedIn profile, company, and job +data scraping through a standardized MCP interface. 
+ +Key Features: +- Secure LinkedIn authentication via session cookies +- LinkedIn profile, company, and job data scraping +- MCP-compliant server implementation using FastMCP +- Chrome WebDriver automation with session persistence +- Layered configuration system with secure credential storage +- Docker containerization for easy deployment +- Claude Desktop DXT extension support + +Architecture: +- Clean separation between authentication, driver management, and MCP server +- Singleton pattern for WebDriver session management +- Comprehensive error handling and logging +- Cross-platform compatibility (macOS, Windows, Linux) +""" __version__ = "1.0.0" diff --git a/linkedin_mcp_server/authentication.py b/linkedin_mcp_server/authentication.py index 7d54b528..0d94d1a8 100644 --- a/linkedin_mcp_server/authentication.py +++ b/linkedin_mcp_server/authentication.py @@ -1,13 +1,16 @@ # linkedin_mcp_server/authentication.py """ -Pure authentication module for LinkedIn MCP Server. +Pure authentication logic for LinkedIn MCP Server. -This module handles authentication without any driver dependencies. +Handles LinkedIn session cookie management with secure storage and retrieval. +Provides layered authentication resolution from configuration, keyring, and user input. +Implements proper error handling with context-aware messaging. """ import logging from linkedin_mcp_server.config import get_config +from linkedin_mcp_server.config.messages import ErrorMessages, InfoMessages from linkedin_mcp_server.config.providers import ( clear_cookie_from_keyring, get_cookie_from_keyring, @@ -22,28 +25,6 @@ logger = logging.getLogger(__name__) -def has_authentication() -> bool: - """ - Check if authentication is available without triggering setup. 
- - Returns: - bool: True if authentication (cookie) is available, False otherwise - """ - config = get_config() - - # Check environment variable - if config.linkedin.cookie: - return True - - # Check keyring if enabled - if config.linkedin.use_keyring: - cookie = get_cookie_from_keyring() - if cookie: - return True - - return False - - def get_authentication() -> str: """ Get LinkedIn cookie from available sources. @@ -56,17 +37,16 @@ def get_authentication() -> str: """ config = get_config() - # First, try environment variable + # First, try environment variable or command line if config.linkedin.cookie: - logger.info("Using LinkedIn cookie from environment") + logger.info(InfoMessages.using_cookie_from("configuration")) return config.linkedin.cookie - # Second, try keyring if enabled - if config.linkedin.use_keyring: - cookie = get_cookie_from_keyring() - if cookie: - logger.info("Using LinkedIn cookie from keyring") - return cookie + # Second, try keyring + cookie = get_cookie_from_keyring() + if cookie: + logger.info(InfoMessages.using_cookie_from("keyring")) + return cookie # No authentication available raise CredentialsNotFoundError("No LinkedIn cookie found") @@ -82,18 +62,12 @@ def store_authentication(cookie: str) -> bool: Returns: bool: True if storage was successful, False otherwise """ - config = get_config() - - if config.linkedin.use_keyring: - success = save_cookie_to_keyring(cookie) - if success: - logger.info("Cookie stored securely in keyring") - else: - logger.warning("Could not store cookie in system keyring") - return success + success = save_cookie_to_keyring(cookie) + if success: + logger.info(InfoMessages.cookie_stored_securely()) else: - logger.info("Keyring disabled, cookie not stored") - return False + logger.warning(InfoMessages.keyring_storage_failed()) + return success def clear_authentication() -> bool: @@ -103,54 +77,20 @@ def clear_authentication() -> bool: Returns: bool: True if clearing was successful, False otherwise """ - 
config = get_config() - - if config.linkedin.use_keyring: - success = clear_cookie_from_keyring() - if success: - logger.info("Authentication cleared from keyring") - else: - logger.warning("Could not clear authentication from keyring") - return success + success = clear_cookie_from_keyring() + if success: + logger.info("Authentication cleared from keyring") else: - logger.info("Keyring disabled, nothing to clear") - return True - - -def validate_cookie_format(cookie: str) -> bool: - """ - Validate that the cookie has the expected format. - - Args: - cookie: Cookie string to validate - - Returns: - bool: True if cookie format is valid, False otherwise - """ - if not cookie: - return False - - # LinkedIn session cookies typically start with "li_at=" - if cookie.startswith("li_at=") and len(cookie) > MIN_COOKIE_LENGTH: - return True - - # Also accept raw cookie values (without li_at= prefix) - if ( - not cookie.startswith("li_at=") - and len(cookie) > MIN_RAW_COOKIE_LENGTH - and "=" not in cookie - ): - return True - - return False + logger.warning("Could not clear authentication from keyring") + return success def ensure_authentication() -> str: """ - Ensure authentication is available, raising clear error if not. + Ensure authentication is available with clear error messages. Returns: - str: LinkedIn session cookie + str: Valid LinkedIn session cookie Raises: CredentialsNotFoundError: If no authentication is available with clear instructions @@ -160,12 +100,6 @@ def ensure_authentication() -> str: except CredentialsNotFoundError: config = get_config() - if config.chrome.non_interactive: - raise CredentialsNotFoundError( - "No LinkedIn cookie found. Please provide cookie via " - "environment variable (LINKEDIN_COOKIE) or run with --get-cookie to obtain one." - ) - else: - raise CredentialsNotFoundError( - "No LinkedIn authentication found. Please run setup to configure authentication." 
- ) + raise CredentialsNotFoundError( + ErrorMessages.no_cookie_found(config.is_interactive) + ) diff --git a/linkedin_mcp_server/cli.py b/linkedin_mcp_server/cli.py index 98521293..efce3c7a 100644 --- a/linkedin_mcp_server/cli.py +++ b/linkedin_mcp_server/cli.py @@ -1,8 +1,9 @@ # src/linkedin_mcp_server/cli.py """ -CLI utilities for LinkedIn MCP server. +CLI utilities for LinkedIn MCP server configuration generation. -This module handles the command-line interface and configuration management. +Automatically generates Claude Desktop configuration with proper tool registration, +environment variables, and clipboard integration for seamless setup workflow. """ import json diff --git a/linkedin_mcp_server/config/__init__.py b/linkedin_mcp_server/config/__init__.py index 17e4e9ef..5e332645 100644 --- a/linkedin_mcp_server/config/__init__.py +++ b/linkedin_mcp_server/config/__init__.py @@ -1,4 +1,20 @@ # src/linkedin_mcp_server/config/__init__.py +""" +Configuration system initialization and management for LinkedIn MCP Server. + +This module provides the main configuration interface and implements the singleton +pattern for configuration management. It orchestrates the loading of configuration +from multiple sources and provides a unified API for accessing configuration +throughout the application. 
+ +Key Features: +- Singleton pattern for global configuration access +- Integration with all configuration providers and loaders +- Unified API for configuration access across the application +- Proper initialization and lifecycle management +- Support for configuration reloading and updates +""" + import logging from typing import Optional diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index 3f3f2414..b7d315dd 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -1,14 +1,57 @@ # src/linkedin_mcp_server/config/loaders.py +""" +Configuration loading and argument parsing for LinkedIn MCP Server. + +This module implements the layered configuration system that loads settings from +multiple sources in priority order: CLI arguments โ†’ environment variables โ†’ keyring +โ†’ defaults. It provides the main configuration loading logic and argument parsing +for the MCP server. + +Key Functions: +- Command-line argument parsing with comprehensive options +- Environment variable parsing with type conversion +- Integration with keyring providers for secure credential loading +- Chrome driver path auto-detection and validation +- Layered configuration with proper priority handling +""" + import argparse import logging import os -from typing import Optional - -from .providers import get_chromedriver_paths +import sys +from typing import Any, Dict, Optional + +from .providers import ( + get_chromedriver_paths, + get_cookie_from_keyring, + get_credentials_from_keyring, +) from .schema import AppConfig logger = logging.getLogger(__name__) +# Boolean value mappings for environment variable parsing +TRUTHY_VALUES = ("1", "true", "True", "yes", "Yes") +FALSY_VALUES = ("0", "false", "False", "no", "No") + + +class EnvironmentKeys: + """Environment variable names used by the application.""" + + # LinkedIn configuration + LINKEDIN_EMAIL = "LINKEDIN_EMAIL" + LINKEDIN_PASSWORD = "LINKEDIN_PASSWORD" + 
LINKEDIN_COOKIE = "LINKEDIN_COOKIE" + + # Chrome configuration + CHROMEDRIVER = "CHROMEDRIVER" + HEADLESS = "HEADLESS" + + # Server configuration + LOG_LEVEL = "LOG_LEVEL" + LAZY_INIT = "LAZY_INIT" + TRANSPORT = "TRANSPORT" + def find_chromedriver() -> Optional[str]: """Find the ChromeDriver executable in common locations.""" @@ -25,50 +68,83 @@ def find_chromedriver() -> Optional[str]: return None +def is_interactive_environment() -> bool: + """ + Detect if running in an interactive environment (TTY). + + Returns: + bool: True if both stdin and stdout are TTY devices + """ + try: + return sys.stdin.isatty() and sys.stdout.isatty() + except (AttributeError, OSError): + # Handle cases where stdin/stdout might not have isatty() or fail + # This can happen in some containers, test environments, or non-standard setups + return False + + +def load_from_keyring(config: AppConfig) -> AppConfig: + """Load configuration from system keyring.""" + # Load LinkedIn cookie first (higher priority) + if cookie := get_cookie_from_keyring(): + config.linkedin.cookie = cookie + logger.debug("LinkedIn cookie loaded from keyring") + + # Load LinkedIn credentials if cookie not available + if not config.linkedin.cookie: + credentials = get_credentials_from_keyring() + if credentials["email"]: + config.linkedin.email = credentials["email"] + logger.debug("LinkedIn email loaded from keyring") + if credentials["password"]: + config.linkedin.password = credentials["password"] + logger.debug("LinkedIn password loaded from keyring") + + return config + + def load_from_env(config: AppConfig) -> AppConfig: """Load configuration from environment variables.""" + # LinkedIn credentials - if email := os.environ.get("LINKEDIN_EMAIL"): + if email := os.environ.get(EnvironmentKeys.LINKEDIN_EMAIL): config.linkedin.email = email - if password := os.environ.get("LINKEDIN_PASSWORD"): + if password := os.environ.get(EnvironmentKeys.LINKEDIN_PASSWORD): config.linkedin.password = password - if cookie := 
os.environ.get("LINKEDIN_COOKIE"): + if cookie := os.environ.get(EnvironmentKeys.LINKEDIN_COOKIE): config.linkedin.cookie = cookie # ChromeDriver configuration - if chromedriver := os.environ.get("CHROMEDRIVER"): + if chromedriver := os.environ.get(EnvironmentKeys.CHROMEDRIVER): config.chrome.chromedriver_path = chromedriver - # Debug mode - if os.environ.get("DEBUG") in ("1", "true", "True", "yes", "Yes"): - config.server.debug = True + # Log level + if log_level_env := os.environ.get(EnvironmentKeys.LOG_LEVEL): + log_level_upper = log_level_env.upper() + if log_level_upper in ("DEBUG", "INFO", "WARNING", "ERROR"): + config.server.log_level = log_level_upper # Headless mode - if os.environ.get("HEADLESS") in ("0", "false", "False", "no", "No"): + if os.environ.get(EnvironmentKeys.HEADLESS) in FALSY_VALUES: config.chrome.headless = False - elif os.environ.get("HEADLESS") in ("1", "true", "True", "yes", "Yes"): + elif os.environ.get(EnvironmentKeys.HEADLESS) in TRUTHY_VALUES: config.chrome.headless = True - # Non-interactive mode - if os.environ.get("NON_INTERACTIVE") in ("1", "true", "True", "yes", "Yes"): - config.chrome.non_interactive = True - # Lazy initialization - if os.environ.get("LAZY_INIT") in ("1", "true", "True", "yes", "Yes"): + if os.environ.get(EnvironmentKeys.LAZY_INIT) in TRUTHY_VALUES: config.server.lazy_init = True - elif os.environ.get("LAZY_INIT") in ("0", "false", "False", "no", "No"): + elif os.environ.get(EnvironmentKeys.LAZY_INIT) in FALSY_VALUES: config.server.lazy_init = False # Transport mode - if transport_env := os.environ.get("TRANSPORT"): + if transport_env := os.environ.get(EnvironmentKeys.TRANSPORT): + config.server.transport_explicitly_set = True if transport_env == "stdio": config.server.transport = "stdio" - config.server.transport_explicitly_set = True elif transport_env == "streamable-http": config.server.transport = "streamable-http" - config.server.transport_explicitly_set = True return config @@ -86,15 +162,16 @@ def 
load_from_args(config: AppConfig) -> AppConfig: ) parser.add_argument( - "--debug", - action="store_true", - help="Enable debug mode with additional logging", + "--log-level", + choices=["DEBUG", "INFO", "WARNING", "ERROR"], + help="Set logging level (default: WARNING)", ) + # Keep --debug for backward compatibility (maps to DEBUG level) parser.add_argument( - "--no-setup", + "--debug", action="store_true", - help="Skip printing configuration information and interactive setup", + help="Enable debug logging (equivalent to --log-level DEBUG)", ) parser.add_argument( @@ -161,14 +238,12 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.no_headless: config.chrome.headless = False - if args.debug: - config.server.debug = True - - if args.no_setup: - config.server.setup = False - config.chrome.non_interactive = ( - True # Automatically set when --no-setup is used - ) + # Handle log level arguments + if args.log_level: + config.server.log_level = args.log_level + elif args.debug: + # Backward compatibility: --debug maps to DEBUG level + config.server.log_level = "DEBUG" if args.no_lazy_init: config.server.lazy_init = False @@ -189,9 +264,9 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.chromedriver: config.chrome.chromedriver_path = args.chromedriver - if hasattr(args, "get_cookie") and args.get_cookie: + if args.get_cookie: config.server.get_cookie = True - if hasattr(args, "clear_keychain") and args.clear_keychain: + if args.clear_keychain: config.server.clear_keychain = True if args.cookie: config.linkedin.cookie = args.cookie @@ -199,20 +274,54 @@ def load_from_args(config: AppConfig) -> AppConfig: return config +def detect_environment() -> Dict[str, Any]: + """ + Detect environment settings without side effects. 
+ + Returns: + Dict containing detected environment settings + """ + return { + "chromedriver_path": find_chromedriver(), + "is_interactive": is_interactive_environment(), + } + + def load_config() -> AppConfig: """ - Load configuration from all sources with defined precedence: + Load configuration with clear precedence order. + + Configuration is loaded in the following priority order: 1. Command line arguments (highest priority) 2. Environment variables - 3. Default values and auto-detection (lowest priority) + 3. System keyring + 4. Auto-detection (ChromeDriver, interactive mode) + 5. Defaults (lowest priority) + + Returns: + AppConfig: Fully configured application settings + + Raises: + ConfigurationError: If configuration validation fails """ # Start with default configuration config = AppConfig() - # Auto-detect ChromeDriver path - if chromedriver_path := find_chromedriver(): - config.chrome.chromedriver_path = chromedriver_path - logger.debug(f"Auto-detected ChromeDriver at: {chromedriver_path}") + # Apply environment detection + env_settings = detect_environment() + + # Set detected values if not already configured + if env_settings["chromedriver_path"] and not config.chrome.chromedriver_path: + config.chrome.chromedriver_path = env_settings["chromedriver_path"] + logger.debug( + f"Auto-detected ChromeDriver found at: {env_settings['chromedriver_path']}" + ) + + config.is_interactive = env_settings["is_interactive"] + logger.debug(f"Auto-detected interactive mode: {config.is_interactive}") + + # Load from keyring (lowest override priority) + config = load_from_keyring(config) # Override with environment variables config = load_from_env(config) diff --git a/linkedin_mcp_server/config/messages.py b/linkedin_mcp_server/config/messages.py new file mode 100644 index 00000000..2008530e --- /dev/null +++ b/linkedin_mcp_server/config/messages.py @@ -0,0 +1,101 @@ +# linkedin_mcp_server/config/messages.py +""" +Centralized message formatting for consistent user 
communication across contexts. + +Provides structured error and informational messages with context-aware formatting +for interactive vs non-interactive modes and different authentication scenarios. +""" + + +class ErrorMessages: + """Centralized error message formatting for consistent communication.""" + + @staticmethod + def no_cookie_found(is_interactive: bool) -> str: + """ + Generate appropriate error message when no LinkedIn cookie is found. + + Args: + is_interactive: Whether the application is running in interactive mode + + Returns: + str: Formatted error message with appropriate instructions + """ + if is_interactive: + return "No LinkedIn authentication found. Please run setup to configure authentication." + else: + return ( + "No LinkedIn cookie found. You can:\n" + " 1. Run with --get-cookie to extract a cookie using email/password\n" + " 2. Set LINKEDIN_COOKIE environment variable with a valid LinkedIn session cookie" + ) + + @staticmethod + def no_credentials_found() -> str: + """Error message when credentials are required but not found.""" + return ( + "No LinkedIn credentials found. Please provide credentials via " + "environment variables (LINKEDIN_EMAIL, LINKEDIN_PASSWORD) for setup." + ) + + @staticmethod + def invalid_cookie_format(cookie_sample: str) -> str: + """ + Error message for invalid cookie format. + + Args: + cookie_sample: Sample of the invalid cookie (truncated for security) + + Returns: + str: Formatted error message + """ + # Only show first 20 characters for security + safe_sample = ( + cookie_sample[:20] + "..." if len(cookie_sample) > 20 else cookie_sample + ) + return ( + f"Invalid LinkedIn cookie format: '{safe_sample}'. " + "Cookie should be a LinkedIn session token (li_at=...) or raw token value." + ) + + @staticmethod + def authentication_setup_instructions() -> str: + """Instructions for setting up authentication.""" + return ( + "To set up LinkedIn authentication:\n" + " 1. 
Run with --get-cookie flag to extract a session cookie\n" + " 2. Or set LINKEDIN_COOKIE environment variable\n" + " 3. Or run interactively to enter credentials" + ) + + +class InfoMessages: + """Centralized informational message formatting.""" + + @staticmethod + def credentials_stored_securely() -> str: + """Message when credentials are successfully stored.""" + return "Credentials stored securely in system keyring" + + @staticmethod + def cookie_stored_securely() -> str: + """Message when cookie is successfully stored.""" + return "Cookie stored securely in system keyring" + + @staticmethod + def keyring_storage_failed() -> str: + """Warning when keyring storage fails.""" + return "Could not store credentials in system keyring" + + @staticmethod + def using_cookie_from(source: str) -> str: + """ + Message indicating cookie source. + + Args: + source: Source of the cookie (e.g., "environment", "keyring", "configuration") + + Returns: + str: Formatted message + """ + return f"Using LinkedIn cookie from {source}" diff --git a/linkedin_mcp_server/config/providers.py b/linkedin_mcp_server/config/providers.py index 8de2e1e8..0a05c7c1 100644 --- a/linkedin_mcp_server/config/providers.py +++ b/linkedin_mcp_server/config/providers.py @@ -1,8 +1,23 @@ # src/linkedin_mcp_server/config/providers.py -from typing import Dict, Optional, List +""" +Configuration providers for LinkedIn MCP Server. + +This module provides secure credential storage and retrieval using the system keyring, +as well as utility functions for Chrome driver path detection. It abstracts the +complexity of different keyring backends across macOS, Windows, and Linux. 
+ +Key Functions: +- System keyring integration for LinkedIn credentials and cookies +- Chrome driver path detection across different operating systems +- Secure credential management with proper error handling +- Cross-platform compatibility with appropriate keyring backends +""" + +import logging import os import platform -import logging +from typing import Dict, List, Optional + import keyring from keyring.errors import KeyringError diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index 501ac96c..07fb4f53 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -1,6 +1,27 @@ # src/linkedin_mcp_server/config/schema.py +""" +Configuration schema definitions for LinkedIn MCP Server. + +This module defines the dataclass schemas that represent the application's configuration +structure. It provides type-safe configuration objects with validation and default values +for all aspects of the server including Chrome driver settings, LinkedIn credentials, +and MCP server parameters. 
+ +Key Components: +- ChromeConfig: Chrome driver and browser configuration +- LinkedInConfig: LinkedIn authentication and connection settings +- ServerConfig: MCP server transport and operational settings +- AppConfig: Main application configuration combining all components +""" + from dataclasses import dataclass, field -from typing import Optional, List, Literal +from typing import List, Literal, Optional + + +class ConfigurationError(Exception): + """Raised when configuration validation fails.""" + + pass @dataclass @@ -10,7 +31,6 @@ class ChromeConfig: headless: bool = True chromedriver_path: Optional[str] = None browser_args: List[str] = field(default_factory=list) - non_interactive: bool = False @dataclass @@ -20,7 +40,6 @@ class LinkedInConfig: email: Optional[str] = None password: Optional[str] = None cookie: Optional[str] = None - use_keyring: bool = True @dataclass @@ -30,8 +49,7 @@ class ServerConfig: transport: Literal["stdio", "streamable-http"] = "stdio" transport_explicitly_set: bool = False # Track if transport was explicitly set lazy_init: bool = True - debug: bool = False - setup: bool = True + log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR"] = "WARNING" get_cookie: bool = False clear_keychain: bool = False # HTTP transport configuration @@ -47,3 +65,37 @@ class AppConfig: chrome: ChromeConfig = field(default_factory=ChromeConfig) linkedin: LinkedInConfig = field(default_factory=LinkedInConfig) server: ServerConfig = field(default_factory=ServerConfig) + is_interactive: bool = field(default=False) + + def __post_init__(self) -> None: + """Validate configuration after initialization.""" + self._validate_transport_config() + self._validate_port_range() + self._validate_path_format() + + def _validate_transport_config(self) -> None: + """Validate transport configuration is consistent.""" + if self.server.transport == "streamable-http": + if not self.server.host: + raise ConfigurationError("HTTP transport requires a valid host") + if not 
self.server.port: + raise ConfigurationError("HTTP transport requires a valid port") + + def _validate_port_range(self) -> None: + """Validate port is in valid range.""" + if not (1 <= self.server.port <= 65535): + raise ConfigurationError( + f"Port {self.server.port} is not in valid range (1-65535)" + ) + + def _validate_path_format(self) -> None: + """Validate path format for HTTP transport.""" + if self.server.transport == "streamable-http": + if not self.server.path.startswith("/"): + raise ConfigurationError( + f"HTTP path '{self.server.path}' must start with '/'" + ) + if len(self.server.path) < 2: + raise ConfigurationError( + f"HTTP path '{self.server.path}' must be at least 2 characters" + ) diff --git a/linkedin_mcp_server/config/secrets.py b/linkedin_mcp_server/config/secrets.py index c01dc1ba..be7a2bd1 100644 --- a/linkedin_mcp_server/config/secrets.py +++ b/linkedin_mcp_server/config/secrets.py @@ -1,14 +1,25 @@ # src/linkedin_mcp_server/config/secrets.py +""" +Interactive credential prompting and secure storage for LinkedIn MCP Server. + +This module handles interactive credential collection from users and securely stores +them in the system keyring. It provides a user-friendly interface for credential +input while ensuring security through proper keyring integration. 
+ +Key Functions: +- Interactive credential prompting with secure password input +- Automatic storage of credentials in system keyring +- User-friendly error handling and feedback +- Integration with the keyring providers for secure storage +""" + import logging from typing import Dict import inquirer # type: ignore -from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.exceptions import CredentialsNotFoundError from .providers import ( - get_credentials_from_keyring, get_keyring_name, save_credentials_to_keyring, ) @@ -16,33 +27,6 @@ logger = logging.getLogger(__name__) -def get_credentials() -> Dict[str, str]: - """Get LinkedIn credentials from config, keyring, or prompt (legacy for --get-cookie).""" - config = get_config() - - # First, try configuration (includes environment variables) - if config.linkedin.email and config.linkedin.password: - logger.info("Using LinkedIn credentials from configuration") - return {"email": config.linkedin.email, "password": config.linkedin.password} - - # Second, try keyring if enabled - if config.linkedin.use_keyring: - credentials = get_credentials_from_keyring() - if credentials["email"] and credentials["password"]: - logger.info(f"Using LinkedIn credentials from {get_keyring_name()}") - return {"email": credentials["email"], "password": credentials["password"]} - - # If in non-interactive mode and no credentials found, raise error - if config.chrome.non_interactive: - raise CredentialsNotFoundError( - "No LinkedIn credentials found. Please provide credentials via " - "environment variables (LINKEDIN_EMAIL, LINKEDIN_PASSWORD) or keyring." 
- ) - - # Otherwise, prompt for credentials - return prompt_for_credentials() - - def prompt_for_credentials() -> Dict[str, str]: """Prompt user for LinkedIn credentials and store them securely.""" print(f"๐Ÿ”‘ LinkedIn credentials required (will be stored in {get_keyring_name()})") @@ -50,16 +34,15 @@ def prompt_for_credentials() -> Dict[str, str]: inquirer.Text("email", message="LinkedIn Email"), inquirer.Password("password", message="LinkedIn Password"), ] - credentials = inquirer.prompt(questions) + credentials: Dict[str, str] = inquirer.prompt(questions) if not credentials: raise KeyboardInterrupt("Credential input was cancelled") # Store credentials securely in keyring if save_credentials_to_keyring(credentials["email"], credentials["password"]): - logger.info(f"Credentials stored securely in {get_keyring_name()}") + logger.info("Credentials stored securely in keyring") else: - logger.warning("Could not store credentials in system keyring.") - logger.info("Your credentials will only be used for this session.") + logger.warning("Could not store credentials in system keyring") return credentials diff --git a/linkedin_mcp_server/drivers/__init__.py b/linkedin_mcp_server/drivers/__init__.py index e93f62bb..3d123cab 100644 --- a/linkedin_mcp_server/drivers/__init__.py +++ b/linkedin_mcp_server/drivers/__init__.py @@ -1,2 +1,16 @@ # src/linkedin_mcp_server/drivers/__init__.py -"""Driver management for LinkedIn scraping.""" +""" +Driver management package for LinkedIn scraping. + +This package provides Chrome WebDriver management and automation capabilities +for LinkedIn scraping. It implements a singleton pattern for driver instances +to ensure session persistence across multiple tool calls while handling +authentication, session management, and proper resource cleanup. 
+ +Key Components: +- Chrome WebDriver initialization and configuration +- LinkedIn authentication and session management +- Singleton pattern for driver reuse across tools +- Automatic driver cleanup and resource management +- Cross-platform Chrome driver detection and setup +""" diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 902ecb0b..82a2eb40 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -1,9 +1,10 @@ # linkedin_mcp_server/drivers/chrome.py """ -Chrome driver management for LinkedIn scraping. +Chrome WebDriver management for LinkedIn scraping with session persistence. -This module handles the creation and management of Chrome WebDriver instances. -Simplified to focus only on driver management without authentication setup. +Handles Chrome WebDriver creation, configuration, authentication, and lifecycle management. +Implements singleton pattern for driver reuse across tools with automatic cleanup. +Provides cookie-based authentication and comprehensive error handling. """ import logging diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index 745f306e..e2f4cc02 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -1,8 +1,10 @@ +# src/linkedin_mcp_server/error_handler.py """ -Centralized error handling for LinkedIn MCP Server tools. +Centralized error handling for LinkedIn MCP Server with structured responses. -This module provides a DRY approach to error handling across all tools, -eliminating code duplication and ensuring consistent error responses. +Provides DRY approach to error handling across all tools with consistent MCP response +format, specific LinkedIn error categorization, and proper logging integration. +Eliminates code duplication while ensuring user-friendly error messages. 
""" import logging diff --git a/linkedin_mcp_server/exceptions.py b/linkedin_mcp_server/exceptions.py index 4f5799fb..d3f48425 100644 --- a/linkedin_mcp_server/exceptions.py +++ b/linkedin_mcp_server/exceptions.py @@ -1,8 +1,10 @@ +# src/linkedin_mcp_server/exceptions.py """ -Custom exceptions for LinkedIn MCP Server. +Custom exceptions for LinkedIn MCP Server with specific error categorization. -This module defines specific exception types for different error scenarios -to provide better error handling and reporting to MCP clients. +Defines hierarchical exception types for different error scenarios including +authentication failures, driver initialization issues, and MCP client reporting. +Provides structured error handling for better debugging and user experience. """ diff --git a/linkedin_mcp_server/logging_config.py b/linkedin_mcp_server/logging_config.py index c5871a24..35e0a735 100644 --- a/linkedin_mcp_server/logging_config.py +++ b/linkedin_mcp_server/logging_config.py @@ -1,8 +1,10 @@ +# linkedin_mcp_server/logging_config.py """ -Logging configuration for LinkedIn MCP Server. +Logging configuration for LinkedIn MCP Server with format options. -This module provides structured JSON logging for better integration -with MCP clients and monitoring systems. +Provides JSON and compact logging formats for different deployment scenarios. +JSON format for production MCP integration, compact format for development. +Includes proper logger hierarchy and external library noise reduction. """ import json @@ -77,15 +79,15 @@ def format(self, record: logging.LogRecord) -> str: return f"{record_copy.asctime} - {record_copy.name} - {record.levelname} - {record.getMessage()}" -def configure_logging(debug: bool = False, json_format: bool = False) -> None: +def configure_logging(log_level: str = "WARNING", json_format: bool = False) -> None: """Configure logging for the LinkedIn MCP Server. 
Args: - debug: Whether to enable debug logging + log_level: Logging level (DEBUG, INFO, WARNING, ERROR) json_format: Whether to use JSON formatting for logs """ - # Set end-user appropriate logging level: WARNING for production, DEBUG for debug mode - log_level = logging.DEBUG if debug else logging.WARNING + # Convert string to logging level + numeric_level = getattr(logging, log_level.upper(), logging.WARNING) if json_format: formatter = MCPJSONFormatter() @@ -94,7 +96,7 @@ def configure_logging(debug: bool = False, json_format: bool = False) -> None: # Configure root logger root_logger = logging.getLogger() - root_logger.setLevel(log_level) + root_logger.setLevel(numeric_level) # Remove existing handlers for handler in root_logger.handlers[:]: diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index 601ba322..7f7370a2 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -1,8 +1,10 @@ # src/linkedin_mcp_server/server.py """ -MCP server setup for LinkedIn integration. +FastMCP server implementation for LinkedIn integration with tool registration. -This module creates the MCP server and registers all the LinkedIn tools. +Creates and configures the MCP server with comprehensive LinkedIn tool suite including +person profiles, company data, job information, and session management capabilities. +Provides clean shutdown handling and resource cleanup. """ import logging diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index 7a7ec3e4..408478e0 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -1,8 +1,10 @@ # linkedin_mcp_server/setup.py """ -Interactive setup module for LinkedIn MCP Server. +Interactive setup flows for LinkedIn MCP Server authentication configuration. -This module handles interactive setup flows and authentication configuration. 
+Handles credential collection, cookie extraction, validation, and secure storage +with multiple authentication methods including cookie input and credential-based login. +Provides temporary driver management and comprehensive retry logic. """ import logging @@ -14,6 +16,7 @@ from linkedin_mcp_server.authentication import store_authentication from linkedin_mcp_server.config import get_config +from linkedin_mcp_server.config.messages import ErrorMessages, InfoMessages from linkedin_mcp_server.config.providers import ( get_credentials_from_keyring, save_credentials_to_keyring, @@ -41,19 +44,15 @@ def get_credentials_for_setup() -> Dict[str, str]: logger.info("Using LinkedIn credentials from configuration") return {"email": config.linkedin.email, "password": config.linkedin.password} - # Second, try keyring if enabled - if config.linkedin.use_keyring: - credentials = get_credentials_from_keyring() - if credentials["email"] and credentials["password"]: - logger.info("Using LinkedIn credentials from keyring") - return {"email": credentials["email"], "password": credentials["password"]} + # Second, try keyring + credentials = get_credentials_from_keyring() + if credentials["email"] and credentials["password"]: + logger.info("Using LinkedIn credentials from keyring") + return {"email": credentials["email"], "password": credentials["password"]} # If in non-interactive mode and no credentials found, raise error - if config.chrome.non_interactive: - raise CredentialsNotFoundError( - "No LinkedIn credentials found. Please provide credentials via " - "environment variables (LINKEDIN_EMAIL, LINKEDIN_PASSWORD) for setup." 
- ) + if not config.is_interactive: + raise CredentialsNotFoundError(ErrorMessages.no_credentials_found()) # Otherwise, prompt for credentials return prompt_for_credentials() @@ -69,8 +68,6 @@ def prompt_for_credentials() -> Dict[str, str]: Raises: KeyboardInterrupt: If user cancels input """ - config: AppConfig = get_config() - print("๐Ÿ”‘ LinkedIn credentials required for setup") questions = [ inquirer.Text("email", message="LinkedIn Email"), @@ -81,12 +78,11 @@ def prompt_for_credentials() -> Dict[str, str]: if not credentials: raise KeyboardInterrupt("Credential input was cancelled") - # Store credentials securely in keyring if enabled - if config.linkedin.use_keyring: - if save_credentials_to_keyring(credentials["email"], credentials["password"]): - logger.info("Credentials stored securely in keyring") - else: - logger.warning("Could not store credentials in system keyring") + # Store credentials securely in keyring + if save_credentials_to_keyring(credentials["email"], credentials["password"]): + logger.info(InfoMessages.credentials_stored_securely()) + else: + logger.warning(InfoMessages.keyring_storage_failed()) return credentials @@ -133,7 +129,7 @@ def capture_cookie_from_credentials(email: str, password: str) -> str: from linkedin_scraper import actions config: AppConfig = get_config() - interactive: bool = not config.chrome.non_interactive + interactive: bool = config.is_interactive logger.info(f"Logging in to LinkedIn... Interactive: {interactive}") actions.login( driver, diff --git a/linkedin_mcp_server/tools/__init__.py b/linkedin_mcp_server/tools/__init__.py index 23416850..aa5b9088 100644 --- a/linkedin_mcp_server/tools/__init__.py +++ b/linkedin_mcp_server/tools/__init__.py @@ -1,2 +1,19 @@ # src/linkedin_mcp_server/tools/__init__.py -"""Tools for LinkedIn scraping.""" +""" +LinkedIn scraping tools package. + +This package contains the MCP tool implementations for LinkedIn data extraction. 
+Each tool module provides specific functionality for different LinkedIn entities +while sharing common error handling and driver management patterns. + +Available Tools: +- Person tools: LinkedIn profile scraping and analysis +- Company tools: Company profile and information extraction +- Job tools: Job posting details and search functionality + +Architecture: +- FastMCP integration for MCP-compliant tool registration +- Shared error handling through centralized error_handler module +- Singleton driver pattern for session persistence +- Structured data return format for consistent MCP responses +""" diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index f3a3810c..f986f140 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -1,8 +1,9 @@ # src/linkedin_mcp_server/tools/company.py """ -Company profile tools for LinkedIn MCP server. +LinkedIn company profile scraping tools with employee data extraction. -This module provides tools for scraping LinkedIn company profiles. +Provides MCP tools for extracting company information, employee lists, and company +insights from LinkedIn with configurable depth and comprehensive error handling. """ import logging diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 1ad04e12..81794868 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -1,8 +1,9 @@ # src/linkedin_mcp_server/tools/job.py """ -Job tools for LinkedIn MCP server. +LinkedIn job scraping tools with search and detail extraction capabilities. -This module provides tools for scraping LinkedIn job postings and searches. +Provides MCP tools for job posting details, job searches, and recommendations +with comprehensive filtering and structured data extraction. 
""" import logging diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 6a24f8c5..750bb003 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -1,8 +1,9 @@ # src/linkedin_mcp_server/tools/person.py """ -Person profile tools for LinkedIn MCP server. +LinkedIn person profile scraping tools with structured data extraction. -This module provides tools for scraping LinkedIn person profiles. +Provides MCP tools for extracting comprehensive LinkedIn profile information including +experience, education, skills, and contact details with proper error handling. """ import logging diff --git a/main.py b/main.py index f5e71265..51ed93b1 100644 --- a/main.py +++ b/main.py @@ -1,11 +1,12 @@ # main.py """ -LinkedIn MCP Server - A Model Context Protocol server for LinkedIn integration. +LinkedIn MCP Server - Main application entry point. + +Implements a three-phase startup: +1. Authentication Setup Phase - Credential validation and session establishment +2. Driver Management Phase - Chrome WebDriver initialization with LinkedIn login +3. Server Runtime Phase - MCP server startup with transport selection -Clean architecture with clear phase separation: -1. Authentication Setup Phase -2. Driver Management Phase -3. 
Server Runtime Phase """ import logging @@ -23,15 +24,11 @@ TwoFactorAuthError, ) -from linkedin_mcp_server.authentication import ( - ensure_authentication, - has_authentication, -) from linkedin_mcp_server.cli import print_claude_config from linkedin_mcp_server.config import ( - get_config, - clear_all_keychain_data, check_keychain_data_exists, + clear_all_keychain_data, + get_config, get_keyring_name, ) from linkedin_mcp_server.config.schema import AppConfig @@ -46,7 +43,7 @@ def should_suppress_stdout(config: AppConfig) -> bool: """Check if stdout should be suppressed to avoid interfering with MCP stdio protocol.""" - return not config.server.setup and config.server.transport == "stdio" + return not config.is_interactive and config.server.transport == "stdio" def choose_transport_interactive() -> Literal["stdio", "streamable-http"]: @@ -74,10 +71,10 @@ def clear_keychain_and_exit() -> None: """Clear LinkedIn keychain data and exit.""" config = get_config() - # Configure logging - prioritize debug mode over non_interactive + # Configure logging configure_logging( - debug=config.server.debug, - json_format=config.chrome.non_interactive and not config.server.debug, + log_level=config.server.log_level, + json_format=not config.is_interactive and config.server.log_level != "DEBUG", ) # Get version for logging @@ -153,10 +150,10 @@ def get_cookie_and_exit() -> None: """Get LinkedIn cookie and exit (for Docker setup).""" config = get_config() - # Configure logging - prioritize debug mode over non_interactive + # Configure logging configure_logging( - debug=config.server.debug, - json_format=config.chrome.non_interactive and not config.server.debug, + log_level=config.server.log_level, + json_format=not config.is_interactive and config.server.log_level != "DEBUG", ) # Get version for logging @@ -231,23 +228,20 @@ def ensure_authentication_ready() -> str: """ config = get_config() - # Check if authentication already exists - if has_authentication(): - try: - return 
ensure_authentication() - except CredentialsNotFoundError: - # Authentication exists but might be invalid, continue to setup - pass + # Check if we already have a cookie in config (from keyring, env, or args) + if config.linkedin.cookie: + logger.info("Using LinkedIn cookie from configuration") + return config.linkedin.cookie - # If in non-interactive mode and no auth, fail immediately - if config.chrome.non_interactive: + # If in non-interactive mode and no cookie, fail immediately + if not config.is_interactive: raise CredentialsNotFoundError( "No LinkedIn cookie found for non-interactive mode. You can:\n" - " 1. Set LINKEDIN_COOKIE environment variable with a valid LinkedIn session cookie\n" - " 2. Run with --get-cookie to extract a cookie using email/password" + " 1. Run with --get-cookie to extract a cookie using email/password\n" + " 2. Set LINKEDIN_COOKIE environment variable with a valid LinkedIn session cookie" ) - # Run interactive setup + # Run interactive setup to get credentials and obtain cookie logger.info("Setting up LinkedIn authentication...") return run_interactive_setup() @@ -285,8 +279,8 @@ def initialize_driver_with_auth(authentication: str) -> None: def get_version() -> str: """Get version from pyproject.toml.""" try: - import tomllib import os + import tomllib pyproject_path = os.path.join(os.path.dirname(__file__), "pyproject.toml") with open(pyproject_path, "rb") as f: @@ -298,12 +292,6 @@ def get_version() -> str: def main() -> None: """Main application entry point with clear phase separation.""" - # Get version - version = get_version() - - logger.info(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") - print(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") - print("=" * 40) # Get configuration config = get_config() @@ -312,6 +300,13 @@ def main() -> None: if should_suppress_stdout(config): sys.stdout = open(os.devnull, "w") + # Get version + version = get_version() + + logger.info(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") + print(f"๐Ÿ”— 
LinkedIn MCP Server v{version} ๐Ÿ”—") + print("=" * 40) + # Handle --clear-keychain flag immediately if config.server.clear_keychain: clear_keychain_and_exit() @@ -320,10 +315,10 @@ def main() -> None: if config.server.get_cookie: get_cookie_and_exit() - # Configure logging - prioritize debug mode over non_interactive + # Configure logging configure_logging( - debug=config.server.debug, - json_format=config.chrome.non_interactive and not config.server.debug, + log_level=config.server.log_level, + json_format=not config.is_interactive and config.server.log_level != "DEBUG", ) logger.debug(f"Server configuration: {config}") @@ -335,7 +330,7 @@ def main() -> None: logger.info("Authentication ready") except CredentialsNotFoundError as e: logger.error(f"Authentication setup failed: {e}") - if config.chrome.non_interactive: + if not config.is_interactive: print("\nโŒ LinkedIn cookie required for Docker/non-interactive mode") else: print( @@ -358,7 +353,7 @@ def main() -> None: # Cookie was already cleared in driver layer # In interactive mode, try setup again - if not config.chrome.non_interactive and config.server.setup: + if config.is_interactive: print(f"\nโŒ {str(e)}") print("๐Ÿ”„ Starting interactive setup for new authentication...") try: @@ -398,17 +393,17 @@ def main() -> None: transport = config.server.transport # Only show transport prompt if: - # a) we don't have --no-setup flag (config.server.setup is True) AND + # a) running in interactive environment AND # b) transport wasn't explicitly set via CLI/env - if config.server.setup and not config.server.transport_explicitly_set: + if config.is_interactive and not config.server.transport_explicitly_set: print("\n๐Ÿš€ Server ready! 
Choose transport mode:") transport = choose_transport_interactive() - elif not config.server.setup and not config.server.transport_explicitly_set: - # If we have --no-setup and no transport explicitly set, use default (stdio) + elif not config.is_interactive and not config.server.transport_explicitly_set: + # If non-interactive and no transport explicitly set, use default (stdio) transport = config.server.transport - # Print configuration for Claude if in setup mode and using stdio transport - if config.server.setup and transport == "stdio": + # Print configuration for Claude if in interactive mode and using stdio transport + if config.is_interactive and transport == "stdio": print_claude_config() # Create and run the MCP server diff --git a/manifest.json b/manifest.json index 0ec9d2f7..cb552f04 100644 --- a/manifest.json +++ b/manifest.json @@ -23,7 +23,7 @@ "mcp_config": { "command": "docker", "args": [ - "run", "-i", "--rm", + "run", "--rm", "-e", "LINKEDIN_COOKIE=${user_config.linkedin_cookie}", "stickerdaniel/linkedin-mcp-server:latest" ] From 6284a057b1880340fb2eb0b7d53613dcb1b4c51a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 05:08:47 -0400 Subject: [PATCH 176/565] refactor(config): remove deprecated debug argument --- README.md | 2 -- linkedin_mcp_server/config/loaders.py | 12 +----------- 2 files changed, 1 insertion(+), 13 deletions(-) diff --git a/README.md b/README.md index 7ee0a3ba..41b52b5e 100644 --- a/README.md +++ b/README.md @@ -101,7 +101,6 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c **CLI Options:** - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) -- `--debug` - Enable debug logging (equivalent to --log-level DEBUG) - `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call - `--transport {stdio,streamable-http}` - Set transport mode - `--host HOST` - HTTP server host (default: 127.0.0.1) @@ -243,7 +242,6 @@ uv run 
main.py --no-headless --no-lazy-init **CLI Options:** - `--no-headless` - Show browser window (debugging) - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) -- `--debug` - Enable debug logging (equivalent to --log-level DEBUG) - `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call - `--get-cookie` - Login with email and password and extract the LinkedIn cookie - `--clear-keychain` - Clear all stored LinkedIn credentials and cookies from system keychain diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index b7d315dd..d8fce518 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -167,13 +167,6 @@ def load_from_args(config: AppConfig) -> AppConfig: help="Set logging level (default: WARNING)", ) - # Keep --debug for backward compatibility (maps to DEBUG level) - parser.add_argument( - "--debug", - action="store_true", - help="Enable debug logging (equivalent to --log-level DEBUG)", - ) - parser.add_argument( "--no-lazy-init", action="store_true", @@ -238,12 +231,9 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.no_headless: config.chrome.headless = False - # Handle log level arguments + # Handle log level argument if args.log_level: config.server.log_level = args.log_level - elif args.debug: - # Backward compatibility: --debug maps to DEBUG level - config.server.log_level = "DEBUG" if args.no_lazy_init: config.server.lazy_init = False From cb523d255560da3f272616a2c98d1f652f80cf94 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 05:16:21 -0400 Subject: [PATCH 177/565] style(main): update server stop message emoji --- main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main.py b/main.py index 51ed93b1..c768e049 100644 --- a/main.py +++ b/main.py @@ -425,7 +425,7 @@ def main() -> None: mcp.run(transport=transport) except KeyboardInterrupt: - print("\n\n๐Ÿ‘‹ 
Server stopped by user") + print("\n\nโน๏ธ Server stopped by user") exit_gracefully(0) except Exception as e: logger.error(f"Server runtime error: {e}") From e119295d11d74cd38414ef79f42022dccf33eab3 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 05:24:11 -0400 Subject: [PATCH 178/565] chore(version): bump version to 1.2.0 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0640987e..b31a621d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.1.7" +version = "1.2.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 78ee4954..02e32c9d 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.1.7" +version = "1.2.0" source = { virtual = "." 
} dependencies = [ { name = "fastmcp" }, From 0c3a6cc313cb0a57b572f8c61d6746d5f6c0c767 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 8 Jul 2025 09:24:40 +0000 Subject: [PATCH 179/565] chore(dxt): update manifest.json version to v1.2.0 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index cb552f04..ec66d4f4 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.1.7", + "version": "1.2.0", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From 1d104b5d29be9b4a02f0d14170127b6d9b31c360 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 05:38:01 -0400 Subject: [PATCH 180/565] fix(main): logging and output for claude desktop --- main.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/main.py b/main.py index c768e049..de21d2e5 100644 --- a/main.py 
+++ b/main.py @@ -293,20 +293,23 @@ def get_version() -> str: def main() -> None: """Main application entry point with clear phase separation.""" - # Get configuration + # Get configuration (this sets config.is_interactive) config = get_config() # Suppress stdout if running in MCP stdio mode to avoid interfering with JSON-RPC protocol if should_suppress_stdout(config): sys.stdout = open(os.devnull, "w") - # Get version + # Get version for logging/display version = get_version() - logger.info(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") + # Print banner print(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") print("=" * 40) + # Always log version (this goes to stderr/logging, not stdout) + logger.info(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") + # Handle --clear-keychain flag immediately if config.server.clear_keychain: clear_keychain_and_exit() From 02aa4d840e8a402f241e4898db67e79a87331605 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 05:41:32 -0400 Subject: [PATCH 181/565] fix(main): correct server stop message formatting --- main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main.py b/main.py index de21d2e5..c17efc27 100644 --- a/main.py +++ b/main.py @@ -428,7 +428,7 @@ def main() -> None: mcp.run(transport=transport) except KeyboardInterrupt: - print("\n\nโน๏ธ Server stopped by user") + print("โน๏ธ Server stopped by user") exit_gracefully(0) except Exception as e: logger.error(f"Server runtime error: {e}") From a2f78d5c52f774dae6dd2a6fb8e560b02c980d28 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 05:44:45 -0400 Subject: [PATCH 182/565] fix(main): adjust spacing in server stop message --- main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main.py b/main.py index c17efc27..08130adf 100644 --- a/main.py +++ b/main.py @@ -428,7 +428,7 @@ def main() -> None: mcp.run(transport=transport) except KeyboardInterrupt: - print("โน๏ธ Server stopped by user") + print("โน๏ธ Server 
stopped by user") exit_gracefully(0) except Exception as e: logger.error(f"Server runtime error: {e}") From 4f77d0cf4266540573407f72ae5183ec9278dccf Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 05:45:16 -0400 Subject: [PATCH 183/565] chore(version): bump version to 1.2.1 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index b31a621d..6b39cf8f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.2.0" +version = "1.2.1" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 02e32c9d..ad01aaf6 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.2.0" +version = "1.2.1" source = { virtual = "." } dependencies = [ { name = "fastmcp" }, From e776fcb853ac6955a33f24a505a61c340e1ba94a Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 8 Jul 2025 09:45:38 +0000 Subject: [PATCH 184/565] chore(dxt): update manifest.json version to v1.2.1 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index ec66d4f4..0fadb566 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.2.0", + "version": "1.2.1", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From c3023c6d4453520eb7f42abafd379102c731d7e7 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 05:59:23 -0400 Subject: [PATCH 185/565] refactor(main): streamline stdout handling in main function --- main.py | 18 ++++-------------- pyproject.toml | 2 +- uv.lock | 2 +- 3 files changed, 6 insertions(+), 16 deletions(-) diff --git a/main.py b/main.py index 08130adf..30452e39 100644 --- a/main.py +++ b/main.py @@ -10,7 +10,6 @@ """ import logging -import os import sys from typing import Literal @@ -31,7 +30,6 @@ get_config, get_keyring_name, ) -from linkedin_mcp_server.config.schema import AppConfig from linkedin_mcp_server.drivers.chrome import close_all_drivers, get_or_create_driver from linkedin_mcp_server.exceptions import CredentialsNotFoundError, LinkedInMCPError from linkedin_mcp_server.logging_config import configure_logging @@ -41,11 +39,6 @@ logger = logging.getLogger(__name__) -def should_suppress_stdout(config: AppConfig) -> bool: - """Check if stdout should be suppressed to avoid interfering with MCP stdio protocol.""" - return not config.is_interactive and config.server.transport == "stdio" - - def choose_transport_interactive() -> Literal["stdio", "streamable-http"]: """Prompt user for transport mode using inquirer.""" 
questions = [ @@ -296,16 +289,13 @@ def main() -> None: # Get configuration (this sets config.is_interactive) config = get_config() - # Suppress stdout if running in MCP stdio mode to avoid interfering with JSON-RPC protocol - if should_suppress_stdout(config): - sys.stdout = open(os.devnull, "w") - # Get version for logging/display version = get_version() - # Print banner - print(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") - print("=" * 40) + # Only print banner in interactive mode (to avoid interfering with MCP protocol) + if config.is_interactive: + print(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") + print("=" * 40) # Always log version (this goes to stderr/logging, not stdout) logger.info(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") diff --git a/pyproject.toml b/pyproject.toml index 6b39cf8f..43302f6b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.2.1" +version = "1.2.2" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index ad01aaf6..0cc92b51 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.2.1" +version = "1.2.2" source = { virtual = "." 
} dependencies = [ { name = "fastmcp" }, From 1f66310241b9557a8a3c86d80e5ecbc7a6895b97 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 8 Jul 2025 09:59:59 +0000 Subject: [PATCH 186/565] chore(dxt): update manifest.json version to v1.2.2 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 0fadb566..db4f8cf1 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.2.1", + "version": "1.2.2", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From 9e2a6a8700e9175857ad326312de10853e1f35a5 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 06:06:56 -0400 Subject: [PATCH 187/565] refactor(main): move logging configuration to top --- main.py | 12 ++++++------ pyproject.toml | 2 +- uv.lock | 2 +- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/main.py b/main.py index 
30452e39..fc7c16bc 100644 --- a/main.py +++ b/main.py @@ -289,6 +289,12 @@ def main() -> None: # Get configuration (this sets config.is_interactive) config = get_config() + # Configure logging FIRST (before any logger usage) + configure_logging( + log_level=config.server.log_level, + json_format=not config.is_interactive and config.server.log_level != "DEBUG", + ) + # Get version for logging/display version = get_version() @@ -308,12 +314,6 @@ def main() -> None: if config.server.get_cookie: get_cookie_and_exit() - # Configure logging - configure_logging( - log_level=config.server.log_level, - json_format=not config.is_interactive and config.server.log_level != "DEBUG", - ) - logger.debug(f"Server configuration: {config}") # Phase 1: Ensure Authentication is Ready diff --git a/pyproject.toml b/pyproject.toml index 43302f6b..f6a11746 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.2.2" +version = "1.2.3" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 0cc92b51..57de1b87 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.2.2" +version = "1.2.3" source = { virtual = "." 
} dependencies = [ { name = "fastmcp" }, From b8d9d64510b185efba87e9c7119f453afc6e8106 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 8 Jul 2025 10:07:22 +0000 Subject: [PATCH 188/565] chore(dxt): update manifest.json version to v1.2.3 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index db4f8cf1..faa61451 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.2.2", + "version": "1.2.3", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From 487935bda2cdd11fdb19270a567b529386c183f8 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 06:28:46 -0400 Subject: [PATCH 189/565] chore(version): bump version to 1.2.4 and fix docker config in manifest --- README.md | 14 ++++++++------ manifest.json | 3 ++- pyproject.toml | 2 +- uv.lock | 2 +- 4 files changed, 12 insertions(+), 9 
deletions(-) diff --git a/README.md b/README.md index 41b52b5e..c6403ba5 100644 --- a/README.md +++ b/README.md @@ -52,12 +52,14 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c "linkedin": { "command": "docker", "args": [ - "run", "--rm", + "run", "--rm", "-i", "-e", "LINKEDIN_COOKIE", - "stickerdaniel/linkedin-mcp-server" + "-e", "LOG_LEVEL", + "stickerdaniel/linkedin-mcp-server:latest" ], "env": { - "LINKEDIN_COOKIE": "XXXXXX..." + "LINKEDIN_COOKIE": "XXXXXX...", + "LOG_LEVEL": "INFO" } } } @@ -71,7 +73,7 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c **Run the server with the `--get-cookie` flag:** ```bash docker run -it --rm \ - stickerdaniel/linkedin-mcp-server \ + stickerdaniel/linkedin-mcp-server:latest \ --get-cookie ``` Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method below. @@ -115,7 +117,7 @@ docker run -it --rm \ -e LINKEDIN_EMAIL="your.email@example.com" \ -e LINKEDIN_PASSWORD="your_password" \ -p 8080:8080 \ - stickerdaniel/linkedin-mcp-server \ + stickerdaniel/linkedin-mcp-server:latest \ --transport streamable-http --host 0.0.0.0 --port 8080 --path /mcp ``` **Test with mcp inspector:** @@ -161,7 +163,7 @@ docker run -it --rm \ **Run the server with the `--get-cookie` flag:** ```bash docker run -it --rm \ - stickerdaniel/linkedin-mcp-server \ + stickerdaniel/linkedin-mcp-server:latest \ --get-cookie ``` Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method below. 
diff --git a/manifest.json b/manifest.json index faa61451..0687446a 100644 --- a/manifest.json +++ b/manifest.json @@ -23,8 +23,9 @@ "mcp_config": { "command": "docker", "args": [ - "run", "--rm", + "run", "--rm", "-i", "-e", "LINKEDIN_COOKIE=${user_config.linkedin_cookie}", + "-e", "LOG_LEVEL=INFO", "stickerdaniel/linkedin-mcp-server:latest" ] } diff --git a/pyproject.toml b/pyproject.toml index f6a11746..12d86354 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.2.3" +version = "1.2.4" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 57de1b87..ddeba51c 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.2.3" +version = "1.2.4" source = { virtual = "." } dependencies = [ { name = "fastmcp" }, From fe362d261df0aabb14ff303f54c4d9134eeaa97b Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 8 Jul 2025 10:29:15 +0000 Subject: [PATCH 190/565] chore(dxt): update manifest.json version to v1.2.4 [skip ci] --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 0687446a..641b2fc9 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.2.3", + "version": "1.2.4", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { From e1e775d0860b190b6f28442dff2558c50d38feb6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 15:39:20 -0400 Subject: [PATCH 191/565] feat(dxt): updated release workflow to update dxt for pinned container tag. Improves stability and ensures the dxt matching tag is pulled automatically. --- .github/workflows/release.yml | 20 ++++++++++++++------ README.md | 7 ++----- main.py | 10 ++++++---- manifest.json | 4 ++-- 4 files changed, 24 insertions(+), 17 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index eaef23a4..cd126688 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -83,10 +83,11 @@ jobs: echo "โœ… Created and pushed tag v$VERSION" fi - - name: Update manifest.json version + - name: Update manifest.json version and Docker image run: | set -e sed -i 's/"version": ".*"/"version": "'$VERSION'"/' manifest.json + sed -i 's/stickerdaniel\/linkedin-mcp-server:[^"]*/stickerdaniel\/linkedin-mcp-server:'$VERSION'/' manifest.json echo "โœ… Updated manifest.json to version $VERSION" - name: Commit manifest update @@ -143,16 +144,23 @@ jobs: body: | For an installation guide, refer to the [README](https://github.com/stickerdaniel/linkedin-mcp-server/blob/main/README.md). 
- ## Update Docker Image - **Pull this release's image:** + ## ๐Ÿณ Update Docker Installation + **For users with Docker-based MCP client configurations:** + ```bash + docker pull stickerdaniel/linkedin-mcp-server:latest + ``` + The `latest` tag will always point to the most recent release. + To pull this specific version, run: ```bash docker pull stickerdaniel/linkedin-mcp-server:${{ env.VERSION }} ``` - ## Update Claude Desktop DXT Extension + ## ๐Ÿ“ฆ Update DXT Extension Installation + **For Claude Desktop users:** 1. Download the `.dxt` file below - 2. Double-click to open in Claude Desktop - 3. Set your LinkedIn cookie + 2. Double-click to install in Claude Desktop + + This DXT extension uses the pinned version `${{ env.VERSION }}`, the Docker image will be pulled automatically. - name: Summary run: | diff --git a/README.md b/README.md index c6403ba5..04e69602 100644 --- a/README.md +++ b/README.md @@ -54,12 +54,10 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c "args": [ "run", "--rm", "-i", "-e", "LINKEDIN_COOKIE", - "-e", "LOG_LEVEL", "stickerdaniel/linkedin-mcp-server:latest" ], "env": { - "LINKEDIN_COOKIE": "XXXXXX...", - "LOG_LEVEL": "INFO" + "LINKEDIN_COOKIE": "XXXXXX..." 
} } } @@ -114,8 +112,7 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c **HTTP Mode Example (for web-based MCP clients):** ```bash docker run -it --rm \ - -e LINKEDIN_EMAIL="your.email@example.com" \ - -e LINKEDIN_PASSWORD="your_password" \ + -e LINKEDIN_COOKIE="your_linkedin_cookie" \ -p 8080:8080 \ stickerdaniel/linkedin-mcp-server:latest \ --transport streamable-http --host 0.0.0.0 --port 8080 --path /mcp diff --git a/main.py b/main.py index fc7c16bc..b63290e3 100644 --- a/main.py +++ b/main.py @@ -323,12 +323,14 @@ def main() -> None: logger.info("Authentication ready") except CredentialsNotFoundError as e: logger.error(f"Authentication setup failed: {e}") - if not config.is_interactive: - print("\nโŒ LinkedIn cookie required for Docker/non-interactive mode") - else: + if config.is_interactive: print( - "\nโŒ Authentication required - please provide LinkedIn authentication" + "\nโŒ Authentication required - please provide LinkedIn's li_at cookie" ) + else: + # TODO: make claude desktop handle this without terminating + print("\nโŒ Cookie required for Docker/non-interactive mode") + sys.exit(1) except KeyboardInterrupt: print("\n\n๐Ÿ‘‹ Setup cancelled by user") diff --git a/manifest.json b/manifest.json index 641b2fc9..9b26d92c 100644 --- a/manifest.json +++ b/manifest.json @@ -25,8 +25,8 @@ "args": [ "run", "--rm", "-i", "-e", "LINKEDIN_COOKIE=${user_config.linkedin_cookie}", - "-e", "LOG_LEVEL=INFO", - "stickerdaniel/linkedin-mcp-server:latest" + "-e", "LOG_LEVEL=DEBUG", + "stickerdaniel/linkedin-mcp-server:1.2.4" ] } }, From 6eb9e2964752f0703ff32f02af638f5d99b10f5d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 15:39:42 -0400 Subject: [PATCH 192/565] chore(version): bump version to 1.2.5 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 12d86354..1bccce2f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 
+1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.2.4" +version = "1.2.5" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index ddeba51c..02301a2f 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.2.4" +version = "1.2.5" source = { virtual = "." } dependencies = [ { name = "fastmcp" }, From 48612d81ac7bf631c8027f429c061fdd086a83bc Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 8 Jul 2025 19:40:10 +0000 Subject: [PATCH 193/565] chore(dxt): update manifest.json version to v1.2.5 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index 9b26d92c..ada01dbd 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.2.4", + "version": "1.2.5", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-e", "LINKEDIN_COOKIE=${user_config.linkedin_cookie}", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:1.2.4" + "stickerdaniel/linkedin-mcp-server:1.2.5" ] } }, From 797c37a97f4a261365a40821e93f6d9e32c8f826 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:42:22 -0400 Subject: [PATCH 194/565] Update README.md --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 04e69602..075ee5d5 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,9 @@ Through this LinkedIn MCP server, AI assistants like Claude can connect to your https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 ## Usage Examples - +``` +What are my recommended jobs I can apply to? 
+``` ``` Research the background of this candidate https://www.linkedin.com/in/stickerdaniel/ ``` From 51d972f981ed08417e20eb672c8f1c6e5397c423 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 16:40:07 -0400 Subject: [PATCH 195/565] feat(release): rename DXT file with version suffix --- .github/workflows/release.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index cd126688..1aee81fa 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -127,7 +127,9 @@ jobs: run: uv cache prune --ci - name: Build DXT extension - run: bunx @anthropic-ai/dxt pack + run: | + bunx @anthropic-ai/dxt pack + mv linkedin-mcp-server.dxt linkedin-mcp-server-v$VERSION.dxt - name: Create GitHub Release env: From 41fcced04a8749a4e83e3df2f59fc191181bd125 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 17:58:54 -0400 Subject: [PATCH 196/565] fix(main): improve shutdown messages formatting --- main.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/main.py b/main.py index b63290e3..2f8db748 100644 --- a/main.py +++ b/main.py @@ -420,7 +420,7 @@ def main() -> None: mcp.run(transport=transport) except KeyboardInterrupt: - print("โน๏ธ Server stopped by user") + print("\nโน๏ธ Server stopped by user") exit_gracefully(0) except Exception as e: logger.error(f"Server runtime error: {e}") @@ -430,7 +430,7 @@ def main() -> None: def exit_gracefully(exit_code: int = 0) -> None: """Exit the application gracefully, cleaning up resources.""" - print("\n๐Ÿ‘‹ Shutting down LinkedIn MCP server...") + print("๐Ÿ‘‹ Shutting down LinkedIn MCP server...") # Clean up drivers close_all_drivers() From 5891c09185c565ab0262233727591f44b0137c88 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 18:02:34 -0400 Subject: [PATCH 197/565] refactor(release): move tag creation step in workflow to a later stage --- 
.github/workflows/release.yml | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 1aee81fa..570fb9c7 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -69,20 +69,6 @@ jobs: - name: Set up Bun uses: oven-sh/setup-bun@v1 - - name: Create release tag - run: | - set -e - git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" - git config --local user.name "github-actions[bot]" - - if git tag -l "v$VERSION" | grep -q "v$VERSION"; then - echo "โš ๏ธ Tag v$VERSION already exists, skipping tag creation" - else - git tag "v$VERSION" - git push origin "v$VERSION" - echo "โœ… Created and pushed tag v$VERSION" - fi - - name: Update manifest.json version and Docker image run: | set -e @@ -93,6 +79,8 @@ jobs: - name: Commit manifest update run: | set -e + git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config --local user.name "github-actions[bot]" git add manifest.json if git diff --staged --quiet; then echo "โ„น๏ธ No changes to commit" @@ -102,6 +90,20 @@ jobs: echo "โœ… Committed manifest.json update" fi + - name: Create release tag + run: | + set -e + git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config --local user.name "github-actions[bot]" + + if git tag -l "v$VERSION" | grep -q "v$VERSION"; then + echo "โš ๏ธ Tag v$VERSION already exists, skipping tag creation" + else + git tag "v$VERSION" + git push origin "v$VERSION" + echo "โœ… Created and pushed tag v$VERSION" + fi + - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 From 09c512c951fefe230212276c5ed26a69398c7b93 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 18:03:11 -0400 Subject: [PATCH 198/565] chore(version): bump version to 1.2.6 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 
deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1bccce2f..9488c859 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.2.5" +version = "1.2.6" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 02301a2f..8648df52 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.2.5" +version = "1.2.6" source = { virtual = "." } dependencies = [ { name = "fastmcp" }, From 59b8534490c6c8ff2c700ae556575637158a8f49 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 8 Jul 2025 22:03:36 +0000 Subject: [PATCH 199/565] chore(dxt): update manifest.json version to v1.2.6 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index ada01dbd..478022bd 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.2.5", + "version": "1.2.6", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-e", "LINKEDIN_COOKIE=${user_config.linkedin_cookie}", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:1.2.5" + "stickerdaniel/linkedin-mcp-server:1.2.6" ] } }, From 8fe20c9156a053ad0765bfc1ffe9ffd947b90fbc Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 8 Jul 2025 18:10:05 -0400 Subject: [PATCH 200/565] docs(release): add restart instruction for Claude Desktop --- .github/workflows/release.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 570fb9c7..e978b808 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -163,6 +163,7 @@ jobs: **For Claude Desktop users:** 1. Download the `.dxt` file below 2. Double-click to install in Claude Desktop + 3. Restart Claude Desktop This DXT extension uses the pinned version `${{ env.VERSION }}`, the Docker image will be pulled automatically. 
From fb2bf58e63f5daddac277ac234cb25898c36bdbb Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 9 Jul 2025 12:02:44 -0400 Subject: [PATCH 201/565] feat(release): generate release notes from template --- .github/workflows/release.yml | 27 ++++++--------------------- RELEASE_NOTES_TEMPLATE.md | 20 ++++++++++++++++++++ 2 files changed, 26 insertions(+), 21 deletions(-) create mode 100644 RELEASE_NOTES_TEMPLATE.md diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e978b808..b2c9d1fa 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -133,6 +133,11 @@ jobs: bunx @anthropic-ai/dxt pack mv linkedin-mcp-server.dxt linkedin-mcp-server-v$VERSION.dxt + - name: Generate release notes + run: | + envsubst < RELEASE_NOTES_TEMPLATE.md > RELEASE_NOTES.md + echo "โœ… Generated release notes from template" + - name: Create GitHub Release env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -145,27 +150,7 @@ jobs: draft: false prerelease: false name: "LinkedIn MCP Server v${{ env.VERSION }}" - body: | - For an installation guide, refer to the [README](https://github.com/stickerdaniel/linkedin-mcp-server/blob/main/README.md). - - ## ๐Ÿณ Update Docker Installation - **For users with Docker-based MCP client configurations:** - ```bash - docker pull stickerdaniel/linkedin-mcp-server:latest - ``` - The `latest` tag will always point to the most recent release. - To pull this specific version, run: - ```bash - docker pull stickerdaniel/linkedin-mcp-server:${{ env.VERSION }} - ``` - - ## ๐Ÿ“ฆ Update DXT Extension Installation - **For Claude Desktop users:** - 1. Download the `.dxt` file below - 2. Double-click to install in Claude Desktop - 3. Restart Claude Desktop - - This DXT extension uses the pinned version `${{ env.VERSION }}`, the Docker image will be pulled automatically. 
+ body_path: RELEASE_NOTES.md - name: Summary run: | diff --git a/RELEASE_NOTES_TEMPLATE.md b/RELEASE_NOTES_TEMPLATE.md new file mode 100644 index 00000000..0aa23401 --- /dev/null +++ b/RELEASE_NOTES_TEMPLATE.md @@ -0,0 +1,20 @@ +For an installation guide, refer to the [README](https://github.com/stickerdaniel/linkedin-mcp-server/blob/main/README.md). + +## ๐Ÿณ Update Docker Installation +**For users with Docker-based MCP client configurations:** +```bash +docker pull stickerdaniel/linkedin-mcp-server:latest +``` +The `latest` tag will always point to the most recent release. +To pull this specific version, run: +```bash +docker pull stickerdaniel/linkedin-mcp-server:${VERSION} +``` + +## ๐Ÿ“ฆ Update DXT Extension Installation +**For Claude Desktop users:** +1. Download the `.dxt` file below +2. Double-click to install in Claude Desktop +3. Restart Claude Desktop + +This DXT extension uses the pinned version `${VERSION}`, the Docker image will be pulled automatically. From 925c8dcf17d2e3c25afeccbbdf9cc3874c4bdc1d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 9 Jul 2025 14:27:03 -0400 Subject: [PATCH 202/565] docs(README): remove redundant status line --- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index 075ee5d5..2869f6f7 100644 --- a/README.md +++ b/README.md @@ -25,8 +25,6 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c ``` ## Features & Tool Status - -**Current Status: All Tools Working** > [!TIP] > - **Profile Scraping** (`get_person_profile`): Get detailed information from a LinkedIn profile including work history, education, skills, and connections > - **Company Analysis** (`get_company_profile`): Extract comprehensive company information from a LinkedIn company profile name From a62d6b64fa318fe852c8382047911c8876774e01 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Fri, 11 Jul 2025 17:16:17 -0400 Subject: [PATCH 
203/565] Create LICENSE --- LICENSE | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 LICENSE diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..86f2b21b --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2025 Daniel Sticker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
From 7d88220bee8f0a93cca27d77e5816dd913248dc6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Fri, 11 Jul 2025 17:43:07 -0400 Subject: [PATCH 204/565] Update LICENSE --- LICENSE | 682 ++++++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 661 insertions(+), 21 deletions(-) diff --git a/LICENSE b/LICENSE index 86f2b21b..0ad25db4 100644 --- a/LICENSE +++ b/LICENSE @@ -1,21 +1,661 @@ -MIT License - -Copyright (c) 2025 Daniel Sticker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. 
+ + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. 
It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. 
Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. 
If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. 
+ + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the 
material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. 
If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. 
+ + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. 
+ + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. 
+ + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. 
There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. From f463a5b007c9dcd89338f66fc09f29c235d01d0e Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 11 Jul 2025 17:52:57 -0400 Subject: [PATCH 205/565] docs(company, job, person): update tool descriptions and remove outdated issue note --- linkedin_mcp_server/tools/company.py | 2 +- linkedin_mcp_server/tools/job.py | 11 ++++------- linkedin_mcp_server/tools/person.py | 4 ++-- 3 files changed, 7 insertions(+), 10 deletions(-) diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index f986f140..5a503584 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -30,7 +30,7 @@ async def get_company_profile( company_name: str, get_employees: bool = False ) -> Dict[str, Any]: """ - Scrape a company's LinkedIn profile. + Get a specific company's LinkedIn profile. Args: company_name (str): LinkedIn company name (e.g., "docker", "anthropic", "microsoft") diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 81794868..d6513522 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -32,10 +32,7 @@ def register_job_tools(mcp: FastMCP) -> None: @mcp.tool() async def get_job_details(job_id: str) -> Dict[str, Any]: """ - Scrape job details from a LinkedIn job posting. - - This tool extracts comprehensive job information including title, company, - location, posting date, application count, and full job description. 
+ Get job details for a specific job posting on LinkedIn Args: job_id (str): LinkedIn job ID (e.g., "4252026496", "3856789012") @@ -61,10 +58,10 @@ async def get_job_details(job_id: str) -> Dict[str, Any]: @mcp.tool() async def search_jobs(search_term: str) -> List[Dict[str, Any]]: """ - Search for jobs on LinkedIn (Note: This tool has compatibility issues). + Search for jobs on LinkedIn using a search term. Args: - search_term (str): The search term to use for job search + search_term (str): Search term to use for the job search. Returns: List[Dict[str, Any]]: List of job search results @@ -84,7 +81,7 @@ async def search_jobs(search_term: str) -> List[Dict[str, Any]]: @mcp.tool() async def get_recommended_jobs() -> List[Dict[str, Any]]: """ - Get recommended jobs from LinkedIn (Note: This tool has compatibility issues). + Get your personalized recommended jobs from LinkedIn Returns: List[Dict[str, Any]]: List of recommended jobs diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 750bb003..c9428a7d 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -28,10 +28,10 @@ def register_person_tools(mcp: FastMCP) -> None: @mcp.tool() async def get_person_profile(linkedin_username: str) -> Dict[str, Any]: """ - Scrape a person's LinkedIn profile. + Get a specific person's LinkedIn profile. 
Args: - linkedin_username (str): LinkedIn username (e.g., "john-doe-123456", "sarah-smith", "stickerdaniel") + linkedin_username (str): LinkedIn username (e.g., "stickerdaniel", "anistji") Returns: Dict[str, Any]: Structured data from the person's profile From 93940165fdcf76aab47df124e824c15b6fcb685a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 11 Jul 2025 17:55:23 -0400 Subject: [PATCH 206/565] docs(README): update license to AGPL v3.0 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2869f6f7..b89a0b5f 100644 --- a/README.md +++ b/README.md @@ -312,6 +312,6 @@ Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@ ## License -This project is licensed under the MIT License +This project is licensed under the GNU Affero General Public License v3.0.
From 09ef771e0b6536c7e8ee8d219a055fe7313a9016 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 11 Jul 2025 21:09:02 -0400 Subject: [PATCH 207/565] docs(issue_template): refine feature request structure --- .github/ISSUE_TEMPLATE/feature_request.md | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index ed4490f7..31328c57 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -7,14 +7,11 @@ assignees: '' --- -## What feature would you like? -Describe what you want to happen and why it would be useful. +## Feature description +Describe what you want to happen. ## Use case -How would you use this feature? -``` -Example: "Claude, get me the skills from this LinkedIn profile: [URL]" -``` +Why this feature is useful. ## Suggested implementation If you have a specific idea for how to implement this feature, please describe it here. From 161ee11f7c5f1665057f6e0fa7722cf2926dd8dc Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 12 Jul 2025 17:37:53 -0400 Subject: [PATCH 208/565] docs(README): fix DXT extension download link --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index b89a0b5f..9d643457 100644 --- a/README.md +++ b/README.md @@ -149,7 +149,7 @@ docker run -it --rm \ **Prerequisites:** [Claude Desktop](https://claude.ai/download) and [Docker](https://www.docker.com/get-started/) installed **One-click installation** for Claude Desktop users: -1. Download the [DXT extension](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest/download/linkedin-mcp-server.dxt) +1. Download the [DXT extension](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest) 2. Double-click to install into Claude Desktop 3. 
Set your LinkedIn cookie in the extension settings From 19dd0ae7a176078a693e00558ae9cc99dd9b9486 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Mon, 14 Jul 2025 19:32:25 -0400 Subject: [PATCH 209/565] feat: add configurable user agent support (#28) * feat(chrome): add custom user agent support * docs(README): add user agent option to documentation --- README.md | 3 +++ linkedin_mcp_server/config/loaders.py | 13 +++++++++++++ linkedin_mcp_server/config/schema.py | 1 + linkedin_mcp_server/drivers/chrome.py | 18 +++++++++++++++--- 4 files changed, 32 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 9d643457..779e1167 100644 --- a/README.md +++ b/README.md @@ -108,6 +108,7 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c - `--path PATH` - HTTP server path (default: /mcp) - `--get-cookie` - Attempt to login with email and password and extract the LinkedIn cookie - `--cookie {cookie}` - Pass a specific LinkedIn cookie for login +- `--user-agent {user_agent}` - Specify custom user agent string to prevent anti-scraping detection **HTTP Mode Example (for web-based MCP clients):** ```bash @@ -117,6 +118,7 @@ docker run -it --rm \ stickerdaniel/linkedin-mcp-server:latest \ --transport streamable-http --host 0.0.0.0 --port 8080 --path /mcp ``` + **Test with mcp inspector:** 1. Install and run mcp inspector ```bunx @modelcontextprotocol/inspector``` 2. 
Click pre-filled token url to open the inspector in your browser @@ -245,6 +247,7 @@ uv run main.py --no-headless --no-lazy-init - `--get-cookie` - Login with email and password and extract the LinkedIn cookie - `--clear-keychain` - Clear all stored LinkedIn credentials and cookies from system keychain - `--cookie {cookie}` - Pass a specific LinkedIn cookie for login +- `--user-agent {user_agent}` - Specify custom user agent string to prevent anti-scraping detection - `--transport {stdio,streamable-http}` - Set transport mode - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index d8fce518..0333539a 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -46,6 +46,7 @@ class EnvironmentKeys: # Chrome configuration CHROMEDRIVER = "CHROMEDRIVER" HEADLESS = "HEADLESS" + USER_AGENT = "USER_AGENT" # Server configuration LOG_LEVEL = "LOG_LEVEL" @@ -120,6 +121,9 @@ def load_from_env(config: AppConfig) -> AppConfig: if chromedriver := os.environ.get(EnvironmentKeys.CHROMEDRIVER): config.chrome.chromedriver_path = chromedriver + if user_agent := os.environ.get(EnvironmentKeys.USER_AGENT): + config.chrome.user_agent = user_agent + # Log level if log_level_env := os.environ.get(EnvironmentKeys.LOG_LEVEL): log_level_upper = log_level_env.upper() @@ -225,6 +229,12 @@ def load_from_args(config: AppConfig) -> AppConfig: help="Specify LinkedIn cookie directly", ) + parser.add_argument( + "--user-agent", + type=str, + help="Specify custom user agent string to prevent anti-scraping detection", + ) + args = parser.parse_args() # Update configuration with parsed arguments @@ -261,6 +271,9 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.cookie: config.linkedin.cookie = args.cookie + if args.user_agent: + config.chrome.user_agent = args.user_agent + return config diff --git 
a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index 07fb4f53..77f94639 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -31,6 +31,7 @@ class ChromeConfig: headless: bool = True chromedriver_path: Optional[str] = None browser_args: List[str] = field(default_factory=list) + user_agent: Optional[str] = None @dataclass diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 82a2eb40..3cbcdd39 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -9,6 +9,7 @@ import logging import os +import platform from typing import Dict, Optional from linkedin_scraper.exceptions import ( @@ -27,8 +28,19 @@ from linkedin_mcp_server.config import get_config from linkedin_mcp_server.exceptions import DriverInitializationError + # Constants -DEFAULT_USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36" +def get_default_user_agent() -> str: + """Get platform-specific default user agent to reduce fingerprinting.""" + system = platform.system() + + if system == "Windows": + return "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36" + elif system == "Darwin": # macOS + return "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36" + else: # Linux and others + return "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36" + # Global driver storage to reuse sessions active_drivers: Dict[str, webdriver.Chrome] = {} @@ -72,8 +84,8 @@ def create_chrome_options(config) -> Options: chrome_options.add_argument("--aggressive-cache-discard") chrome_options.add_argument("--disable-ipc-flooding-protection") - # Set user agent (configurable with sensible default) - user_agent = 
getattr(config.chrome, "user_agent", DEFAULT_USER_AGENT) + # Set user agent (configurable with platform-specific default) + user_agent = config.chrome.user_agent or get_default_user_agent() chrome_options.add_argument(f"--user-agent={user_agent}") # Add any custom browser arguments from config From bf42802e1d084c6618e6732aacda338978d0e140 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 14 Jul 2025 19:42:49 -0400 Subject: [PATCH 210/565] chore(release): bump version to 1.3.0 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9488c859..f244edd4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.2.6" +version = "1.3.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 8648df52..d30a3f4e 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.2.6" +version = "1.3.0" source = { virtual = "." 
} dependencies = [ { name = "fastmcp" }, From c2c92db75814effb316fdd695790ba0e8b1f8c43 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 14 Jul 2025 23:43:28 +0000 Subject: [PATCH 211/565] chore(dxt): update manifest.json version to v1.3.0 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index 478022bd..3d637720 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.2.6", + "version": "1.3.0", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-e", "LINKEDIN_COOKIE=${user_config.linkedin_cookie}", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:1.2.6" + "stickerdaniel/linkedin-mcp-server:1.3.0" ] } }, From 98f3c54a593babc3f9fb982ea127e8019f02b9eb Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 14 Jul 2025 19:51:01 -0400 
Subject: [PATCH 212/565] fix(manifest): remove parameters from tool descriptions --- manifest.json | 67 +++++---------------------------------------------- 1 file changed, 6 insertions(+), 61 deletions(-) diff --git a/manifest.json b/manifest.json index 3d637720..810c14e2 100644 --- a/manifest.json +++ b/manifest.json @@ -33,82 +33,27 @@ "tools": [ { "name": "get_person_profile", - "description": "Get detailed information from a LinkedIn profile including work history, education, skills, and connections", - "parameters": { - "type": "object", - "properties": { - "linkedin_username": { - "type": "string", - "description": "LinkedIn username (e.g., \"stickerdaniel\", \"john-doe-123456\")" - } - }, - "required": ["linkedin_username"] - } + "description": "Get detailed information from a LinkedIn profile including work history, education, skills, and connections" }, { "name": "get_company_profile", - "description": "Extract comprehensive company information and details", - "parameters": { - "type": "object", - "properties": { - "company_name": { - "type": "string", - "description": "LinkedIn company name (e.g., \"docker\", \"anthropic\", \"microsoft\")" - }, - "get_employees": { - "type": "boolean", - "description": "Whether to scrape the company's employees (slower)", - "default": false - } - }, - "required": ["company_name"] - } + "description": "Extract comprehensive company information and details" }, { "name": "get_job_details", - "description": "Retrieve specific job posting details using LinkedIn job IDs", - "parameters": { - "type": "object", - "properties": { - "job_id": { - "type": "string", - "description": "LinkedIn job ID (e.g., \"4252026496\", \"3856789012\")" - } - }, - "required": ["job_id"] - } + "description": "Retrieve specific job posting details using LinkedIn job IDs" }, { "name": "search_jobs", - "description": "Search for jobs with filters like keywords and location", - "parameters": { - "type": "object", - "properties": { - "search_term": { - 
"type": "string", - "description": "Search term for job search (e.g., \"software engineer\", \"product manager\")" - } - }, - "required": ["search_term"] - } + "description": "Search for jobs with filters like keywords and location" }, { "name": "get_recommended_jobs", - "description": "Get personalized job recommendations based on your profile", - "parameters": { - "type": "object", - "properties": {}, - "required": [] - } + "description": "Get personalized job recommendations based on your profile" }, { "name": "close_session", - "description": "Properly close browser session and clean up resources", - "parameters": { - "type": "object", - "properties": {}, - "required": [] - } + "description": "Properly close browser session and clean up resources" } ], "user_config": { From 831f6257be69e6775cd37a88dc542b9dbea405be Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 14 Jul 2025 19:51:33 -0400 Subject: [PATCH 213/565] chore(release): bump version to 1.3.1 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f244edd4..03e78ede 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.3.0" +version = "1.3.1" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index d30a3f4e..a07d6636 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.3.0" +version = "1.3.1" source = { virtual = "." 
} dependencies = [ { name = "fastmcp" }, From 8adb788b0e0127198b34a0c612f2de18b0f071f1 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 14 Jul 2025 23:51:59 +0000 Subject: [PATCH 214/565] chore(dxt): update manifest.json version to v1.3.1 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index 810c14e2..d32a7676 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.3.0", + "version": "1.3.1", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-e", "LINKEDIN_COOKIE=${user_config.linkedin_cookie}", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:1.3.0" + "stickerdaniel/linkedin-mcp-server:1.3.1" ] } }, From 29fc79356c394626015a2bac8359a1742c7f3cb5 Mon Sep 17 00:00:00 2001 From: Daniel Sticker 
<63877413+stickerdaniel@users.noreply.github.com> Date: Tue, 15 Jul 2025 12:35:04 -0400 Subject: [PATCH 215/565] Update LICENSE --- LICENSE | 862 +++++++++++++------------------------------------------- 1 file changed, 201 insertions(+), 661 deletions(-) diff --git a/LICENSE b/LICENSE index 0ad25db4..d5d41d94 100644 --- a/LICENSE +++ b/LICENSE @@ -1,661 +1,201 @@ - GNU AFFERO GENERAL PUBLIC LICENSE - Version 3, 19 November 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU Affero General Public License is a free, copyleft license for -software and other kinds of works, specifically designed to ensure -cooperation with the community in the case of network server software. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -our General Public Licenses are intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - Developers that use our General Public Licenses protect your rights -with two steps: (1) assert copyright on the software, and (2) offer -you this License which gives you legal permission to copy, distribute -and/or modify the software. 
- - A secondary benefit of defending all users' freedom is that -improvements made in alternate versions of the program, if they -receive widespread use, become available for other developers to -incorporate. Many developers of free software are heartened and -encouraged by the resulting cooperation. However, in the case of -software used on network servers, this result may fail to come about. -The GNU General Public License permits making a modified version and -letting the public access it on a server without ever releasing its -source code to the public. - - The GNU Affero General Public License is designed specifically to -ensure that, in such cases, the modified source code becomes available -to the community. It requires the operator of a network server to -provide the source code of the modified version running there to the -users of that server. Therefore, public use of a modified version, on -a publicly accessible server, gives the public access to the source -code of the modified version. - - An older license, called the Affero General Public License and -published by Affero, was designed to accomplish similar goals. This is -a different license, not a version of the Affero GPL, but Affero has -released a new version of the Affero GPL which permits relicensing under -this license. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU Affero General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. 
The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. 
- - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. 
The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. 
- - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. 
- - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. 
This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. 
For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. 
Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. 
- - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. 
Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. 
- - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. 
- - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. 
You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Remote Network Interaction; Use with the GNU General Public License. 
- - Notwithstanding any other provision of this License, if you modify the -Program, your modified version must prominently offer all users -interacting with it remotely through a computer network (if your version -supports such interaction) an opportunity to receive the Corresponding -Source of your version by providing access to the Corresponding Source -from a network server at no charge, through some standard or customary -means of facilitating copying of software. This Corresponding Source -shall include the Corresponding Source for any work covered by version 3 -of the GNU General Public License that is incorporated pursuant to the -following paragraph. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the work with which it is combined will remain governed by version -3 of the GNU General Public License. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU Affero General Public License from time to time. Such new versions -will be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU Affero General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU Affero General Public License, you may choose any version ever published -by the Free Software Foundation. 
- - If the Program specifies that a proxy can decide which future -versions of the GNU Affero General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. 
- - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU Affero General Public License as published - by the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU Affero General Public License for more details. - - You should have received a copy of the GNU Affero General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If your software can interact with users remotely through a computer -network, you should also make sure that it provides a way for users to -get its source. For example, if your program is a web application, its -interface could display a "Source" link that leads users to an archive -of the code. 
There are many ways you could offer source, and different -solutions will be better for different programs; see section 13 for the -specific requirements. - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU AGPL, see -. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2025 Daniel Sticker + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. From e3acd2e8b23f1a7d118202b18adeec1f6bb22c63 Mon Sep 17 00:00:00 2001 From: Atharva Domale <115187316+AtharvaDomale@users.noreply.github.com> Date: Tue, 15 Jul 2025 13:02:41 -0400 Subject: [PATCH 216/565] fix(cli): fix UnicodeEncodeError on Windows by enforcing UTF-8 output (#30) Co-authored-by: Atharva Domale Co-authored-by: Daniel Sticker --- main.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/main.py b/main.py index 2f8db748..fd072027 100644 --- a/main.py +++ b/main.py @@ -9,6 +9,7 @@ """ +import io import logging import sys from typing import Literal @@ -36,6 +37,8 @@ from linkedin_mcp_server.server import create_mcp_server, shutdown_handler from linkedin_mcp_server.setup import run_cookie_extraction_setup, run_interactive_setup +sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8") + logger = logging.getLogger(__name__) From 6bb065edb2ddf44baeb92cd7c0443e6030283f81 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 15 Jul 2025 13:04:09 -0400 Subject: [PATCH 217/565] chore(release): bump version to 1.3.2 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 03e78ede..6e02c20a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.3.1" +version = "1.3.2" description = "MCP server for LinkedIn profile, company, and job 
scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index a07d6636..d7c5a516 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.3.1" +version = "1.3.2" source = { virtual = "." } dependencies = [ { name = "fastmcp" }, From bdb8d9db91313ea4404e5983ee7810a9803aeabb Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 15 Jul 2025 17:04:41 +0000 Subject: [PATCH 218/565] chore(dxt): update manifest.json version to v1.3.2 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index d32a7676..a221fa10 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.3.1", + "version": "1.3.2", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-e", "LINKEDIN_COOKIE=${user_config.linkedin_cookie}", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:1.3.1" + "stickerdaniel/linkedin-mcp-server:1.3.2" ] } }, From a2eff30959acc8017f19505134c4bbe41ed2e22c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 15 Jul 2025 15:06:35 -0400 Subject: [PATCH 219/565] docs(README): add CI and license badges --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index 779e1167..9ff0a855 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,11 @@ # LinkedIn MCP Server +

+ CI Status + Release + License +

+ Through this LinkedIn MCP server, AI assistants like Claude can connect to your LinkedIn. Give access to profiles and companies, get your recommended jobs, or search for keywords. All from a Docker container on your machine. ## Installation Methods From 714bb702c3dc6af691144dded73c561f534283ce Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Wed, 16 Jul 2025 16:39:54 -0400 Subject: [PATCH 220/565] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9ff0a855..c468f45f 100644 --- a/README.md +++ b/README.md @@ -321,6 +321,6 @@ Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@ ## License -This project is licensed under the GNU Affero General Public License v3.0. +This project is licensed under the Apache 2.0 license.
From e333eeb8e17ef5cc276d147411abf62d477ccd74 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 25 Jul 2025 16:18:07 -0400 Subject: [PATCH 221/565] docs(GeminiCLI): add example mcp setup config --- .gemini/settings.json | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 .gemini/settings.json diff --git a/.gemini/settings.json b/.gemini/settings.json new file mode 100644 index 00000000..eae4070e --- /dev/null +++ b/.gemini/settings.json @@ -0,0 +1,7 @@ +{ + "mcpServers": { + "linkedin-mcp-server": { + "httpUrl": "http://127.0.0.1:8000/mcp" + } + } +} From be826500ec1093b433e1b00f6296428ee1628377 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 25 Jul 2025 16:22:13 -0400 Subject: [PATCH 222/565] fix(chrome): increase timeout for cookie authentication from 15 to 30 seconds --- linkedin_mcp_server/drivers/chrome.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index 3cbcdd39..f5f13ba1 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -211,8 +211,8 @@ def login_with_cookie(driver: webdriver.Chrome, cookie: str) -> bool: logger.info("Attempting cookie authentication...") - # Set shorter timeout for faster failure detection - driver.set_page_load_timeout(15) + # Set timeout for cookie authentication - longer to handle LinkedIn's slow redirects + driver.set_page_load_timeout(30) actions.login(driver, cookie=cookie) From 5672f766a04f86a98b8f17d159a25e90bbfc871c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 25 Jul 2025 16:22:36 -0400 Subject: [PATCH 223/565] chore(release): bump version to 1.3.3 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 6e02c20a..8d75ee48 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.3.2" +version = 
"1.3.3" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index d7c5a516..bc383003 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.3.2" +version = "1.3.3" source = { virtual = "." } dependencies = [ { name = "fastmcp" }, From ebbbe1acd31e0d8a38363e1826e1f1cfa5c53737 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 25 Jul 2025 20:23:00 +0000 Subject: [PATCH 224/565] chore(dxt): update manifest.json version to v1.3.3 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index a221fa10..fab02ce8 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.3.2", + "version": "1.3.3", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-e", "LINKEDIN_COOKIE=${user_config.linkedin_cookie}", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:1.3.2" + "stickerdaniel/linkedin-mcp-server:1.3.3" ] } }, From 76926632cf8a99dcbe4ac92737d170c24a977810 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 4 Aug 2025 20:28:14 -0400 Subject: [PATCH 225/565] refactor(chrome): streamline cookie authentication process - Simplify login logic by reducing retry attempts to one - Enhance error handling for InvalidCredentialsError from linkedin-scraper - Improve logging for authentication status checks and unexpected pages - Remove redundant logging and waiting steps for clarity The core logic remains the same: 1. Set 45s timeout (invalid cookies cause indefinite loading) 2. Attempt login with cookie 3. Handle TimeoutException as invalid cookie indicator Handle InvalidCredentialsError with URL verification 5. 
Simple 1-retry mechanism if needed --- linkedin_mcp_server/drivers/chrome.py | 107 +++++++++++++++++++++----- 1 file changed, 89 insertions(+), 18 deletions(-) diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py index f5f13ba1..56a14bf4 100644 --- a/linkedin_mcp_server/drivers/chrome.py +++ b/linkedin_mcp_server/drivers/chrome.py @@ -206,34 +206,105 @@ def login_with_cookie(driver: webdriver.Chrome, cookie: str) -> bool: Returns: bool: True if login was successful, False otherwise """ + import time + try: from linkedin_scraper import actions # type: ignore + from selenium.common.exceptions import TimeoutException logger.info("Attempting cookie authentication...") - # Set timeout for cookie authentication - longer to handle LinkedIn's slow redirects - driver.set_page_load_timeout(30) + # Set longer timeout to handle slow LinkedIn loading + # Invalid cookies cause indefinite loading, so timeout is our detection mechanism + driver.set_page_load_timeout(45) + + # Attempt login + retry_count = 0 + max_retries = 1 + + while retry_count <= max_retries: + try: + actions.login(driver, cookie=cookie) + # If we reach here without timeout, login attempt completed + break + except TimeoutException: + # Timeout indicates invalid cookie (page loads forever) + logger.warning( + "Cookie authentication failed - page load timeout (likely invalid cookie)" + ) + return False + except Exception as e: + # Handle InvalidCredentialsError from linkedin-scraper + # This library sometimes incorrectly reports failure even when login succeeds + if "InvalidCredentialsError" in str( + type(e) + ) or "Cookie login failed" in str(e): + logger.info( + "LinkedIn-scraper reported InvalidCredentialsError - verifying actual authentication status..." 
+ ) + # Give LinkedIn time to complete redirect + time.sleep(2) + break + else: + logger.warning(f"Login attempt failed: {e}") + if retry_count < max_retries: + retry_count += 1 + logger.info( + f"Retrying authentication (attempt {retry_count + 1}/{max_retries + 1})" + ) + time.sleep(2) + continue + else: + return False + + # Check authentication status by examining the current URL + try: + current_url = driver.current_url - actions.login(driver, cookie=cookie) + # Check if we're on login page (authentication failed) + if "login" in current_url or "uas/login" in current_url: + logger.warning( + "Cookie authentication failed - redirected to login page" + ) + return False - # Quick check - if we're on login page, cookie is invalid - current_url = driver.current_url - if "login" in current_url or "uas/login" in current_url: - logger.warning("Cookie authentication failed - redirected to login page") - return False - elif ( - "feed" in current_url - or "mynetwork" in current_url - or "linkedin.com/in/" in current_url - ): - logger.info("Cookie authentication successful") - return True - else: - logger.warning("Cookie authentication failed - unexpected page") + # Check if we're on authenticated pages (authentication succeeded) + elif any( + indicator in current_url + for indicator in ["feed", "mynetwork", "linkedin.com/in/", "/feed/"] + ): + logger.info("Cookie authentication successful") + return True + + # Unexpected page - wait briefly and check again + else: + logger.info( + "Unexpected page after login, checking authentication status..." 
+ ) + time.sleep(2) + + final_url = driver.current_url + if "login" in final_url or "uas/login" in final_url: + logger.warning("Cookie authentication failed - ended on login page") + return False + elif any( + indicator in final_url + for indicator in ["feed", "mynetwork", "linkedin.com/in/", "/feed/"] + ): + logger.info("Cookie authentication successful after verification") + return True + else: + logger.warning( + f"Cookie authentication uncertain - unexpected final page: {final_url}" + ) + return False + + except Exception as e: + logger.error(f"Error checking authentication status: {e}") return False except Exception as e: - logger.warning(f"Cookie authentication failed: {e}") + logger.error(f"Cookie authentication failed with error: {e}") return False finally: # Restore normal timeout From 142cb2ee1bc2d81703d2ea0c946ddfd39daffb3e Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 4 Aug 2025 22:43:39 -0400 Subject: [PATCH 226/565] feat(pyproject): add uvx support for direct installation from GitHub - Add build-system configuration for setuptools - Add tool.setuptools.packages.find configuration - Add project.scripts entry point - Include main.py as a py-module - Add __main__.py for package entry point This enables installation via: uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server Implements #36 --- linkedin_mcp_server/__main__.py | 8 ++++++++ pyproject.toml | 14 ++++++++++++++ uv.lock | 2 +- 3 files changed, 23 insertions(+), 1 deletion(-) create mode 100644 linkedin_mcp_server/__main__.py diff --git a/linkedin_mcp_server/__main__.py b/linkedin_mcp_server/__main__.py new file mode 100644 index 00000000..7e78e50f --- /dev/null +++ b/linkedin_mcp_server/__main__.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +"""Entry point for linkedin-mcp-server command.""" + +# Import main function from the main module at package root +import main + +if __name__ == "__main__": + main.main() diff --git a/pyproject.toml b/pyproject.toml 
index 8d75ee48..d6e67fc6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,6 +12,20 @@ dependencies = [ "pyperclip>=1.9.0", ] +[project.scripts] +linkedin-mcp-server = "main:main" + +[build-system] +requires = ["setuptools>=68.0", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools] +py-modules = ["main"] + +[tool.setuptools.packages.find] +include = ["linkedin_mcp_server*"] +exclude = ["assets*", "docs*", "tests*"] + [tool.setuptools.package-data] linkedin_mcp_server = ["py.typed"] diff --git a/uv.lock b/uv.lock index bc383003..1c0f6989 100644 --- a/uv.lock +++ b/uv.lock @@ -659,7 +659,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" version = "1.3.3" -source = { virtual = "." } +source = { editable = "." } dependencies = [ { name = "fastmcp" }, { name = "inquirer" }, From a4676dd354d1d52c3d77c18655475188cacf97a1 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 4 Aug 2025 22:50:40 -0400 Subject: [PATCH 227/565] docs(readme): add uvx installation instructions --- README.md | 54 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/README.md b/README.md index c468f45f..411f0f13 100644 --- a/README.md +++ b/README.md @@ -12,6 +12,7 @@ Through this LinkedIn MCP server, AI assistants like Claude can connect to your [![Docker](https://img.shields.io/badge/Docker-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup-recommended---universal) [![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_Extension-d97757?style=for-the-badge&logo=anthropic)](#-claude-desktop-dxt-extension) +[![uvx](https://img.shields.io/badge/uvx-Quick_Install-00E5FF?style=for-the-badge&logo=python&logoColor=white)](#-quick-installation-with-uvx) [![Development](https://img.shields.io/badge/Development-Local_Setup-ffd343?style=for-the-badge&logo=python&logoColor=ffd343)](#-local-setup-develop--contribute) 
https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 @@ -206,6 +207,59 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c

+## ๐Ÿš€ Quick Installation with uvx + +If you have [uv](https://docs.astral.sh/uv/) installed, you can run the LinkedIn MCP Server directly without cloning: + +```bash +# Run directly from GitHub (latest version) +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --help + +# Run with your LinkedIn cookie +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --cookie "your_linkedin_cookie" +``` + +**Client Configuration for uvx:** +```json +{ + "mcpServers": { + "linkedin": { + "command": "uvx", + "args": [ + "--from", + "git+https://github.com/stickerdaniel/linkedin-mcp-server", + "linkedin-mcp-server" + ], + "env": { + "LINKEDIN_COOKIE": "your_linkedin_cookie_here" + } + } + } +} +``` + +
+โš ๏ธ Troubleshooting uvx + +**Installation issues:** +- Ensure you have uv installed: `curl -LsSf https://astral.sh/uv/install.sh | sh` + +**Cookie Setup:** +1. Open Chrome DevTools on LinkedIn.com (F12) +2. Go to Application โ†’ Cookies โ†’ linkedin.com +3. Find the cookie named `li_at` +4. Copy the entire value (starts with `AQE...`) +5. Format as: `li_at=YOUR_COOKIE_VALUE` +6. Use this value as your `LINKEDIN_COOKIE` in the configuration + +**Authentication:** +- Cookie can be passed via `--cookie` flag or `LINKEDIN_COOKIE` environment variable +- Make sure you have only one active LinkedIn session per cookie +
+ +
+
+ ## ๐Ÿ Local Setup (Develop & Contribute) **Prerequisites:** [Chrome browser](https://www.google.com/chrome/) and [Git](https://git-scm.com/downloads) installed From e4473a50fb0b0562105f7e2a1756b54837fc4c4a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 5 Aug 2025 10:30:18 -0400 Subject: [PATCH 228/565] refactor(structure): restructure package to follow Python best practices - Move main.py to linkedin_mcp_server/cli_main.py for proper package structure - Update __main__.py to import from cli_main instead of root main - Update pyproject.toml to use setuptools properly without py-modules - Fix all documentation references from main.py to -m linkedin_mcp_server - Update cookie format in README to consistently use li_at=YOUR_COOKIE_VALUE - Update Dockerfile ENTRYPOINT to use -m linkedin_mcp_server - Fix cli.py help references from main.py to proper command format This ensures the package can be installed and run correctly via uvx and follows standard Python package structure conventions. --- Dockerfile | 2 +- README.md | 14 +++++++------- linkedin_mcp_server/__main__.py | 5 ++--- linkedin_mcp_server/cli.py | 3 ++- main.py => linkedin_mcp_server/cli_main.py | 4 ++-- pyproject.toml | 5 +---- 6 files changed, 15 insertions(+), 18 deletions(-) rename main.py => linkedin_mcp_server/cli_main.py (99%) diff --git a/Dockerfile b/Dockerfile index 5fcbb3a0..746a1738 100644 --- a/Dockerfile +++ b/Dockerfile @@ -25,5 +25,5 @@ RUN adduser -D -u 1000 mcpuser && chown -R mcpuser:mcpuser /app USER mcpuser # Set entrypoint and default arguments -ENTRYPOINT ["uv", "run", "main.py"] +ENTRYPOINT ["uv", "run", "-m", "linkedin_mcp_server"] CMD [] diff --git a/README.md b/README.md index 411f0f13..f649013c 100644 --- a/README.md +++ b/README.md @@ -64,7 +64,7 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c "stickerdaniel/linkedin-mcp-server:latest" ], "env": { - "LINKEDIN_COOKIE": "XXXXXX..." 
+ "LINKEDIN_COOKIE": "li_at=YOUR_COOKIE_VALUE" } } } @@ -120,7 +120,7 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c **HTTP Mode Example (for web-based MCP clients):** ```bash docker run -it --rm \ - -e LINKEDIN_COOKIE="your_linkedin_cookie" \ + -e LINKEDIN_COOKIE="li_at=YOUR_COOKIE_VALUE" \ -p 8080:8080 \ stickerdaniel/linkedin-mcp-server:latest \ --transport streamable-http --host 0.0.0.0 --port 8080 --path /mcp @@ -216,7 +216,7 @@ If you have [uv](https://docs.astral.sh/uv/) installed, you can run the LinkedIn uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --help # Run with your LinkedIn cookie -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --cookie "your_linkedin_cookie" +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --cookie "li_at=YOUR_COOKIE_VALUE" ``` **Client Configuration for uvx:** @@ -231,7 +231,7 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp "linkedin-mcp-server" ], "env": { - "LINKEDIN_COOKIE": "your_linkedin_cookie_here" + "LINKEDIN_COOKIE": "li_at=YOUR_COOKIE_VALUE" } } } @@ -293,7 +293,7 @@ uv run pre-commit install # 5. 
Start the server once manually # You will be prompted to enter your LinkedIn credentials, and they will be securely stored in your OS keychain # Once logged in, your cookie will be stored in your OS keychain and used for subsequent runs until it expires -uv run main.py --no-headless --no-lazy-init +uv run -m linkedin_mcp_server --no-headless --no-lazy-init ``` ### Local Setup Help @@ -316,7 +316,7 @@ uv run main.py --no-headless --no-lazy-init **HTTP Mode Example (for web-based MCP clients):** ```bash -uv run main.py --transport streamable-http --host 127.0.0.1 --port 8000 --path /mcp +uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --port 8000 --path /mcp ``` **Claude Desktop:** @@ -325,7 +325,7 @@ uv run main.py --transport streamable-http --host 127.0.0.1 --port 8000 --path / "mcpServers": { "linkedin": { "command": "uv", - "args": ["--directory", "/path/to/linkedin-mcp-server", "run", "main.py"] + "args": ["--directory", "/path/to/linkedin-mcp-server", "run", "-m", "linkedin_mcp_server"] } } } diff --git a/linkedin_mcp_server/__main__.py b/linkedin_mcp_server/__main__.py index 7e78e50f..80dc0679 100644 --- a/linkedin_mcp_server/__main__.py +++ b/linkedin_mcp_server/__main__.py @@ -1,8 +1,7 @@ #!/usr/bin/env python3 """Entry point for linkedin-mcp-server command.""" -# Import main function from the main module at package root -import main +from linkedin_mcp_server.cli_main import main if __name__ == "__main__": - main.main() + main() diff --git a/linkedin_mcp_server/cli.py b/linkedin_mcp_server/cli.py index efce3c7a..5757c88b 100644 --- a/linkedin_mcp_server/cli.py +++ b/linkedin_mcp_server/cli.py @@ -46,7 +46,8 @@ def print_claude_config() -> None: "--directory", current_dir, "run", - "main.py", + "-m", + "linkedin_mcp_server", "--no-setup", ] diff --git a/main.py b/linkedin_mcp_server/cli_main.py similarity index 99% rename from main.py rename to linkedin_mcp_server/cli_main.py index fd072027..610455fb 100644 --- a/main.py +++ 
b/linkedin_mcp_server/cli_main.py @@ -1,6 +1,6 @@ -# main.py +# linkedin_mcp_server/cli_main.py """ -LinkedIn MCP Server - Main application entry point. +LinkedIn MCP Server - Main CLI application entry point. Implements a three-phase startup: 1. Authentication Setup Phase - Credential validation and session establishment diff --git a/pyproject.toml b/pyproject.toml index d6e67fc6..fe068226 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,15 +13,12 @@ dependencies = [ ] [project.scripts] -linkedin-mcp-server = "main:main" +linkedin-mcp-server = "linkedin_mcp_server.cli_main:main" [build-system] requires = ["setuptools>=68.0", "wheel"] build-backend = "setuptools.build_meta" -[tool.setuptools] -py-modules = ["main"] - [tool.setuptools.packages.find] include = ["linkedin_mcp_server*"] exclude = ["assets*", "docs*", "tests*"] From 2d692ce57d2c9f90cb4d51598cfaf5e266e29dc4 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 5 Aug 2025 10:57:25 -0400 Subject: [PATCH 229/565] fix(cli): correct path to pyproject.toml in get_version function --- linkedin_mcp_server/cli_main.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/linkedin_mcp_server/cli_main.py b/linkedin_mcp_server/cli_main.py index 610455fb..4d345821 100644 --- a/linkedin_mcp_server/cli_main.py +++ b/linkedin_mcp_server/cli_main.py @@ -278,7 +278,9 @@ def get_version() -> str: import os import tomllib - pyproject_path = os.path.join(os.path.dirname(__file__), "pyproject.toml") + pyproject_path = os.path.join( + os.path.dirname(os.path.dirname(__file__)), "pyproject.toml" + ) with open(pyproject_path, "rb") as f: data = tomllib.load(f) return data["project"]["version"] From 740fd0852d5f775b584e725b856313d1def974d5 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 6 Aug 2025 19:36:55 -0400 Subject: [PATCH 230/565] docs(readme): update LinkedIn cookie retrieval methods and ux guide --- README.md | 151 +++++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 115 
insertions(+), 36 deletions(-) diff --git a/README.md b/README.md index f649013c..a86e1060 100644 --- a/README.md +++ b/README.md @@ -73,17 +73,6 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c ### Getting the LinkedIn Cookie
-๐Ÿณ Docker get-cookie method - -**Run the server with the `--get-cookie` flag:** -```bash -docker run -it --rm \ - stickerdaniel/linkedin-mcp-server:latest \ - --get-cookie -``` -Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method below. -
-
๐ŸŒ Chrome DevTools Guide 1. Open LinkedIn and login @@ -93,6 +82,17 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c 5. Copy the **Value** field (this is your LinkedIn session cookie) 6. Use this value as your `LINKEDIN_COOKIE` in the configuration +
+
+๐Ÿณ Docker get-cookie method + +**Run the server with the `--get-cookie` flag:** +```bash +docker run -it --rm \ + stickerdaniel/linkedin-mcp-server:latest \ + --get-cookie +``` +Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method above.
> [!NOTE] @@ -164,17 +164,6 @@ docker run -it --rm \ ### Getting the LinkedIn Cookie
-๐Ÿณ Docker get-cookie method - -**Run the server with the `--get-cookie` flag:** -```bash -docker run -it --rm \ - stickerdaniel/linkedin-mcp-server:latest \ - --get-cookie -``` -Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method below. -
-
๐ŸŒ Chrome DevTools Guide 1. Open LinkedIn and login @@ -184,6 +173,17 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c 5. Copy the **Value** field (this is your LinkedIn session cookie) 6. Use this value as your `LINKEDIN_COOKIE` in the configuration +
+
+๐Ÿณ Docker get-cookie method + +**Run the server with the `--get-cookie` flag:** +```bash +docker run -it --rm \ + stickerdaniel/linkedin-mcp-server:latest \ + --get-cookie +``` +Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method above.
> [!NOTE] @@ -207,9 +207,13 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c

-## ๐Ÿš€ Quick Installation with uvx +## ๐Ÿš€ uvx Setup (Quick Install - Universal) + +**Prerequisites:** Make sure you have [uv](https://docs.astral.sh/uv/) installed. + +### Installation -If you have [uv](https://docs.astral.sh/uv/) installed, you can run the LinkedIn MCP Server directly without cloning: +Run directly from GitHub without cloning: ```bash # Run directly from GitHub (latest version) @@ -219,7 +223,38 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --cookie "li_at=YOUR_COOKIE_VALUE" ``` -**Client Configuration for uvx:** +### Getting the LinkedIn Cookie +
+๐ŸŒ Chrome DevTools Guide + +1. Open LinkedIn and login +2. Open Chrome DevTools (F12 or right-click โ†’ Inspect) +3. Go to **Application** > **Storage** > **Cookies** > **https://www.linkedin.com** +4. Find the cookie named `li_at` +5. Copy the **Value** field (this is your LinkedIn session cookie) +6. Use this value as your `LINKEDIN_COOKIE` in the configuration + +
+ +
+๐Ÿš€ uvx get-cookie method + +**Run the server with the `--get-cookie` flag:** +```bash +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server \ + linkedin-mcp-server --get-cookie +``` +Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method above. +
+ +> [!NOTE] +> The cookie will expire during the next 30 days. Just get the new cookie and update your client config. There are also many cookie manager extensions that you can use to quickly copy the cookie. + +### uvx Setup Help +
+๐Ÿ”ง Configuration + +**Client Configuration:** ```json { "mcpServers": { @@ -238,23 +273,67 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp } ``` +**Transport Modes:** +- **Default (stdio)**: Standard communication for local MCP servers +- **Streamable HTTP**: For web-based MCP server + +**CLI Options:** +- `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) +- `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call +- `--transport {stdio,streamable-http}` - Set transport mode +- `--host HOST` - HTTP server host (default: 127.0.0.1) +- `--port PORT` - HTTP server port (default: 8000) +- `--path PATH` - HTTP server path (default: /mcp) +- `--get-cookie` - Attempt to login with email and password and extract the LinkedIn cookie +- `--cookie {cookie}` - Pass a specific LinkedIn cookie for login +- `--user-agent {user_agent}` - Specify custom user agent string to prevent anti-scraping detection + +**Basic Usage Examples:** +```bash +# Run with cookie from environment variable +LINKEDIN_COOKIE="YOUR_COOKIE_VALUE" uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server + +# Run with cookie via flag +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --cookie "YOUR_COOKIE_VALUE" + +# Run with debug logging +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --log-level DEBUG + +# Extract cookie with credentials +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-cookie +``` + +**HTTP Mode Example (for web-based MCP clients):** +```bash +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server \ + --transport streamable-http --host 127.0.0.1 --port 8080 --path /mcp +``` + +**Test with mcp inspector:** +1. Install and run mcp inspector ```bunx @modelcontextprotocol/inspector``` +2. 
Click pre-filled token url to open the inspector in your browser +3. Select `Streamable HTTP` as `Transport Type` +4. Set `URL` to `http://localhost:8080/mcp` +5. Connect +6. Test tools + +
+
-โš ๏ธ Troubleshooting uvx +โ— Troubleshooting **Installation issues:** - Ensure you have uv installed: `curl -LsSf https://astral.sh/uv/install.sh | sh` +- Check uv version: `uv --version` (should be 0.4.0 or higher) -**Cookie Setup:** -1. Open Chrome DevTools on LinkedIn.com (F12) -2. Go to Application โ†’ Cookies โ†’ linkedin.com -3. Find the cookie named `li_at` -4. Copy the entire value (starts with `AQE...`) -5. Format as: `li_at=YOUR_COOKIE_VALUE` -6. Use this value as your `LINKEDIN_COOKIE` in the configuration - -**Authentication:** +**Cookie issues:** +- Ensure your LinkedIn cookie is set and correct - Cookie can be passed via `--cookie` flag or `LINKEDIN_COOKIE` environment variable -- Make sure you have only one active LinkedIn session per cookie +- Make sure you have only one active LinkedIn session per cookie at a time + +**Login issues:** +- LinkedIn may require a login confirmation in the LinkedIn mobile app for --get-cookie +- You might get a captcha challenge if you logged in a lot of times in a short period

From 4e4709357c9b7e754ad9e52718a8d86dba78f5de Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 6 Aug 2025 19:37:17 -0400 Subject: [PATCH 231/565] chore(version): bump version to 1.4.0 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fe068226..891072d4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.3.3" +version = "1.4.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 1c0f6989..0956c41a 100644 --- a/uv.lock +++ b/uv.lock @@ -658,7 +658,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.3.3" +version = "1.4.0" source = { editable = "." } dependencies = [ { name = "fastmcp" }, From 4e413b74ae0d7244841990b7bff8388d73ed42aa Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 6 Aug 2025 23:37:41 +0000 Subject: [PATCH 232/565] chore(dxt): update manifest.json version to v1.4.0 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index fab02ce8..c7e918b3 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.3.3", + "version": "1.4.0", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-e", "LINKEDIN_COOKIE=${user_config.linkedin_cookie}", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:1.3.3" + "stickerdaniel/linkedin-mcp-server:1.4.0" ] } }, From ed03d703b573d8e1c1ca19d87a4b53444567fcc7 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Thu, 7 Aug 2025 00:06:47 -0400 Subject: [PATCH 233/565] Update README.md UVX badge & colors --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index a86e1060..7f627463 100644 --- a/README.md +++ b/README.md @@ -12,8 +12,8 @@ Through this LinkedIn MCP server, AI assistants like Claude can connect to your [![Docker](https://img.shields.io/badge/Docker-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup-recommended---universal) [![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_Extension-d97757?style=for-the-badge&logo=anthropic)](#-claude-desktop-dxt-extension) -[![uvx](https://img.shields.io/badge/uvx-Quick_Install-00E5FF?style=for-the-badge&logo=python&logoColor=white)](#-quick-installation-with-uvx) 
-[![Development](https://img.shields.io/badge/Development-Local_Setup-ffd343?style=for-the-badge&logo=python&logoColor=ffd343)](#-local-setup-develop--contribute) +[![uvx](https://img.shields.io/badge/uvx-Quick_Install-de5fe9?style=for-the-badge&logo=data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNDEiIGhlaWdodD0iNDEiIHZpZXdCb3g9IjAgMCA0MSA0MSIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZD0iTS01LjI4NjE5ZS0wNiAwLjE2ODYyOUwwLjA4NDMwOTggMjAuMTY4NUwwLjE1MTc2MiAzNi4xNjgzQzAuMTYxMDc1IDM4LjM3NzQgMS45NTk0NyA0MC4xNjA3IDQuMTY4NTkgNDAuMTUxNEwyMC4xNjg0IDQwLjA4NEwzMC4xNjg0IDQwLjA0MThMMzEuMTg1MiA0MC4wMzc1QzMzLjM4NzcgNDAuMDI4MiAzNS4xNjgzIDM4LjIwMjYgMzUuMTY4MyAzNlYzNkwzNy4wMDAzIDM2TDM3LjAwMDMgMzkuOTk5Mkw0MC4xNjgzIDM5Ljk5OTZMMzkuOTk5NiAtOS45NDY1M2UtMDdMMjEuNTk5OCAwLjA3NzU2ODlMMjEuNjc3NCAxNi4wMTg1TDIxLjY3NzQgMjUuOTk5OEwyMC4wNzc0IDI1Ljk5OThMMTguMzk5OCAyNS45OTk4TDE4LjQ3NzQgMTYuMDMyTDE4LjM5OTggMC4wOTEwNTkzTC01LjI4NjE5ZS0wNiAwLjE2ODYyOVoiIGZpbGw9IiNERTVGRTkiLz4KPC9zdmc+Cg==)](#-quick-installation-with-uvx) +[![Development](https://img.shields.io/badge/Development-Local_Setup-ffdc53?style=for-the-badge&logo=python&logoColor=ffdc53)](#-local-setup-develop--contribute) https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 From 946ccaddf380cefb8f4a40fca4a4059173c87b4a Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Thu, 7 Aug 2025 00:07:39 -0400 Subject: [PATCH 234/565] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 7f627463..7eb822b6 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ Through this LinkedIn MCP server, AI assistants like Claude can connect to your ## Installation Methods [![Docker](https://img.shields.io/badge/Docker-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup-recommended---universal) -[![Install DXT 
Extension](https://img.shields.io/badge/Claude_Desktop_Extension-d97757?style=for-the-badge&logo=anthropic)](#-claude-desktop-dxt-extension) +[![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_DXT-d97757?style=for-the-badge&logo=anthropic)](#-claude-desktop-dxt-extension) [![uvx](https://img.shields.io/badge/uvx-Quick_Install-de5fe9?style=for-the-badge&logo=data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNDEiIGhlaWdodD0iNDEiIHZpZXdCb3g9IjAgMCA0MSA0MSIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZD0iTS01LjI4NjE5ZS0wNiAwLjE2ODYyOUwwLjA4NDMwOTggMjAuMTY4NUwwLjE1MTc2MiAzNi4xNjgzQzAuMTYxMDc1IDM4LjM3NzQgMS45NTk0NyA0MC4xNjA3IDQuMTY4NTkgNDAuMTUxNEwyMC4xNjg0IDQwLjA4NEwzMC4xNjg0IDQwLjA0MThMMzEuMTg1MiA0MC4wMzc1QzMzLjM4NzcgNDAuMDI4MiAzNS4xNjgzIDM4LjIwMjYgMzUuMTY4MyAzNlYzNkwzNy4wMDAzIDM2TDM3LjAwMDMgMzkuOTk5Mkw0MC4xNjgzIDM5Ljk5OTZMMzkuOTk5NiAtOS45NDY1M2UtMDdMMjEuNTk5OCAwLjA3NzU2ODlMMjEuNjc3NCAxNi4wMTg1TDIxLjY3NzQgMjUuOTk5OEwyMC4wNzc0IDI1Ljk5OThMMTguMzk5OCAyNS45OTk4TDE4LjQ3NzQgMTYuMDMyTDE4LjM5OTggMC4wOTEwNTkzTC01LjI4NjE5ZS0wNiAwLjE2ODYyOVoiIGZpbGw9IiNERTVGRTkiLz4KPC9zdmc+Cg==)](#-quick-installation-with-uvx) [![Development](https://img.shields.io/badge/Development-Local_Setup-ffdc53?style=for-the-badge&logo=python&logoColor=ffdc53)](#-local-setup-develop--contribute) From 1a4d6c46565f10cf41698201f698b399ceb6deb4 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Thu, 7 Aug 2025 00:08:21 -0400 Subject: [PATCH 235/565] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 7eb822b6..31b1ef32 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ Through this LinkedIn MCP server, AI assistants like Claude can connect to your [![Docker](https://img.shields.io/badge/Docker-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup-recommended---universal) [![Install DXT 
Extension](https://img.shields.io/badge/Claude_Desktop_DXT-d97757?style=for-the-badge&logo=anthropic)](#-claude-desktop-dxt-extension) [![uvx](https://img.shields.io/badge/uvx-Quick_Install-de5fe9?style=for-the-badge&logo=data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNDEiIGhlaWdodD0iNDEiIHZpZXdCb3g9IjAgMCA0MSA0MSIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZD0iTS01LjI4NjE5ZS0wNiAwLjE2ODYyOUwwLjA4NDMwOTggMjAuMTY4NUwwLjE1MTc2MiAzNi4xNjgzQzAuMTYxMDc1IDM4LjM3NzQgMS45NTk0NyA0MC4xNjA3IDQuMTY4NTkgNDAuMTUxNEwyMC4xNjg0IDQwLjA4NEwzMC4xNjg0IDQwLjA0MThMMzEuMTg1MiA0MC4wMzc1QzMzLjM4NzcgNDAuMDI4MiAzNS4xNjgzIDM4LjIwMjYgMzUuMTY4MyAzNlYzNkwzNy4wMDAzIDM2TDM3LjAwMDMgMzkuOTk5Mkw0MC4xNjgzIDM5Ljk5OTZMMzkuOTk5NiAtOS45NDY1M2UtMDdMMjEuNTk5OCAwLjA3NzU2ODlMMjEuNjc3NCAxNi4wMTg1TDIxLjY3NzQgMjUuOTk5OEwyMC4wNzc0IDI1Ljk5OThMMTguMzk5OCAyNS45OTk4TDE4LjQ3NzQgMTYuMDMyTDE4LjM5OTggMC4wOTEwNTkzTC01LjI4NjE5ZS0wNiAwLjE2ODYyOVoiIGZpbGw9IiNERTVGRTkiLz4KPC9zdmc+Cg==)](#-quick-installation-with-uvx) -[![Development](https://img.shields.io/badge/Development-Local_Setup-ffdc53?style=for-the-badge&logo=python&logoColor=ffdc53)](#-local-setup-develop--contribute) +[![Development](https://img.shields.io/badge/Development-Local-ffdc53?style=for-the-badge&logo=python&logoColor=ffdc53)](#-local-setup-develop--contribute) https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 From 9b064b16b6f7a5731788ca2e60052582a6448807 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 7 Aug 2025 00:11:25 -0400 Subject: [PATCH 236/565] fix(readme): update UVX badge link for quick installation --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 31b1ef32..5182ad9f 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ Through this LinkedIn MCP server, AI assistants like Claude can connect to your 
[![Docker](https://img.shields.io/badge/Docker-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup-recommended---universal) [![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_DXT-d97757?style=for-the-badge&logo=anthropic)](#-claude-desktop-dxt-extension) -[![uvx](https://img.shields.io/badge/uvx-Quick_Install-de5fe9?style=for-the-badge&logo=data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNDEiIGhlaWdodD0iNDEiIHZpZXdCb3g9IjAgMCA0MSA0MSIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZD0iTS01LjI4NjE5ZS0wNiAwLjE2ODYyOUwwLjA4NDMwOTggMjAuMTY4NUwwLjE1MTc2MiAzNi4xNjgzQzAuMTYxMDc1IDM4LjM3NzQgMS45NTk0NyA0MC4xNjA3IDQuMTY4NTkgNDAuMTUxNEwyMC4xNjg0IDQwLjA4NEwzMC4xNjg0IDQwLjA0MThMMzEuMTg1MiA0MC4wMzc1QzMzLjM4NzcgNDAuMDI4MiAzNS4xNjgzIDM4LjIwMjYgMzUuMTY4MyAzNlYzNkwzNy4wMDAzIDM2TDM3LjAwMDMgMzkuOTk5Mkw0MC4xNjgzIDM5Ljk5OTZMMzkuOTk5NiAtOS45NDY1M2UtMDdMMjEuNTk5OCAwLjA3NzU2ODlMMjEuNjc3NCAxNi4wMTg1TDIxLjY3NzQgMjUuOTk5OEwyMC4wNzc0IDI1Ljk5OThMMTguMzk5OCAyNS45OTk4TDE4LjQ3NzQgMTYuMDMyTDE4LjM5OTggMC4wOTEwNTkzTC01LjI4NjE5ZS0wNiAwLjE2ODYyOVoiIGZpbGw9IiNERTVGRTkiLz4KPC9zdmc+Cg==)](#-quick-installation-with-uvx) 
+[![uvx](https://img.shields.io/badge/uvx-Quick_Install-de5fe9?style=for-the-badge&logo=data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNDEiIGhlaWdodD0iNDEiIHZpZXdCb3g9IjAgMCA0MSA0MSIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZD0iTS01LjI4NjE5ZS0wNiAwLjE2ODYyOUwwLjA4NDMwOTggMjAuMTY4NUwwLjE1MTc2MiAzNi4xNjgzQzAuMTYxMDc1IDM4LjM3NzQgMS45NTk0NyA0MC4xNjA3IDQuMTY4NTkgNDAuMTUxNEwyMC4xNjg0IDQwLjA4NEwzMC4xNjg0IDQwLjA0MThMMzEuMTg1MiA0MC4wMzc1QzMzLjM4NzcgNDAuMDI4MiAzNS4xNjgzIDM4LjIwMjYgMzUuMTY4MyAzNlYzNkwzNy4wMDAzIDM2TDM3LjAwMDMgMzkuOTk5Mkw0MC4xNjgzIDM5Ljk5OTZMMzkuOTk5NiAtOS45NDY1M2UtMDdMMjEuNTk5OCAwLjA3NzU2ODlMMjEuNjc3NCAxNi4wMTg1TDIxLjY3NzQgMjUuOTk5OEwyMC4wNzc0IDI1Ljk5OThMMTguMzk5OCAyNS45OTk4TDE4LjQ3NzQgMTYuMDMyTDE4LjM5OTggMC4wOTEwNTkzTC01LjI4NjE5ZS0wNiAwLjE2ODYyOVoiIGZpbGw9IiNERTVGRTkiLz4KPC9zdmc+Cg==)](#-uvx-setup-quick-install---universal) [![Development](https://img.shields.io/badge/Development-Local-ffdc53?style=for-the-badge&logo=python&logoColor=ffdc53)](#-local-setup-develop--contribute) https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 From b7cb45f8d169434ac47d8d82f4055fe2ce96cb7c Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Fri, 15 Aug 2025 13:41:35 -0400 Subject: [PATCH 237/565] "Update Claude PR Assistant workflow" --- .github/workflows/claude.yml | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index 8658b58d..bc773072 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -23,6 +23,7 @@ jobs: pull-requests: read issues: read id-token: write + actions: read # Required for Claude to read CI results on PRs steps: - name: Checkout repository uses: actions/checkout@v4 @@ -33,26 +34,31 @@ jobs: id: claude uses: anthropics/claude-code-action@beta with: - anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} - - # Optional: Specify model 
(defaults to Claude Sonnet 4, uncomment for Claude Opus 4) - # model: "claude-opus-4-20250514" + claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} + # This is an optional setting that allows Claude to read CI results on PRs + additional_permissions: | + actions: read + + # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4.1) + # model: "claude-opus-4-1-20250805" + # Optional: Customize the trigger phrase (default: @claude) # trigger_phrase: "/claude" - + # Optional: Trigger when specific user is assigned to an issue # assignee_trigger: "claude-bot" - + # Optional: Allow Claude to run specific commands # allowed_tools: "Bash(npm install),Bash(npm run build),Bash(npm run test:*),Bash(npm run lint:*)" - + # Optional: Add custom instructions for Claude to customize its behavior for your project # custom_instructions: | # Follow our coding standards # Ensure all new code has tests # Use TypeScript for new files - + # Optional: Custom environment variables for Claude # claude_env: | # NODE_ENV: test + From 65109b4b89f0f46280756d2f7b12d35b9a5559ad Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Fri, 15 Aug 2025 13:41:36 -0400 Subject: [PATCH 238/565] "Update Claude Code Review workflow" --- .github/workflows/claude-code-review.yml | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/.github/workflows/claude-code-review.yml b/.github/workflows/claude-code-review.yml index e9ebc818..a12225aa 100644 --- a/.github/workflows/claude-code-review.yml +++ b/.github/workflows/claude-code-review.yml @@ -17,14 +17,14 @@ jobs: # github.event.pull_request.user.login == 'external-contributor' || # github.event.pull_request.user.login == 'new-developer' || # github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' - + runs-on: ubuntu-latest permissions: contents: read pull-requests: read issues: read id-token: write - + steps: - name: 
Checkout repository uses: actions/checkout@v4 @@ -35,10 +35,10 @@ jobs: id: claude-review uses: anthropics/claude-code-action@beta with: - anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} + claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} - # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4) - # model: "claude-opus-4-20250514" + # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4.1) + # model: "claude-opus-4-1-20250805" # Direct prompt for automated review (no @claude mention needed) direct_prompt: | @@ -48,9 +48,12 @@ jobs: - Performance considerations - Security concerns - Test coverage - + Be constructive and helpful in your feedback. + # Optional: Use sticky comments to make Claude reuse the same comment on subsequent pushes to the same PR + # use_sticky_comment: true + # Optional: Customize review based on file types # direct_prompt: | # Review this PR focusing on: @@ -58,17 +61,18 @@ jobs: # - For API endpoints: Security, input validation, and error handling # - For React components: Performance, accessibility, and best practices # - For tests: Coverage, edge cases, and test quality - + # Optional: Different prompts for different authors # direct_prompt: | - # ${{ github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' && + # ${{ github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' && # 'Welcome! Please review this PR from a first-time contributor. Be encouraging and provide detailed explanations for any suggestions.' || # 'Please provide a thorough code review focusing on our coding standards and best practices.' 
}} - + # Optional: Add specific tools for running tests or linting # allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)" - + # Optional: Skip review for certain conditions # if: | # !contains(github.event.pull_request.title, '[skip-review]') && # !contains(github.event.pull_request.title, '[WIP]') + From c54e3fa518d2b5cd827262159771b9d85a6eae13 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 16 Aug 2025 12:05:57 -0400 Subject: [PATCH 239/565] style(workflows): clean up whitespace in YAML files --- .github/workflows/claude-code-review.yml | 17 ++++++++--------- .github/workflows/claude.yml | 13 ++++++------- 2 files changed, 14 insertions(+), 16 deletions(-) diff --git a/.github/workflows/claude-code-review.yml b/.github/workflows/claude-code-review.yml index a12225aa..7f6bb206 100644 --- a/.github/workflows/claude-code-review.yml +++ b/.github/workflows/claude-code-review.yml @@ -17,14 +17,14 @@ jobs: # github.event.pull_request.user.login == 'external-contributor' || # github.event.pull_request.user.login == 'new-developer' || # github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' - + runs-on: ubuntu-latest permissions: contents: read pull-requests: read issues: read id-token: write - + steps: - name: Checkout repository uses: actions/checkout@v4 @@ -48,12 +48,12 @@ jobs: - Performance considerations - Security concerns - Test coverage - + Be constructive and helpful in your feedback. 
# Optional: Use sticky comments to make Claude reuse the same comment on subsequent pushes to the same PR # use_sticky_comment: true - + # Optional: Customize review based on file types # direct_prompt: | # Review this PR focusing on: @@ -61,18 +61,17 @@ jobs: # - For API endpoints: Security, input validation, and error handling # - For React components: Performance, accessibility, and best practices # - For tests: Coverage, edge cases, and test quality - + # Optional: Different prompts for different authors # direct_prompt: | - # ${{ github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' && + # ${{ github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' && # 'Welcome! Please review this PR from a first-time contributor. Be encouraging and provide detailed explanations for any suggestions.' || # 'Please provide a thorough code review focusing on our coding standards and best practices.' }} - + # Optional: Add specific tools for running tests or linting # allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)" - + # Optional: Skip review for certain conditions # if: | # !contains(github.event.pull_request.title, '[skip-review]') && # !contains(github.event.pull_request.title, '[WIP]') - diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index bc773072..2b098a07 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -39,26 +39,25 @@ jobs: # This is an optional setting that allows Claude to read CI results on PRs additional_permissions: | actions: read - + # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4.1) # model: "claude-opus-4-1-20250805" - + # Optional: Customize the trigger phrase (default: @claude) # trigger_phrase: "/claude" - + # Optional: Trigger when specific user is assigned to an issue # assignee_trigger: "claude-bot" - + # Optional: Allow Claude to run specific commands # allowed_tools: "Bash(npm install),Bash(npm 
run build),Bash(npm run test:*),Bash(npm run lint:*)" - + # Optional: Add custom instructions for Claude to customize its behavior for your project # custom_instructions: | # Follow our coding standards # Ensure all new code has tests # Use TypeScript for new files - + # Optional: Custom environment variables for Claude # claude_env: | # NODE_ENV: test - From 539db5863f5ae52b387b54f4e92a9787b6d09d7e Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 23 Dec 2025 00:01:39 +0000 Subject: [PATCH 240/565] Add renovate.json --- renovate.json | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 renovate.json diff --git a/renovate.json b/renovate.json new file mode 100644 index 00000000..5db72dd6 --- /dev/null +++ b/renovate.json @@ -0,0 +1,6 @@ +{ + "$schema": "https://docs.renovatebot.com/renovate-schema.json", + "extends": [ + "config:recommended" + ] +} From 0559870a306420e33b752d4f50cd9eb9280b7593 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 23 Dec 2025 00:26:31 +0000 Subject: [PATCH 241/565] chore(deps): update python docker tag to v3.14 --- .python-version | 2 +- Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.python-version b/.python-version index e4fba218..6324d401 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.12 +3.14 diff --git a/Dockerfile b/Dockerfile index 746a1738..1691c66b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-alpine +FROM python:3.14-alpine # Install system dependencies including Chromium and ChromeDriver RUN apk add --no-cache \ From 6f2f6bb38db26eda78153c667b257b6d4016cfbc Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 23 Dec 2025 00:26:34 +0000 Subject: [PATCH 242/565] chore(deps): update actions/checkout action to v6 --- .github/workflows/ci.yml | 2 +- 
.github/workflows/claude-code-review.yml | 2 +- .github/workflows/claude.yml | 2 +- .github/workflows/release.yml | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fd182d07..c08555c1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Set up uv uses: astral-sh/setup-uv@v6 diff --git a/.github/workflows/claude-code-review.yml b/.github/workflows/claude-code-review.yml index 7f6bb206..368efa36 100644 --- a/.github/workflows/claude-code-review.yml +++ b/.github/workflows/claude-code-review.yml @@ -27,7 +27,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: fetch-depth: 1 diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index 2b098a07..b42c45ad 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -26,7 +26,7 @@ jobs: actions: read # Required for Claude to read CI results on PRs steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: fetch-depth: 1 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b2c9d1fa..6aa5f327 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -13,7 +13,7 @@ jobs: should-release: ${{ steps.check.outputs.should-release }} new-version: ${{ steps.check.outputs.new-version }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: fetch-depth: 2 # Need to compare with previous commit @@ -57,7 +57,7 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: fetch-depth: 0 From 726e62e19527b445a70b5f2045cfc182ec4fbe0d Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 24 Dec 2025 12:04:49 +0000 
Subject: [PATCH 243/565] chore(deps): update astral-sh/setup-uv action to v7 --- .github/workflows/ci.yml | 2 +- .github/workflows/release.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fd182d07..5efc4b3a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,7 +15,7 @@ jobs: uses: actions/checkout@v4 - name: Set up uv - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 with: enable-cache: true diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b2c9d1fa..24be9c26 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -18,7 +18,7 @@ jobs: fetch-depth: 2 # Need to compare with previous commit - name: Set up uv - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 with: enable-cache: true @@ -62,7 +62,7 @@ jobs: fetch-depth: 0 - name: Set up uv - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 with: enable-cache: true From 2a7eeab90dd84d2ddcefc420d5c8ce73714e5fa6 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 24 Dec 2025 12:04:52 +0000 Subject: [PATCH 244/565] chore(deps): update oven-sh/setup-bun action to v2 --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b2c9d1fa..67dfb124 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -67,7 +67,7 @@ jobs: enable-cache: true - name: Set up Bun - uses: oven-sh/setup-bun@v1 + uses: oven-sh/setup-bun@v2 - name: Update manifest.json version and Docker image run: | From 61d620a4e996a4c780d2734b098770142a5687d0 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Thu, 25 Dec 2025 00:18:34 +0100 Subject: [PATCH 245/565] "Update Claude PR Assistant workflow" --- .github/workflows/claude.yml | 27 
+++++++-------------------- 1 file changed, 7 insertions(+), 20 deletions(-) diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index 2b098a07..d300267f 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -32,7 +32,7 @@ jobs: - name: Run Claude Code id: claude - uses: anthropics/claude-code-action@beta + uses: anthropics/claude-code-action@v1 with: claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} @@ -40,24 +40,11 @@ jobs: additional_permissions: | actions: read - # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4.1) - # model: "claude-opus-4-1-20250805" + # Optional: Give a custom prompt to Claude. If this is not specified, Claude will perform the instructions specified in the comment that tagged it. + # prompt: 'Update the pull request description to include a summary of changes.' - # Optional: Customize the trigger phrase (default: @claude) - # trigger_phrase: "/claude" + # Optional: Add claude_args to customize behavior and configuration + # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md + # or https://code.claude.com/docs/en/cli-reference for available options + # claude_args: '--allowed-tools Bash(gh pr:*)' - # Optional: Trigger when specific user is assigned to an issue - # assignee_trigger: "claude-bot" - - # Optional: Allow Claude to run specific commands - # allowed_tools: "Bash(npm install),Bash(npm run build),Bash(npm run test:*),Bash(npm run lint:*)" - - # Optional: Add custom instructions for Claude to customize its behavior for your project - # custom_instructions: | - # Follow our coding standards - # Ensure all new code has tests - # Use TypeScript for new files - - # Optional: Custom environment variables for Claude - # claude_env: | - # NODE_ENV: test From 6e2062e2affe1203b619a62969affd81b796883b Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Thu, 25 Dec 2025 00:18:35 
+0100 Subject: [PATCH 246/565] "Update Claude Code Review workflow" --- .github/workflows/claude-code-review.yml | 38 ++++++------------------ 1 file changed, 9 insertions(+), 29 deletions(-) diff --git a/.github/workflows/claude-code-review.yml b/.github/workflows/claude-code-review.yml index 7f6bb206..8452b0f2 100644 --- a/.github/workflows/claude-code-review.yml +++ b/.github/workflows/claude-code-review.yml @@ -33,15 +33,13 @@ jobs: - name: Run Claude Code Review id: claude-review - uses: anthropics/claude-code-action@beta + uses: anthropics/claude-code-action@v1 with: claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} + prompt: | + REPO: ${{ github.repository }} + PR NUMBER: ${{ github.event.pull_request.number }} - # Optional: Specify model (defaults to Claude Sonnet 4, uncomment for Claude Opus 4.1) - # model: "claude-opus-4-1-20250805" - - # Direct prompt for automated review (no @claude mention needed) - direct_prompt: | Please review this pull request and provide feedback on: - Code quality and best practices - Potential bugs or issues @@ -49,29 +47,11 @@ jobs: - Security concerns - Test coverage - Be constructive and helpful in your feedback. - - # Optional: Use sticky comments to make Claude reuse the same comment on subsequent pushes to the same PR - # use_sticky_comment: true - - # Optional: Customize review based on file types - # direct_prompt: | - # Review this PR focusing on: - # - For TypeScript files: Type safety and proper interface usage - # - For API endpoints: Security, input validation, and error handling - # - For React components: Performance, accessibility, and best practices - # - For tests: Coverage, edge cases, and test quality + Use the repository's CLAUDE.md for guidance on style and conventions. Be constructive and helpful in your feedback. - # Optional: Different prompts for different authors - # direct_prompt: | - # ${{ github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' && - # 'Welcome! 
Please review this PR from a first-time contributor. Be encouraging and provide detailed explanations for any suggestions.' || - # 'Please provide a thorough code review focusing on our coding standards and best practices.' }} + Use `gh pr comment` with your Bash tool to leave your review as a comment on the PR. - # Optional: Add specific tools for running tests or linting - # allowed_tools: "Bash(npm run test),Bash(npm run lint),Bash(npm run typecheck)" + # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md + # or https://code.claude.com/docs/en/cli-reference for available options + claude_args: '--allowed-tools "Bash(gh issue view:*),Bash(gh search:*),Bash(gh issue list:*),Bash(gh pr comment:*),Bash(gh pr diff:*),Bash(gh pr view:*),Bash(gh pr list:*)"' - # Optional: Skip review for certain conditions - # if: | - # !contains(github.event.pull_request.title, '[skip-review]') && - # !contains(github.event.pull_request.title, '[WIP]') From 6771cb42ca9a8d8e9b7be5bfe2ea09b4a96ff46e Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 25 Dec 2025 00:23:42 +0100 Subject: [PATCH 247/565] style(workflows): ensure YAML files end with newline --- .github/workflows/claude-code-review.yml | 1 - .github/workflows/claude.yml | 1 - 2 files changed, 2 deletions(-) diff --git a/.github/workflows/claude-code-review.yml b/.github/workflows/claude-code-review.yml index 8452b0f2..e262939e 100644 --- a/.github/workflows/claude-code-review.yml +++ b/.github/workflows/claude-code-review.yml @@ -54,4 +54,3 @@ jobs: # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md # or https://code.claude.com/docs/en/cli-reference for available options claude_args: '--allowed-tools "Bash(gh issue view:*),Bash(gh search:*),Bash(gh issue list:*),Bash(gh pr comment:*),Bash(gh pr diff:*),Bash(gh pr view:*),Bash(gh pr list:*)"' - diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index d300267f..9471a059 100644 --- 
a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -47,4 +47,3 @@ jobs: # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md # or https://code.claude.com/docs/en/cli-reference for available options # claude_args: '--allowed-tools Bash(gh pr:*)' - From de07c9559dd012a09ea4ded8d2149bbbe0854b76 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 25 Dec 2025 10:07:15 +0100 Subject: [PATCH 248/565] fix(ci): downgrade Python from 3.14 to 3.13 Python 3.14 is too new and key dependencies lack support: - pydantic-core: PyO3 doesn't support Python 3.14 yet - lxml: No pre-built wheels for Python 3.14 Python 3.13 is still modern and has full ecosystem support. --- .python-version | 2 +- Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.python-version b/.python-version index 6324d401..24ee5b1b 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.14 +3.13 diff --git a/Dockerfile b/Dockerfile index 1691c66b..03393644 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.14-alpine +FROM python:3.13-alpine # Install system dependencies including Chromium and ChromeDriver RUN apk add --no-cache \ From 9ca18237d21fe6df20c8504e7c05aa3bc373b1f5 Mon Sep 17 00:00:00 2001 From: triepod-ai Date: Mon, 29 Dec 2025 13:39:59 -0600 Subject: [PATCH 249/565] feat: Add MCP tool annotations for improved LLM integration MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add ToolAnnotations to all 6 tools with appropriate hints: - get_person_profile: readOnly, openWorld (LinkedIn API) - get_company_profile: readOnly, openWorld (LinkedIn API) - get_job_details: readOnly, openWorld (LinkedIn API) - search_jobs: readOnly, openWorld (LinkedIn API) - get_recommended_jobs: readOnly, openWorld (LinkedIn API) - close_session: not readOnly, not openWorld (local session mgmt) Tool annotations help LLM clients understand tool behavior and make better decisions 
about tool selection and user confirmations. ๐Ÿค– Generated with [Claude Code](https://claude.com/claude-code) --- linkedin_mcp_server/server.py | 10 +++++++++- linkedin_mcp_server/tools/company.py | 10 +++++++++- linkedin_mcp_server/tools/job.py | 28 +++++++++++++++++++++++++--- linkedin_mcp_server/tools/person.py | 10 +++++++++- 4 files changed, 52 insertions(+), 6 deletions(-) diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index 7f7370a2..2f488445 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -11,6 +11,7 @@ from typing import Any, Dict from fastmcp import FastMCP +from mcp.types import ToolAnnotations from linkedin_mcp_server.tools.company import register_company_tools from linkedin_mcp_server.tools.job import register_job_tools @@ -29,7 +30,14 @@ def create_mcp_server() -> FastMCP: register_job_tools(mcp) # Register session management tool - @mcp.tool() + @mcp.tool( + annotations=ToolAnnotations( + title="Close Session", + readOnlyHint=False, + destructiveHint=False, + openWorldHint=False, + ) + ) async def close_session() -> Dict[str, Any]: """Close the current browser session and clean up resources.""" from linkedin_mcp_server.drivers.chrome import close_all_drivers diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 5a503584..c2a70077 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -11,6 +11,7 @@ from fastmcp import FastMCP from linkedin_scraper import Company +from mcp.types import ToolAnnotations from linkedin_mcp_server.error_handler import handle_tool_error, safe_get_driver @@ -25,7 +26,14 @@ def register_company_tools(mcp: FastMCP) -> None: mcp (FastMCP): The MCP server instance """ - @mcp.tool() + @mcp.tool( + annotations=ToolAnnotations( + title="Get Company Profile", + readOnlyHint=True, + destructiveHint=False, + openWorldHint=True, + ) + ) async def get_company_profile( company_name: str, 
get_employees: bool = False ) -> Dict[str, Any]: diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index d6513522..973a4b37 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -11,6 +11,7 @@ from fastmcp import FastMCP from linkedin_scraper import Job, JobSearch +from mcp.types import ToolAnnotations from linkedin_mcp_server.error_handler import ( handle_tool_error, @@ -29,7 +30,14 @@ def register_job_tools(mcp: FastMCP) -> None: mcp (FastMCP): The MCP server instance """ - @mcp.tool() + @mcp.tool( + annotations=ToolAnnotations( + title="Get Job Details", + readOnlyHint=True, + destructiveHint=False, + openWorldHint=True, + ) + ) async def get_job_details(job_id: str) -> Dict[str, Any]: """ Get job details for a specific job posting on LinkedIn @@ -55,7 +63,14 @@ async def get_job_details(job_id: str) -> Dict[str, Any]: except Exception as e: return handle_tool_error(e, "get_job_details") - @mcp.tool() + @mcp.tool( + annotations=ToolAnnotations( + title="Search Jobs", + readOnlyHint=True, + destructiveHint=False, + openWorldHint=True, + ) + ) async def search_jobs(search_term: str) -> List[Dict[str, Any]]: """ Search for jobs on LinkedIn using a search term. 
@@ -78,7 +93,14 @@ async def search_jobs(search_term: str) -> List[Dict[str, Any]]: except Exception as e: return handle_tool_error_list(e, "search_jobs") - @mcp.tool() + @mcp.tool( + annotations=ToolAnnotations( + title="Get Recommended Jobs", + readOnlyHint=True, + destructiveHint=False, + openWorldHint=True, + ) + ) async def get_recommended_jobs() -> List[Dict[str, Any]]: """ Get your personalized recommended jobs from LinkedIn diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index c9428a7d..a5c1b0a2 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -11,6 +11,7 @@ from fastmcp import FastMCP from linkedin_scraper import Person +from mcp.types import ToolAnnotations from linkedin_mcp_server.error_handler import handle_tool_error, safe_get_driver @@ -25,7 +26,14 @@ def register_person_tools(mcp: FastMCP) -> None: mcp (FastMCP): The MCP server instance """ - @mcp.tool() + @mcp.tool( + annotations=ToolAnnotations( + title="Get Person Profile", + readOnlyHint=True, + destructiveHint=False, + openWorldHint=True, + ) + ) async def get_person_profile(linkedin_username: str) -> Dict[str, Any]: """ Get a specific person's LinkedIn profile. 
From e0460c80584cae1fe905d83c1e27cf7b66906639 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 4 Jan 2026 14:14:42 +0100 Subject: [PATCH 250/565] fix(vscode): correct entry point in VS Code tasks Replace non-existent main.py with module execution (-m linkedin_mcp_server) in VS Code task configurations --- .vscode/tasks.json | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 1ed815b1..a92fb78c 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -41,13 +41,14 @@ "problemMatcher": [] }, { - "label": "uv run main.py --debug --no-headless --no-lazy-init", - "detail": "Run main.py in debug mode with visible window and login immediately", + "label": "uv run -m linkedin_mcp_server --debug --no-headless --no-lazy-init", + "detail": "Run server in debug mode with visible window and login immediately", "type": "shell", "command": "uv", "args": [ "run", - "main.py", + "-m", + "linkedin_mcp_server", "--debug", "--no-headless", "--no-lazy-init" @@ -64,13 +65,14 @@ "problemMatcher": [] }, { - "label": "uv run main.py --no-headless --no-lazy-init", - "detail": "Run main.py with visible window and login immediately", + "label": "uv run -m linkedin_mcp_server --no-headless --no-lazy-init", + "detail": "Run server with visible window and login immediately", "type": "shell", "command": "uv", "args": [ "run", - "main.py", + "-m", + "linkedin_mcp_server", "--no-headless", "--no-lazy-init" ], @@ -85,13 +87,14 @@ "problemMatcher": [] }, { - "label": "uv run main.py --no-headless --no-lazy-init --transport streamable-http", + "label": "uv run -m linkedin_mcp_server --no-headless --no-lazy-init --transport streamable-http", "detail": "Start HTTP MCP server on localhost:8000/mcp", "type": "shell", "command": "uv", "args": [ "run", - "main.py", + "-m", + "linkedin_mcp_server", "--no-headless", "--no-lazy-init", "--transport", From b942d15fdf7fc8a754fcbaf717da9aec37cd8714 Mon Sep 17 00:00:00 2001 
From: Daniel Sticker Date: Sun, 4 Jan 2026 16:19:30 +0100 Subject: [PATCH 251/565] fix(vscode): use --log-level DEBUG instead of --debug The CLI uses --log-level {DEBUG,INFO,WARNING,ERROR} not --debug --- .vscode/tasks.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index a92fb78c..92139fcf 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -41,7 +41,7 @@ "problemMatcher": [] }, { - "label": "uv run -m linkedin_mcp_server --debug --no-headless --no-lazy-init", + "label": "uv run -m linkedin_mcp_server --log-level DEBUG --no-headless --no-lazy-init", "detail": "Run server in debug mode with visible window and login immediately", "type": "shell", "command": "uv", @@ -49,7 +49,8 @@ "run", "-m", "linkedin_mcp_server", - "--debug", + "--log-level", + "DEBUG", "--no-headless", "--no-lazy-init" ], From f2b67c21437afab6bcd83aa5aa026a7a1cd264de Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 4 Jan 2026 16:51:40 +0100 Subject: [PATCH 252/565] fix(deps): upgrade fastmcp to fix 307 redirect issue Upgrade fastmcp from >=2.10.1 to >=2.14.0 to fix the 307 Temporary Redirect issue when using streamable-http transport. The fix was merged in FastMCP PR #896 and #998, which changed default paths to include trailing slashes and removed automatic path manipulation that caused redirect loops with Starlette's Mount routing. This also upgrades mcp from 1.10.1 to 1.25.0 which includes related fixes confirmed by users in modelcontextprotocol/python-sdk#1168. 
Resolves: #54 --- pyproject.toml | 2 +- uv.lock | 551 ++++++++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 520 insertions(+), 33 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 891072d4..17873fd5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ description = "MCP server for LinkedIn profile, company, and job scraping with C readme = "README.md" requires-python = ">=3.12" dependencies = [ - "fastmcp>=2.10.1", + "fastmcp>=2.14.0", "inquirer>=3.4.0", "keyring>=25.6.0", "linkedin-scraper", diff --git a/uv.lock b/uv.lock index 0956c41a..311cb3bb 100644 --- a/uv.lock +++ b/uv.lock @@ -118,14 +118,23 @@ wheels = [ [[package]] name = "authlib" -version = "1.6.0" +version = "1.6.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a2/9d/b1e08d36899c12c8b894a44a5583ee157789f26fc4b176f8e4b6217b56e1/authlib-1.6.0.tar.gz", hash = "sha256:4367d32031b7af175ad3a323d571dc7257b7099d55978087ceae4a0d88cd3210", size = 158371, upload-time = "2025-05-23T00:21:45.011Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/9b/b1661026ff24bc641b76b78c5222d614776b0c085bcfdac9bd15a1cb4b35/authlib-1.6.6.tar.gz", hash = "sha256:45770e8e056d0f283451d9996fbb59b70d45722b45d854d58f32878d0a40c38e", size = 164894, upload-time = "2025-12-12T08:01:41.464Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/51/321e821856452f7386c4e9df866f196720b1ad0c5ea1623ea7399969ae3b/authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd", size = 244005, upload-time = "2025-12-12T08:01:40.209Z" }, +] + +[[package]] +name = "beartype" +version = "0.22.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/94/1009e248bbfbab11397abca7193bea6626806be9a327d399810d523a07cb/beartype-0.22.9.tar.gz", hash = 
"sha256:8f82b54aa723a2848a56008d18875f91c1db02c32ef6a62319a002e3e25a975f", size = 1608866, upload-time = "2025-12-13T06:50:30.72Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/29/587c189bbab1ccc8c86a03a5d0e13873df916380ef1be461ebe6acebf48d/authlib-1.6.0-py2.py3-none-any.whl", hash = "sha256:91685589498f79e8655e8a8947431ad6288831d643f11c55c2143ffcc738048d", size = 239981, upload-time = "2025-05-23T00:21:43.075Z" }, + { url = "https://files.pythonhosted.org/packages/71/cc/18245721fa7747065ab478316c7fea7c74777d07f37ae60db2e84f8172e8/beartype-0.22.9-py3-none-any.whl", hash = "sha256:d16c9bbc61ea14637596c5f6fbff2ee99cbe3573e46a716401734ef50c3060c2", size = 1333658, upload-time = "2025-12-13T06:50:28.266Z" }, ] [[package]] @@ -142,13 +151,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/98/584f211c3a4bb38f2871fa937ee0cc83c130de50c955d6c7e2334dbf4acb/blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058", size = 58372, upload-time = "2023-02-04T02:25:43.093Z" }, ] +[[package]] +name = "cachetools" +version = "6.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/1d/ede8680603f6016887c062a2cf4fc8fdba905866a3ab8831aa8aa651320c/cachetools-6.2.4.tar.gz", hash = "sha256:82c5c05585e70b6ba2d3ae09ea60b79548872185d2f24ae1f2709d37299fd607", size = 31731, upload-time = "2025-12-15T18:24:53.744Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/fc/1d7b80d0eb7b714984ce40efc78859c022cd930e402f599d8ca9e39c78a4/cachetools-6.2.4-py3-none-any.whl", hash = "sha256:69a7a52634fed8b8bf6e24a050fb60bff1c9bd8f6d24572b99c32d4e71e62a51", size = 11551, upload-time = "2025-12-15T18:24:52.332Z" }, +] + [[package]] name = "certifi" -version = "2025.4.26" +version = "2026.1.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, ] [[package]] @@ -240,6 +258,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, ] +[[package]] +name = "cloudpickle" +version = "3.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/27/fb/576f067976d320f5f0114a8d9fa1215425441bb35627b1993e5afd8111e5/cloudpickle-3.1.2.tar.gz", hash = "sha256:7fda9eb655c9c230dab534f1983763de5835249750e85fbcef43aaa30a9a2414", size = 22330, upload-time = "2025-11-03T09:25:26.604Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl", hash = "sha256:9acb47f6afd73f60dc1df93bb801b472f05ff42fa6c84167d25cb206be1fbf4a", size = 22228, upload-time = "2025-11-03T09:25:25.534Z" }, +] + [[package]] name = "colorama" version = "0.4.6" @@ -326,6 +353,30 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c9/ad/51f212198681ea7b0deaaf8846ee10af99fba4e894f67b353524eab2bbe5/cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334", size = 3210375, upload-time = "2025-05-02T19:35:35.369Z" }, ] +[[package]] +name = "cyclopts" +version = "4.4.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "docstring-parser" }, + { name = "rich" }, + { name = "rich-rst" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8f/21/732453ae69d65d72fe37a34f8b1a455c72313b8b0a905b876da20ff7e81a/cyclopts-4.4.3.tar.gz", hash = "sha256:03797c71b49a39dcad8324d6655363056fb998e2ba0240940050331a7f63fe65", size = 159360, upload-time = "2025-12-28T18:57:03.831Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/28/03f9b8fbf396b3f2eaf65a7ff441ba2fb7dd397109d563a4e556dc5b3efb/cyclopts-4.4.3-py3-none-any.whl", hash = "sha256:951611a9d4d88d9916716ae281faca9af1cb79b88bb4f22bd0192cff54e7dec6", size = 196707, upload-time = "2025-12-28T18:57:04.884Z" }, +] + +[[package]] +name = "diskcache" +version = "5.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/21/1c1ffc1a039ddcc459db43cc108658f32c57d271d7289a2794e401d0fdb6/diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc", size = 67916, upload-time = "2023-08-31T06:12:00.316Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550, upload-time = "2023-08-31T06:11:58.822Z" }, +] + [[package]] name = "distlib" version = "0.3.9" @@ -344,6 +395,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, ] +[[package]] +name = "docstring-parser" +version = "0.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, +] + +[[package]] +name = "docutils" +version = "0.22.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/b6/03bb70946330e88ffec97aefd3ea75ba575cb2e762061e0e62a213befee8/docutils-0.22.4.tar.gz", hash = "sha256:4db53b1fde9abecbb74d91230d32ab626d94f6badfc575d6db9194a49df29968", size = 2291750, upload-time = "2025-12-18T19:00:26.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/10/5da547df7a391dcde17f59520a231527b8571e6f46fc8efb02ccb370ab12/docutils-0.22.4-py3-none-any.whl", hash = 
"sha256:d0013f540772d1420576855455d050a2180186c91c15779301ac2ccb3eeb68de", size = 633196, upload-time = "2025-12-18T19:00:18.077Z" }, +] + [[package]] name = "editor" version = "1.6.6" @@ -382,24 +451,49 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, ] +[[package]] +name = "fakeredis" +version = "2.33.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "redis" }, + { name = "sortedcontainers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5f/f9/57464119936414d60697fcbd32f38909bb5688b616ae13de6e98384433e0/fakeredis-2.33.0.tar.gz", hash = "sha256:d7bc9a69d21df108a6451bbffee23b3eba432c21a654afc7ff2d295428ec5770", size = 175187, upload-time = "2025-12-16T19:45:52.269Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/78/a850fed8aeef96d4a99043c90b818b2ed5419cd5b24a4049fd7cfb9f1471/fakeredis-2.33.0-py3-none-any.whl", hash = "sha256:de535f3f9ccde1c56672ab2fdd6a8efbc4f2619fc2f1acc87b8737177d71c965", size = 119605, upload-time = "2025-12-16T19:45:51.08Z" }, +] + +[package.optional-dependencies] +lua = [ + { name = "lupa" }, +] + [[package]] name = "fastmcp" -version = "2.10.1" +version = "2.14.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "authlib" }, + { name = "cyclopts" }, { name = "exceptiongroup" }, { name = "httpx" }, + { name = "jsonschema-path" }, { name = "mcp" }, { name = "openapi-pydantic" }, + { name = "platformdirs" }, + { name = "py-key-value-aio", extra = ["disk", "keyring", "memory"] }, { name = "pydantic", extra = ["email"] }, + { name = "pydocket" }, + { name = "pyperclip" }, { name = "python-dotenv" }, { name = "rich" }, - { name = "typer" }, + { name = "uvicorn" }, + { name = "websockets" }, ] 
-sdist = { url = "https://files.pythonhosted.org/packages/33/1f/0031ea07bcad9f9b38d3500772d2749ca2b16335b92bd012f1d2f86a853e/fastmcp-2.10.1.tar.gz", hash = "sha256:450c72e523926a2203c7eecdb4a8b0507506667bc8736b8b7bb44f6312424649", size = 2730387, upload-time = "2025-07-02T04:57:24.981Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/1e/e3528227688c248283f6d86869b1e900563ffc223eff00f4f923d2750365/fastmcp-2.14.2.tar.gz", hash = "sha256:bd23d1b808b6f446444f10114dac468b11bfb9153ed78628f5619763d0cf573e", size = 8272966, upload-time = "2025-12-31T15:26:13.433Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/a2/52ef74287ec5fe0e5a0ffedde7d0809da5ec3ac85f4e3f2ed5587b39471a/fastmcp-2.10.1-py3-none-any.whl", hash = "sha256:17d0acea04eeb3464c9eca42b6774fb06b38b72cface9af6a7482b3aa561db13", size = 182108, upload-time = "2025-07-02T04:57:23.529Z" }, + { url = "https://files.pythonhosted.org/packages/0d/67/8456d39484fcb7afd0defed21918e773ed59a98b39e5b633328527c88367/fastmcp-2.14.2-py3-none-any.whl", hash = "sha256:e33cd622e1ebd5110af6a981804525b6cd41072e3c7d68268ed69ef3be651aca", size = 413279, upload-time = "2025-12-31T15:26:11.178Z" }, ] [[package]] @@ -535,6 +629,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] +[[package]] +name = "importlib-metadata" +version = "8.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, +] + [[package]] name = "iniconfig" version = "2.1.0" @@ -627,6 +733,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a2/3d/023389198f69c722d039351050738d6755376c8fd343e91dc493ea485905/jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d", size = 88709, upload-time = "2025-05-26T18:48:08.417Z" }, ] +[[package]] +name = "jsonschema-path" +version = "0.3.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pathable" }, + { name = "pyyaml" }, + { name = "referencing" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6e/45/41ebc679c2a4fced6a722f624c18d658dee42612b83ea24c1caf7c0eb3a8/jsonschema_path-0.3.4.tar.gz", hash = "sha256:8365356039f16cc65fddffafda5f58766e34bebab7d6d105616ab52bc4297001", size = 11159, upload-time = "2025-01-24T14:33:16.547Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/58/3485da8cb93d2f393bce453adeef16896751f14ba3e2024bc21dc9597646/jsonschema_path-0.3.4-py3-none-any.whl", hash = "sha256:f502191fdc2b22050f9a81c9237be9d27145b9001c55842bece5e94e382e52f8", size = 14810, upload-time = "2025-01-24T14:33:14.652Z" }, +] + [[package]] name = "jsonschema-specifications" version = "2025.4.1" @@ -681,7 +802,7 @@ dev = [ [package.metadata] requires-dist = [ - { name = "fastmcp", specifier = ">=2.10.1" }, + { name = "fastmcp", specifier = ">=2.14.0" }, { name = "inquirer", specifier = ">=3.4.0" }, { name = "keyring", specifier = ">=25.6.0" }, { name = "linkedin-scraper", git = "https://github.com/stickerdaniel/linkedin_scraper.git" }, @@ -710,6 +831,58 @@ dependencies = [ { name = "selenium" }, ] 
+[[package]] +name = "lupa" +version = "2.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b8/1c/191c3e6ec6502e3dbe25a53e27f69a5daeac3e56de1f73c0138224171ead/lupa-2.6.tar.gz", hash = "sha256:9a770a6e89576be3447668d7ced312cd6fd41d3c13c2462c9dc2c2ab570e45d9", size = 7240282, upload-time = "2025-10-24T07:20:29.738Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/86/ce243390535c39d53ea17ccf0240815e6e457e413e40428a658ea4ee4b8d/lupa-2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47ce718817ef1cc0c40d87c3d5ae56a800d61af00fbc0fad1ca9be12df2f3b56", size = 951707, upload-time = "2025-10-24T07:18:03.884Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/cedea5e6cbeb54396fdcc55f6b741696f3f036d23cfaf986d50d680446da/lupa-2.6-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:7aba985b15b101495aa4b07112cdc08baa0c545390d560ad5cfde2e9e34f4d58", size = 1916703, upload-time = "2025-10-24T07:18:05.6Z" }, + { url = "https://files.pythonhosted.org/packages/24/be/3d6b5f9a8588c01a4d88129284c726017b2089f3a3fd3ba8bd977292fea0/lupa-2.6-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:b766f62f95b2739f2248977d29b0722e589dcf4f0ccfa827ccbd29f0148bd2e5", size = 985152, upload-time = "2025-10-24T07:18:08.561Z" }, + { url = "https://files.pythonhosted.org/packages/eb/23/9f9a05beee5d5dce9deca4cb07c91c40a90541fc0a8e09db4ee670da550f/lupa-2.6-cp312-cp312-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:00a934c23331f94cb51760097ebfab14b005d55a6b30a2b480e3c53dd2fa290d", size = 1159599, upload-time = "2025-10-24T07:18:10.346Z" }, + { url = "https://files.pythonhosted.org/packages/40/4e/e7c0583083db9d7f1fd023800a9767d8e4391e8330d56c2373d890ac971b/lupa-2.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21de9f38bd475303e34a042b7081aabdf50bd9bafd36ce4faea2f90fd9f15c31", size = 1038686, upload-time = 
"2025-10-24T07:18:12.112Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9f/5a4f7d959d4feba5e203ff0c31889e74d1ca3153122be4a46dca7d92bf7c/lupa-2.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cf3bda96d3fc41237e964a69c23647d50d4e28421111360274d4799832c560e9", size = 2071956, upload-time = "2025-10-24T07:18:14.572Z" }, + { url = "https://files.pythonhosted.org/packages/92/34/2f4f13ca65d01169b1720176aedc4af17bc19ee834598c7292db232cb6dc/lupa-2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a76ead245da54801a81053794aa3975f213221f6542d14ec4b859ee2e7e0323", size = 1057199, upload-time = "2025-10-24T07:18:16.379Z" }, + { url = "https://files.pythonhosted.org/packages/35/2a/5f7d2eebec6993b0dcd428e0184ad71afb06a45ba13e717f6501bfed1da3/lupa-2.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8dd0861741caa20886ddbda0a121d8e52fb9b5bb153d82fa9bba796962bf30e8", size = 1173693, upload-time = "2025-10-24T07:18:18.153Z" }, + { url = "https://files.pythonhosted.org/packages/e4/29/089b4d2f8e34417349af3904bb40bec40b65c8731f45e3fd8d497ca573e5/lupa-2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:239e63948b0b23023f81d9a19a395e768ed3da6a299f84e7963b8f813f6e3f9c", size = 2164394, upload-time = "2025-10-24T07:18:20.403Z" }, + { url = "https://files.pythonhosted.org/packages/f3/1b/79c17b23c921f81468a111cad843b076a17ef4b684c4a8dff32a7969c3f0/lupa-2.6-cp312-cp312-win32.whl", hash = "sha256:325894e1099499e7a6f9c351147661a2011887603c71086d36fe0f964d52d1ce", size = 1420647, upload-time = "2025-10-24T07:18:23.368Z" }, + { url = "https://files.pythonhosted.org/packages/b8/15/5121e68aad3584e26e1425a5c9a79cd898f8a152292059e128c206ee817c/lupa-2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c735a1ce8ee60edb0fe71d665f1e6b7c55c6021f1d340eb8c865952c602cd36f", size = 1688529, upload-time = "2025-10-24T07:18:25.523Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/1d/21176b682ca5469001199d8b95fa1737e29957a3d185186e7a8b55345f2e/lupa-2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:663a6e58a0f60e7d212017d6678639ac8df0119bc13c2145029dcba084391310", size = 947232, upload-time = "2025-10-24T07:18:27.878Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4c/d327befb684660ca13cf79cd1f1d604331808f9f1b6fb6bf57832f8edf80/lupa-2.6-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:d1f5afda5c20b1f3217a80e9bc1b77037f8a6eb11612fd3ada19065303c8f380", size = 1908625, upload-time = "2025-10-24T07:18:29.944Z" }, + { url = "https://files.pythonhosted.org/packages/66/8e/ad22b0a19454dfd08662237a84c792d6d420d36b061f239e084f29d1a4f3/lupa-2.6-cp313-cp313-macosx_11_0_x86_64.whl", hash = "sha256:26f2b3c085fe76e9119e48c1013c1cccdc1f51585d456858290475aa38e7089e", size = 981057, upload-time = "2025-10-24T07:18:31.553Z" }, + { url = "https://files.pythonhosted.org/packages/5c/48/74859073ab276bd0566c719f9ca0108b0cfc1956ca0d68678d117d47d155/lupa-2.6-cp313-cp313-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:60d2f902c7b96fb8ab98493dcff315e7bb4d0b44dc9dd76eb37de575025d5685", size = 1156227, upload-time = "2025-10-24T07:18:33.981Z" }, + { url = "https://files.pythonhosted.org/packages/09/6c/0e9ded061916877253c2266074060eb71ed99fb21d73c8c114a76725bce2/lupa-2.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a02d25dee3a3250967c36590128d9220ae02f2eda166a24279da0b481519cbff", size = 1035752, upload-time = "2025-10-24T07:18:36.32Z" }, + { url = "https://files.pythonhosted.org/packages/dd/ef/f8c32e454ef9f3fe909f6c7d57a39f950996c37a3deb7b391fec7903dab7/lupa-2.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6eae1ee16b886b8914ff292dbefbf2f48abfbdee94b33a88d1d5475e02423203", size = 2069009, upload-time = "2025-10-24T07:18:38.072Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/dc/15b80c226a5225815a890ee1c11f07968e0aba7a852df41e8ae6fe285063/lupa-2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0edd5073a4ee74ab36f74fe61450148e6044f3952b8d21248581f3c5d1a58be", size = 1056301, upload-time = "2025-10-24T07:18:40.165Z" }, + { url = "https://files.pythonhosted.org/packages/31/14/2086c1425c985acfb30997a67e90c39457122df41324d3c179d6ee2292c6/lupa-2.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0c53ee9f22a8a17e7d4266ad48e86f43771951797042dd51d1494aaa4f5f3f0a", size = 1170673, upload-time = "2025-10-24T07:18:42.426Z" }, + { url = "https://files.pythonhosted.org/packages/10/e5/b216c054cf86576c0191bf9a9f05de6f7e8e07164897d95eea0078dca9b2/lupa-2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:de7c0f157a9064a400d828789191a96da7f4ce889969a588b87ec80de9b14772", size = 2162227, upload-time = "2025-10-24T07:18:46.112Z" }, + { url = "https://files.pythonhosted.org/packages/59/2f/33ecb5bedf4f3bc297ceacb7f016ff951331d352f58e7e791589609ea306/lupa-2.6-cp313-cp313-win32.whl", hash = "sha256:ee9523941ae0a87b5b703417720c5d78f72d2f5bc23883a2ea80a949a3ed9e75", size = 1419558, upload-time = "2025-10-24T07:18:48.371Z" }, + { url = "https://files.pythonhosted.org/packages/f9/b4/55e885834c847ea610e111d87b9ed4768f0afdaeebc00cd46810f25029f6/lupa-2.6-cp313-cp313-win_amd64.whl", hash = "sha256:b1335a5835b0a25ebdbc75cf0bda195e54d133e4d994877ef025e218c2e59db9", size = 1683424, upload-time = "2025-10-24T07:18:50.976Z" }, + { url = "https://files.pythonhosted.org/packages/66/9d/d9427394e54d22a35d1139ef12e845fd700d4872a67a34db32516170b746/lupa-2.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:dcb6d0a3264873e1653bc188499f48c1fb4b41a779e315eba45256cfe7bc33c1", size = 953818, upload-time = "2025-10-24T07:18:53.378Z" }, + { url = "https://files.pythonhosted.org/packages/10/41/27bbe81953fb2f9ecfced5d9c99f85b37964cfaf6aa8453bb11283983721/lupa-2.6-cp314-cp314-macosx_11_0_universal2.whl", hash = 
"sha256:a37e01f2128f8c36106726cb9d360bac087d58c54b4522b033cc5691c584db18", size = 1915850, upload-time = "2025-10-24T07:18:55.259Z" }, + { url = "https://files.pythonhosted.org/packages/a3/98/f9ff60db84a75ba8725506bbf448fb085bc77868a021998ed2a66d920568/lupa-2.6-cp314-cp314-macosx_11_0_x86_64.whl", hash = "sha256:458bd7e9ff3c150b245b0fcfbb9bd2593d1152ea7f0a7b91c1d185846da033fe", size = 982344, upload-time = "2025-10-24T07:18:57.05Z" }, + { url = "https://files.pythonhosted.org/packages/41/f7/f39e0f1c055c3b887d86b404aaf0ca197b5edfd235a8b81b45b25bac7fc3/lupa-2.6-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:052ee82cac5206a02df77119c325339acbc09f5ce66967f66a2e12a0f3211cad", size = 1156543, upload-time = "2025-10-24T07:18:59.251Z" }, + { url = "https://files.pythonhosted.org/packages/9e/9c/59e6cffa0d672d662ae17bd7ac8ecd2c89c9449dee499e3eb13ca9cd10d9/lupa-2.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96594eca3c87dd07938009e95e591e43d554c1dbd0385be03c100367141db5a8", size = 1047974, upload-time = "2025-10-24T07:19:01.449Z" }, + { url = "https://files.pythonhosted.org/packages/23/c6/a04e9cef7c052717fcb28fb63b3824802488f688391895b618e39be0f684/lupa-2.6-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8faddd9d198688c8884091173a088a8e920ecc96cda2ffed576a23574c4b3f6", size = 2073458, upload-time = "2025-10-24T07:19:03.369Z" }, + { url = "https://files.pythonhosted.org/packages/e6/10/824173d10f38b51fc77785228f01411b6ca28826ce27404c7c912e0e442c/lupa-2.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:daebb3a6b58095c917e76ba727ab37b27477fb926957c825205fbda431552134", size = 1067683, upload-time = "2025-10-24T07:19:06.2Z" }, + { url = "https://files.pythonhosted.org/packages/b6/dc/9692fbcf3c924d9c4ece2d8d2f724451ac2e09af0bd2a782db1cef34e799/lupa-2.6-cp314-cp314-musllinux_1_2_i686.whl", hash = 
"sha256:f3154e68972befe0f81564e37d8142b5d5d79931a18309226a04ec92487d4ea3", size = 1171892, upload-time = "2025-10-24T07:19:08.544Z" }, + { url = "https://files.pythonhosted.org/packages/84/ff/e318b628d4643c278c96ab3ddea07fc36b075a57383c837f5b11e537ba9d/lupa-2.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e4dadf77b9fedc0bfa53417cc28dc2278a26d4cbd95c29f8927ad4d8fe0a7ef9", size = 2166641, upload-time = "2025-10-24T07:19:10.485Z" }, + { url = "https://files.pythonhosted.org/packages/12/f7/a6f9ec2806cf2d50826980cdb4b3cffc7691dc6f95e13cc728846d5cb793/lupa-2.6-cp314-cp314-win32.whl", hash = "sha256:cb34169c6fa3bab3e8ac58ca21b8a7102f6a94b6a5d08d3636312f3f02fafd8f", size = 1456857, upload-time = "2025-10-24T07:19:37.989Z" }, + { url = "https://files.pythonhosted.org/packages/c5/de/df71896f25bdc18360fdfa3b802cd7d57d7fede41a0e9724a4625b412c85/lupa-2.6-cp314-cp314-win_amd64.whl", hash = "sha256:b74f944fe46c421e25d0f8692aef1e842192f6f7f68034201382ac440ef9ea67", size = 1731191, upload-time = "2025-10-24T07:19:40.281Z" }, + { url = "https://files.pythonhosted.org/packages/47/3c/a1f23b01c54669465f5f4c4083107d496fbe6fb45998771420e9aadcf145/lupa-2.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0e21b716408a21ab65723f8841cf7f2f37a844b7a965eeabb785e27fca4099cf", size = 999343, upload-time = "2025-10-24T07:19:12.519Z" }, + { url = "https://files.pythonhosted.org/packages/c5/6d/501994291cb640bfa2ccf7f554be4e6914afa21c4026bd01bff9ca8aac57/lupa-2.6-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:589db872a141bfff828340079bbdf3e9a31f2689f4ca0d88f97d9e8c2eae6142", size = 2000730, upload-time = "2025-10-24T07:19:14.869Z" }, + { url = "https://files.pythonhosted.org/packages/53/a5/457ffb4f3f20469956c2d4c4842a7675e884efc895b2f23d126d23e126cc/lupa-2.6-cp314-cp314t-macosx_11_0_x86_64.whl", hash = "sha256:cd852a91a4a9d4dcbb9a58100f820a75a425703ec3e3f049055f60b8533b7953", size = 1021553, upload-time = "2025-10-24T07:19:17.123Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/6b/36bb5a5d0960f2a5c7c700e0819abb76fd9bf9c1d8a66e5106416d6e9b14/lupa-2.6-cp314-cp314t-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:0334753be028358922415ca97a64a3048e4ed155413fc4eaf87dd0a7e2752983", size = 1133275, upload-time = "2025-10-24T07:19:20.51Z" }, + { url = "https://files.pythonhosted.org/packages/19/86/202ff4429f663013f37d2229f6176ca9f83678a50257d70f61a0a97281bf/lupa-2.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:661d895cd38c87658a34780fac54a690ec036ead743e41b74c3fb81a9e65a6aa", size = 1038441, upload-time = "2025-10-24T07:19:22.509Z" }, + { url = "https://files.pythonhosted.org/packages/a7/42/d8125f8e420714e5b52e9c08d88b5329dfb02dcca731b4f21faaee6cc5b5/lupa-2.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6aa58454ccc13878cc177c62529a2056be734da16369e451987ff92784994ca7", size = 2058324, upload-time = "2025-10-24T07:19:24.979Z" }, + { url = "https://files.pythonhosted.org/packages/2b/2c/47bf8b84059876e877a339717ddb595a4a7b0e8740bacae78ba527562e1c/lupa-2.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1425017264e470c98022bba8cff5bd46d054a827f5df6b80274f9cc71dafd24f", size = 1060250, upload-time = "2025-10-24T07:19:27.262Z" }, + { url = "https://files.pythonhosted.org/packages/c2/06/d88add2b6406ca1bdec99d11a429222837ca6d03bea42ca75afa169a78cb/lupa-2.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:224af0532d216e3105f0a127410f12320f7c5f1aa0300bdf9646b8d9afb0048c", size = 1151126, upload-time = "2025-10-24T07:19:29.522Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a0/89e6a024c3b4485b89ef86881c9d55e097e7cb0bdb74efb746f2fa6a9a76/lupa-2.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9abb98d5a8fd27c8285302e82199f0e56e463066f88f619d6594a450bf269d80", size = 2153693, upload-time = "2025-10-24T07:19:31.379Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/36/a0f007dc58fc1bbf51fb85dcc82fcb1f21b8c4261361de7dab0e3d8521ef/lupa-2.6-cp314-cp314t-win32.whl", hash = "sha256:1849efeba7a8f6fb8aa2c13790bee988fd242ae404bd459509640eeea3d1e291", size = 1590104, upload-time = "2025-10-24T07:19:33.514Z" }, + { url = "https://files.pythonhosted.org/packages/7d/5e/db903ce9cf82c48d6b91bf6d63ae4c8d0d17958939a4e04ba6b9f38b8643/lupa-2.6-cp314-cp314t-win_amd64.whl", hash = "sha256:fc1498d1a4fc028bc521c26d0fad4ca00ed63b952e32fb95949bda76a04bad52", size = 1913818, upload-time = "2025-10-24T07:19:36.039Z" }, +] + [[package]] name = "lxml" version = "6.0.0" @@ -764,7 +937,7 @@ wheels = [ [[package]] name = "mcp" -version = "1.10.1" +version = "1.25.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -773,14 +946,18 @@ dependencies = [ { name = "jsonschema" }, { name = "pydantic" }, { name = "pydantic-settings" }, + { name = "pyjwt", extra = ["crypto"] }, { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, { name = "sse-starlette" }, { name = "starlette" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/68/63045305f29ff680a9cd5be360c755270109e6b76f696ea6824547ddbc30/mcp-1.10.1.tar.gz", hash = "sha256:aaa0957d8307feeff180da2d9d359f2b801f35c0c67f1882136239055ef034c2", size = 392969, upload-time = "2025-06-27T12:03:08.982Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d5/2d/649d80a0ecf6a1f82632ca44bec21c0461a9d9fc8934d38cb5b319f2db5e/mcp-1.25.0.tar.gz", hash = "sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802", size = 605387, upload-time = "2025-12-19T10:19:56.985Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/3f/435a5b3d10ae242a9d6c2b33175551173c3c61fe637dc893be05c4ed0aaf/mcp-1.10.1-py3-none-any.whl", hash = 
"sha256:4d08301aefe906dce0fa482289db55ce1db831e3e67212e65b5e23ad8454b3c5", size = 150878, upload-time = "2025-06-27T12:03:07.328Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fc/6dc7659c2ae5ddf280477011f4213a74f806862856b796ef08f028e664bf/mcp-1.25.0-py3-none-any.whl", hash = "sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a", size = 233076, upload-time = "2025-12-19T10:19:55.416Z" }, ] [[package]] @@ -885,6 +1062,75 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/12/cf/03675d8bd8ecbf4445504d8071adab19f5f993676795708e36402ab38263/openapi_pydantic-0.5.1-py3-none-any.whl", hash = "sha256:a3a09ef4586f5bd760a8df7f43028b60cafb6d9f61de2acba9574766255ab146", size = 96381, upload-time = "2025-01-08T19:29:25.275Z" }, ] +[[package]] +name = "opentelemetry-api" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/97/b9/3161be15bb8e3ad01be8be5a968a9237c3027c5be504362ff800fca3e442/opentelemetry_api-1.39.1.tar.gz", hash = "sha256:fbde8c80e1b937a2c61f20347e91c0c18a1940cecf012d62e65a7caf08967c9c", size = 65767, upload-time = "2025-12-11T13:32:39.182Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/df/d3f1ddf4bb4cb50ed9b1139cc7b1c54c34a1e7ce8fd1b9a37c0d1551a6bd/opentelemetry_api-1.39.1-py3-none-any.whl", hash = "sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950", size = 66356, upload-time = "2025-12-11T13:32:17.304Z" }, +] + +[[package]] +name = "opentelemetry-exporter-prometheus" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-sdk" }, + { name = "prometheus-client" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/14/39/7dafa6fff210737267bed35a8855b6ac7399b9e582b8cf1f25f842517012/opentelemetry_exporter_prometheus-0.60b1.tar.gz", hash = "sha256:a4011b46906323f71724649d301b4dc188aaa068852e814f4df38cc76eac616b", size = 14976, upload-time = "2025-12-11T13:32:42.944Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/0d/4be6bf5477a3eb3d917d2f17d3c0b6720cd6cb97898444a61d43cc983f5c/opentelemetry_exporter_prometheus-0.60b1-py3-none-any.whl", hash = "sha256:49f59178de4f4590e3cef0b8b95cf6e071aae70e1f060566df5546fad773b8fd", size = 13019, upload-time = "2025-12-11T13:32:23.974Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "packaging" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/41/0f/7e6b713ac117c1f5e4e3300748af699b9902a2e5e34c9cf443dde25a01fa/opentelemetry_instrumentation-0.60b1.tar.gz", hash = "sha256:57ddc7974c6eb35865af0426d1a17132b88b2ed8586897fee187fd5b8944bd6a", size = 31706, upload-time = "2025-12-11T13:36:42.515Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/d2/6788e83c5c86a2690101681aeef27eeb2a6bf22df52d3f263a22cee20915/opentelemetry_instrumentation-0.60b1-py3-none-any.whl", hash = "sha256:04480db952b48fb1ed0073f822f0ee26012b7be7c3eac1a3793122737c78632d", size = 33096, upload-time = "2025-12-11T13:35:33.067Z" }, +] + +[[package]] +name = "opentelemetry-sdk" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/fb/c76080c9ba07e1e8235d24cdcc4d125ef7aa3edf23eb4e497c2e50889adc/opentelemetry_sdk-1.39.1.tar.gz", hash = 
"sha256:cf4d4563caf7bff906c9f7967e2be22d0d6b349b908be0d90fb21c8e9c995cc6", size = 171460, upload-time = "2025-12-11T13:32:49.369Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/98/e91cf858f203d86f4eccdf763dcf01cf03f1dae80c3750f7e635bfa206b6/opentelemetry_sdk-1.39.1-py3-none-any.whl", hash = "sha256:4d5482c478513ecb0a5d938dcc61394e647066e0cc2676bee9f3af3f3f45f01c", size = 132565, upload-time = "2025-12-11T13:32:35.069Z" }, +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/df/553f93ed38bf22f4b999d9be9c185adb558982214f33eae539d3b5cd0858/opentelemetry_semantic_conventions-0.60b1.tar.gz", hash = "sha256:87c228b5a0669b748c76d76df6c364c369c28f1c465e50f661e39737e84bc953", size = 137935, upload-time = "2025-12-11T13:32:50.487Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/5e/5958555e09635d09b75de3c4f8b9cae7335ca545d77392ffe7331534c402/opentelemetry_semantic_conventions-0.60b1-py3-none-any.whl", hash = "sha256:9fa8c8b0c110da289809292b0591220d3a7b53c1526a23021e977d68597893fb", size = 219982, upload-time = "2025-12-11T13:32:36.955Z" }, +] + [[package]] name = "outcome" version = "1.3.0.post0" @@ -906,6 +1152,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, ] +[[package]] +name = "pathable" +version = "0.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/67/93/8f2c2075b180c12c1e9f6a09d1a985bc2036906b13dff1d8917e395f2048/pathable-0.4.4.tar.gz", hash = 
"sha256:6905a3cd17804edfac7875b5f6c9142a218c7caef78693c2dbbbfbac186d88b2", size = 8124, upload-time = "2025-01-10T18:43:13.247Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/eb/b6260b31b1a96386c0a880edebe26f89669098acea8e0318bff6adb378fd/pathable-0.4.4-py3-none-any.whl", hash = "sha256:5ae9e94793b6ef5a4cbe0a7ce9dbbefc1eec38df253763fd0aeeacf2762dbbc2", size = 9592, upload-time = "2025-01-10T18:43:11.88Z" }, +] + +[[package]] +name = "pathvalidate" +version = "3.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/2a/52a8da6fe965dea6192eb716b357558e103aea0a1e9a8352ad575a8406ca/pathvalidate-3.3.1.tar.gz", hash = "sha256:b18c07212bfead624345bb8e1d6141cdcf15a39736994ea0b94035ad2b1ba177", size = 63262, upload-time = "2025-06-15T09:07:20.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/70/875f4a23bfc4731703a5835487d0d2fb999031bd415e7d17c0ae615c18b7/pathvalidate-3.3.1-py3-none-any.whl", hash = "sha256:5263baab691f8e1af96092fa5137ee17df5bdfbd6cff1fcac4d6ef4bc2e1735f", size = 24305, upload-time = "2025-06-15T09:07:19.117Z" }, +] + [[package]] name = "platformdirs" version = "4.3.8" @@ -940,6 +1204,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload-time = "2025-03-18T21:35:19.343Z" }, ] +[[package]] +name = "prometheus-client" +version = "0.23.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/53/3edb5d68ecf6b38fcbcc1ad28391117d2a322d9a1a3eff04bfdb184d8c3b/prometheus_client-0.23.1.tar.gz", hash = "sha256:6ae8f9081eaaaf153a2e959d2e6c4f4fb57b12ef76c8c7980202f1e57b48b2ce", size = 80481, upload-time = "2025-09-18T20:47:25.043Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b8/db/14bafcb4af2139e046d03fd00dea7873e48eafe18b7d2797e73d6681f210/prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99", size = 61145, upload-time = "2025-09-18T20:47:23.875Z" }, +] + [[package]] name = "propcache" version = "0.3.2" @@ -997,6 +1270,47 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, ] +[[package]] +name = "py-key-value-aio" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "beartype" }, + { name = "py-key-value-shared" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/93/ce/3136b771dddf5ac905cc193b461eb67967cf3979688c6696e1f2cdcde7ea/py_key_value_aio-0.3.0.tar.gz", hash = "sha256:858e852fcf6d696d231266da66042d3355a7f9871650415feef9fca7a6cd4155", size = 50801, upload-time = "2025-11-17T16:50:04.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/10/72f6f213b8f0bce36eff21fda0a13271834e9eeff7f9609b01afdc253c79/py_key_value_aio-0.3.0-py3-none-any.whl", hash = "sha256:1c781915766078bfd608daa769fefb97e65d1d73746a3dfb640460e322071b64", size = 96342, upload-time = "2025-11-17T16:50:03.801Z" }, +] + +[package.optional-dependencies] +disk = [ + { name = "diskcache" }, + { name = "pathvalidate" }, +] +keyring = [ + { name = "keyring" }, +] +memory = [ + { name = "cachetools" }, +] +redis = [ + { name = "redis" }, +] + +[[package]] +name = "py-key-value-shared" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "beartype" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/7b/e4/1971dfc4620a3a15b4579fe99e024f5edd6e0967a71154771a059daff4db/py_key_value_shared-0.3.0.tar.gz", hash = "sha256:8fdd786cf96c3e900102945f92aa1473138ebe960ef49da1c833790160c28a4b", size = 11666, upload-time = "2025-11-17T16:50:06.849Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/e4/b8b0a03ece72f47dce2307d36e1c34725b7223d209fc679315ffe6a4e2c3/py_key_value_shared-0.3.0-py3-none-any.whl", hash = "sha256:5b0efba7ebca08bb158b1e93afc2f07d30b8f40c2fc12ce24a4c0d84f42f9298", size = 19560, upload-time = "2025-11-17T16:50:05.954Z" }, +] + [[package]] name = "pycparser" version = "2.22" @@ -1081,6 +1395,29 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0b/53/a64f03044927dc47aafe029c42a5b7aabc38dfb813475e0e1bf71c4a59d0/pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c", size = 30839, upload-time = "2025-02-27T10:10:30.711Z" }, ] +[[package]] +name = "pydocket" +version = "0.16.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cloudpickle" }, + { name = "fakeredis", extra = ["lua"] }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-prometheus" }, + { name = "opentelemetry-instrumentation" }, + { name = "prometheus-client" }, + { name = "py-key-value-aio", extra = ["memory", "redis"] }, + { name = "python-json-logger" }, + { name = "redis" }, + { name = "rich" }, + { name = "typer" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e0/c5/61dcfce4d50b66a3f09743294d37fab598b81bb0975054b7f732da9243ec/pydocket-0.16.3.tar.gz", hash = "sha256:78e9da576de09e9f3f410d2471ef1c679b7741ddd21b586c97a13872b69bd265", size = 297080, upload-time = "2025-12-23T23:37:33.32Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/94/93b7f5981aa04f922e0d9ce7326a4587866ec7e39f7c180ffcf408e66ee8/pydocket-0.16.3-py3-none-any.whl", hash 
= "sha256:e2b50925356e7cd535286255195458ac7bba15f25293356651b36d223db5dd7c", size = 67087, upload-time = "2025-12-23T23:37:31.829Z" }, +] + [[package]] name = "pygments" version = "2.19.1" @@ -1090,6 +1427,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, ] +[[package]] +name = "pyjwt" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, +] + +[package.optional-dependencies] +crypto = [ + { name = "cryptography" }, +] + [[package]] name = "pyperclip" version = "1.9.0" @@ -1154,6 +1505,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, ] +[[package]] +name = "python-json-logger" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/29/bf/eca6a3d43db1dae7070f70e160ab20b807627ba953663ba07928cdd3dc58/python_json_logger-4.0.0.tar.gz", hash = 
"sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f", size = 17683, upload-time = "2025-10-06T04:15:18.984Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/e5/fecf13f06e5e5f67e8837d777d1bc43fac0ed2b77a676804df5c34744727/python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2", size = 15548, upload-time = "2025-10-06T04:15:17.553Z" }, +] + [[package]] name = "python-multipart" version = "0.0.20" @@ -1163,6 +1523,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, ] +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + [[package]] name = "pywin32-ctypes" version = "0.2.3" @@ -1207,6 +1583,15 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/a9/10/e4b1e0e5b6b6745c8098c275b69bc9d73e9542d5c7da4f137542b499ed44/readchar-4.2.1-py3-none-any.whl", hash = "sha256:a769305cd3994bb5fa2764aa4073452dc105a4ec39068ffe6efd3c20c60acc77", size = 9350, upload-time = "2024-11-04T18:28:02.859Z" }, ] +[[package]] +name = "redis" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/c8/983d5c6579a411d8a99bc5823cc5712768859b5ce2c8afe1a65b37832c81/redis-7.1.0.tar.gz", hash = "sha256:b1cc3cfa5a2cb9c2ab3ba700864fb0ad75617b41f01352ce5779dabf6d5f9c3c", size = 4796669, upload-time = "2025-11-19T15:54:39.961Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/f0/8956f8a86b20d7bb9d6ac0187cf4cd54d8065bc9a1a09eb8011d4d326596/redis-7.1.0-py3-none-any.whl", hash = "sha256:23c52b208f92b56103e17c5d06bdc1a6c2c0b3106583985a76a18f83b265de2b", size = 354159, upload-time = "2025-11-19T15:54:38.064Z" }, +] + [[package]] name = "referencing" version = "0.36.2" @@ -1249,6 +1634,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, ] +[[package]] +name = "rich-rst" +version = "1.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "rich" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bc/6d/a506aaa4a9eaa945ed8ab2b7347859f53593864289853c5d6d62b77246e0/rich_rst-1.3.2.tar.gz", hash = "sha256:a1196fdddf1e364b02ec68a05e8ff8f6914fee10fbca2e6b6735f166bb0da8d4", size = 14936, upload-time = "2025-10-14T16:49:45.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/2f/b4530fbf948867702d0a3f27de4a6aab1d156f406d72852ab902c4d04de9/rich_rst-1.3.2-py3-none-any.whl", hash = 
"sha256:a99b4907cbe118cf9d18b0b44de272efa61f15117c61e39ebdc431baf5df722a", size = 12567, upload-time = "2025-10-14T16:49:42.953Z" }, +] + [[package]] name = "rpds-py" version = "0.26.0" @@ -1377,7 +1775,7 @@ wheels = [ [[package]] name = "selenium" -version = "4.34.0" +version = "4.39.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -1387,9 +1785,9 @@ dependencies = [ { name = "urllib3", extra = ["socks"] }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/13/44/a6df7eae7fe929f18ffe08221fb05215ce991adc718bbe693a8d46ff09b7/selenium-4.34.0.tar.gz", hash = "sha256:8b7eb05a0ed22f9bb2187fd256c28630824ad01d8397b4e68bc0af7dabf26c80", size = 895790, upload-time = "2025-06-29T07:30:09.263Z" } +sdist = { url = "https://files.pythonhosted.org/packages/af/19/27c1bf9eb1f7025632d35a956b50746efb4b10aa87f961b263fa7081f4c5/selenium-4.39.0.tar.gz", hash = "sha256:12f3325f02d43b6c24030fc9602b34a3c6865abbb1db9406641d13d108aa1889", size = 928575, upload-time = "2025-12-06T23:12:34.896Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/b3/6a043a6968f263e90537b48870f7366f91a6d4c5cc67e5b656311c98d0f5/selenium-4.34.0-py3-none-any.whl", hash = "sha256:fc3535cfd99a073c21bf9091519b48ed31b34bf2cbd132f62e8c732b2e815b2d", size = 9403599, upload-time = "2025-06-29T07:30:07.012Z" }, + { url = "https://files.pythonhosted.org/packages/58/d0/55a6b7c6f35aad4c8a54be0eb7a52c1ff29a59542fc3e655f0ecbb14456d/selenium-4.39.0-py3-none-any.whl", hash = "sha256:c85f65d5610642ca0f47dae9d5cc117cd9e831f74038bc09fe1af126288200f9", size = 9655249, upload-time = "2025-12-06T23:12:33.085Z" }, ] [[package]] @@ -1455,7 +1853,7 @@ wheels = [ [[package]] name = "trio" -version = "0.30.0" +version = "0.32.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -1465,9 +1863,9 @@ dependencies = [ { name = "sniffio" }, { name = "sortedcontainers" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/01/c1/68d582b4d3a1c1f8118e18042464bb12a7c1b75d64d75111b297687041e3/trio-0.30.0.tar.gz", hash = "sha256:0781c857c0c81f8f51e0089929a26b5bb63d57f927728a5586f7e36171f064df", size = 593776, upload-time = "2025-04-21T00:48:19.507Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/ce/0041ddd9160aac0031bcf5ab786c7640d795c797e67c438e15cfedf815c8/trio-0.32.0.tar.gz", hash = "sha256:150f29ec923bcd51231e1d4c71c7006e65247d68759dd1c19af4ea815a25806b", size = 605323, upload-time = "2025-10-31T07:18:17.466Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/8e/3f6dfda475ecd940e786defe6df6c500734e686c9cd0a0f8ef6821e9b2f2/trio-0.30.0-py3-none-any.whl", hash = "sha256:3bf4f06b8decf8d3cf00af85f40a89824669e2d033bb32469d34840edcfc22a5", size = 499194, upload-time = "2025-04-21T00:48:17.167Z" }, + { url = "https://files.pythonhosted.org/packages/41/bf/945d527ff706233636c73880b22c7c953f3faeb9d6c7e2e85bfbfd0134a0/trio-0.32.0-py3-none-any.whl", hash = "sha256:4ab65984ef8370b79a76659ec87aa3a30c5c7c83ff250b4de88c29a8ab6123c5", size = 512030, upload-time = "2025-10-31T07:18:15.885Z" }, ] [[package]] @@ -1526,32 +1924,32 @@ wheels = [ [[package]] name = "typing-extensions" -version = "4.14.1" +version = "4.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] [[package]] name = "typing-inspection" -version = "0.4.0" +version = "0.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222, upload-time = "2025-02-25T17:27:59.638Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125, upload-time = "2025-02-25T17:27:57.754Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] [[package]] name 
= "urllib3" -version = "2.4.0" +version = "2.6.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672, upload-time = "2025-04-10T15:23:39.232Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/24/a2a2ed9addd907787d7aa0355ba36a6cadf1768b934c652ea78acbd59dcd/urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797", size = 432930, upload-time = "2025-12-11T15:56:40.252Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" }, + { url = "https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size = 131182, upload-time = "2025-12-11T15:56:38.584Z" }, ] [package.optional-dependencies] @@ -1561,15 +1959,15 @@ socks = [ [[package]] name = "uvicorn" -version = "0.34.0" +version = "0.40.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568, upload-time = "2024-12-15T13:33:30.42Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = 
"sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761, upload-time = "2025-12-21T14:16:22.45Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315, upload-time = "2024-12-15T13:33:27.467Z" }, + { url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" }, ] [[package]] @@ -1604,6 +2002,86 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload-time = "2024-04-23T22:16:14.422Z" }, ] +[[package]] +name = "websockets" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, + { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, + { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, + { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" }, + { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = 
"2025-03-05T20:02:50.14Z" }, + { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, + { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +] + +[[package]] +name = "wrapt" +version = "1.17.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998, upload-time = "2025-08-12T05:51:47.138Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020, upload-time = "2025-08-12T05:51:35.906Z" }, + { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098, upload-time = "2025-08-12T05:51:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828", size = 88036, upload-time = "2025-08-12T05:52:34.784Z" }, + { url = "https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9", size = 88156, upload-time = "2025-08-12T05:52:13.599Z" }, + { url = "https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396", size = 87102, upload-time = "2025-08-12T05:52:14.56Z" }, + { url = "https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc", size = 87732, upload-time = "2025-08-12T05:52:36.165Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe", size = 36705, upload-time = "2025-08-12T05:53:07.123Z" }, + { url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c", size = 38877, upload-time = "2025-08-12T05:53:05.436Z" }, + { url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6", size = 36885, upload-time = "2025-08-12T05:52:54.367Z" }, + { url = "https://files.pythonhosted.org/packages/fc/f6/759ece88472157acb55fc195e5b116e06730f1b651b5b314c66291729193/wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0", size = 54003, upload-time = "2025-08-12T05:51:48.627Z" }, + { url = "https://files.pythonhosted.org/packages/4f/a9/49940b9dc6d47027dc850c116d79b4155f15c08547d04db0f07121499347/wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77", size = 39025, upload-time = "2025-08-12T05:51:37.156Z" }, + { url = "https://files.pythonhosted.org/packages/45/35/6a08de0f2c96dcdd7fe464d7420ddb9a7655a6561150e5fc4da9356aeaab/wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7", size = 39108, upload-time = "2025-08-12T05:51:58.425Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/37/6faf15cfa41bf1f3dba80cd3f5ccc6622dfccb660ab26ed79f0178c7497f/wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277", size = 88072, upload-time = "2025-08-12T05:52:37.53Z" }, + { url = "https://files.pythonhosted.org/packages/78/f2/efe19ada4a38e4e15b6dff39c3e3f3f73f5decf901f66e6f72fe79623a06/wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d", size = 88214, upload-time = "2025-08-12T05:52:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/40/90/ca86701e9de1622b16e09689fc24b76f69b06bb0150990f6f4e8b0eeb576/wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa", size = 87105, upload-time = "2025-08-12T05:52:17.914Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e0/d10bd257c9a3e15cbf5523025252cc14d77468e8ed644aafb2d6f54cb95d/wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050", size = 87766, upload-time = "2025-08-12T05:52:39.243Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8", size = 36711, upload-time = "2025-08-12T05:53:10.074Z" }, + { url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb", size = 38885, upload-time = "2025-08-12T05:53:08.695Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16", size = 36896, upload-time = "2025-08-12T05:52:55.34Z" }, + { url = "https://files.pythonhosted.org/packages/02/a2/cd864b2a14f20d14f4c496fab97802001560f9f41554eef6df201cd7f76c/wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39", size = 54132, upload-time = "2025-08-12T05:51:49.864Z" }, + { url = "https://files.pythonhosted.org/packages/d5/46/d011725b0c89e853dc44cceb738a307cde5d240d023d6d40a82d1b4e1182/wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235", size = 39091, upload-time = "2025-08-12T05:51:38.935Z" }, + { url = "https://files.pythonhosted.org/packages/2e/9e/3ad852d77c35aae7ddebdbc3b6d35ec8013af7d7dddad0ad911f3d891dae/wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c", size = 39172, upload-time = "2025-08-12T05:51:59.365Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f7/c983d2762bcce2326c317c26a6a1e7016f7eb039c27cdf5c4e30f4160f31/wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b", size = 87163, upload-time = "2025-08-12T05:52:40.965Z" }, + { url = "https://files.pythonhosted.org/packages/e4/0f/f673f75d489c7f22d17fe0193e84b41540d962f75fce579cf6873167c29b/wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa", size = 87963, upload-time = "2025-08-12T05:52:20.326Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/61/515ad6caca68995da2fac7a6af97faab8f78ebe3bf4f761e1b77efbc47b5/wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7", size = 86945, upload-time = "2025-08-12T05:52:21.581Z" }, + { url = "https://files.pythonhosted.org/packages/d3/bd/4e70162ce398462a467bc09e768bee112f1412e563620adc353de9055d33/wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4", size = 86857, upload-time = "2025-08-12T05:52:43.043Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b8/da8560695e9284810b8d3df8a19396a6e40e7518059584a1a394a2b35e0a/wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10", size = 37178, upload-time = "2025-08-12T05:53:12.605Z" }, + { url = "https://files.pythonhosted.org/packages/db/c8/b71eeb192c440d67a5a0449aaee2310a1a1e8eca41676046f99ed2487e9f/wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6", size = 39310, upload-time = "2025-08-12T05:53:11.106Z" }, + { url = "https://files.pythonhosted.org/packages/45/20/2cda20fd4865fa40f86f6c46ed37a2a8356a7a2fde0773269311f2af56c7/wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58", size = 37266, upload-time = "2025-08-12T05:52:56.531Z" }, + { url = "https://files.pythonhosted.org/packages/77/ed/dd5cf21aec36c80443c6f900449260b80e2a65cf963668eaef3b9accce36/wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a", size = 56544, upload-time = "2025-08-12T05:51:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/8d/96/450c651cc753877ad100c7949ab4d2e2ecc4d97157e00fa8f45df682456a/wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067", size = 40283, upload-time = "2025-08-12T05:51:39.912Z" }, + { url = "https://files.pythonhosted.org/packages/d1/86/2fcad95994d9b572db57632acb6f900695a648c3e063f2cd344b3f5c5a37/wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454", size = 40366, upload-time = "2025-08-12T05:52:00.693Z" }, + { url = "https://files.pythonhosted.org/packages/64/0e/f4472f2fdde2d4617975144311f8800ef73677a159be7fe61fa50997d6c0/wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e", size = 108571, upload-time = "2025-08-12T05:52:44.521Z" }, + { url = "https://files.pythonhosted.org/packages/cc/01/9b85a99996b0a97c8a17484684f206cbb6ba73c1ce6890ac668bcf3838fb/wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f", size = 113094, upload-time = "2025-08-12T05:52:22.618Z" }, + { url = "https://files.pythonhosted.org/packages/25/02/78926c1efddcc7b3aa0bc3d6b33a822f7d898059f7cd9ace8c8318e559ef/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056", size = 110659, upload-time = "2025-08-12T05:52:24.057Z" }, + { url = "https://files.pythonhosted.org/packages/dc/ee/c414501ad518ac3e6fe184753632fe5e5ecacdcf0effc23f31c1e4f7bfcf/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804", size = 106946, upload-time = "2025-08-12T05:52:45.976Z" }, + { url = "https://files.pythonhosted.org/packages/be/44/a1bd64b723d13bb151d6cc91b986146a1952385e0392a78567e12149c7b4/wrapt-1.17.3-cp314-cp314t-win32.whl", hash = 
"sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977", size = 38717, upload-time = "2025-08-12T05:53:15.214Z" }, + { url = "https://files.pythonhosted.org/packages/79/d9/7cfd5a312760ac4dd8bf0184a6ee9e43c33e47f3dadc303032ce012b8fa3/wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116", size = 41334, upload-time = "2025-08-12T05:53:14.178Z" }, + { url = "https://files.pythonhosted.org/packages/46/78/10ad9781128ed2f99dbc474f43283b13fea8ba58723e98844367531c18e9/wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6", size = 38471, upload-time = "2025-08-12T05:52:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, +] + [[package]] name = "wsproto" version = "1.2.0" @@ -1689,3 +2167,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003, upload-time = "2025-06-10T00:45:27.752Z" }, { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, ] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = 
"sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] From 625cef54dce0548c2a30d66a0db9ad2b4ded3265 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 4 Jan 2026 17:21:56 +0100 Subject: [PATCH 253/565] fix(logging): filter noisy fastmcp debug logs Add fakeredis and docket loggers to noise reduction to prevent DEBUG log pollution from FastMCP's internal task queue. --- linkedin_mcp_server/logging_config.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/linkedin_mcp_server/logging_config.py b/linkedin_mcp_server/logging_config.py index 35e0a735..a3448260 100644 --- a/linkedin_mcp_server/logging_config.py +++ b/linkedin_mcp_server/logging_config.py @@ -111,3 +111,5 @@ def configure_logging(log_level: str = "WARNING", json_format: bool = False) -> logging.getLogger("selenium").setLevel(logging.ERROR) logging.getLogger("urllib3").setLevel(logging.ERROR) logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR) + logging.getLogger("fakeredis").setLevel(logging.WARNING) + logging.getLogger("docket").setLevel(logging.WARNING) From 1e554806791b818dbda0b4a1262c80a19d6aac82 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 4 Jan 2026 18:19:09 +0100 Subject: [PATCH 254/565] docs: update tool status table for January 2026 --- README.md | 96 +++++++++++++++++++++++++++++++++++++++++++------------ 1 file changed, 76 insertions(+), 20 deletions(-) diff --git a/README.md b/README.md index 5182ad9f..6213e39a 100644 --- a/README.md +++ b/README.md @@ -15,36 +15,52 @@ Through this LinkedIn MCP server, AI assistants like Claude can connect to your 
[![uvx](https://img.shields.io/badge/uvx-Quick_Install-de5fe9?style=for-the-badge&logo=data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNDEiIGhlaWdodD0iNDEiIHZpZXdCb3g9IjAgMCA0MSA0MSIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZD0iTS01LjI4NjE5ZS0wNiAwLjE2ODYyOUwwLjA4NDMwOTggMjAuMTY4NUwwLjE1MTc2MiAzNi4xNjgzQzAuMTYxMDc1IDM4LjM3NzQgMS45NTk0NyA0MC4xNjA3IDQuMTY4NTkgNDAuMTUxNEwyMC4xNjg0IDQwLjA4NEwzMC4xNjg0IDQwLjA0MThMMzEuMTg1MiA0MC4wMzc1QzMzLjM4NzcgNDAuMDI4MiAzNS4xNjgzIDM4LjIwMjYgMzUuMTY4MyAzNlYzNkwzNy4wMDAzIDM2TDM3LjAwMDMgMzkuOTk5Mkw0MC4xNjgzIDM5Ljk5OTZMMzkuOTk5NiAtOS45NDY1M2UtMDdMMjEuNTk5OCAwLjA3NzU2ODlMMjEuNjc3NCAxNi4wMTg1TDIxLjY3NzQgMjUuOTk5OEwyMC4wNzc0IDI1Ljk5OThMMTguMzk5OCAyNS45OTk4TDE4LjQ3NzQgMTYuMDMyTDE4LjM5OTggMC4wOTEwNTkzTC01LjI4NjE5ZS0wNiAwLjE2ODYyOVoiIGZpbGw9IiNERTVGRTkiLz4KPC9zdmc+Cg==)](#-uvx-setup-quick-install---universal) [![Development](https://img.shields.io/badge/Development-Local-ffdc53?style=for-the-badge&logo=python&logoColor=ffdc53)](#-local-setup-develop--contribute) -https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 + ## Usage Examples + ``` What are my recommended jobs I can apply to? 
``` + ``` Research the background of this candidate https://www.linkedin.com/in/stickerdaniel/ ``` + ``` Get this company profile for partnership discussions https://www.linkedin.com/company/inframs/ ``` + ``` Suggest improvements for my CV to target this job posting https://www.linkedin.com/jobs/view/4252026496 ``` ## Features & Tool Status -> [!TIP] +> +> [!TIP] Available Tools +> > - **Profile Scraping** (`get_person_profile`): Get detailed information from a LinkedIn profile including work history, education, skills, and connections > - **Company Analysis** (`get_company_profile`): Extract comprehensive company information from a LinkedIn company profile name -> - **Job Details** (`get_job_details`): Retrieve specific job posting details using LinkedIn job IDs > - **Job Search** (`search_jobs`): Search for jobs with filters like keywords and location -> - **Recommended Jobs** (`get_recommended_jobs`): Get personalized job recommendations based on your profile > - **Session Management** (`close_session`): Properly close browser session and clean up resources -> [!NOTE] -> July 2025: All tools are currently functional and actively maintained. If you encounter any issues, please report them in the [GitHub issues](https://github.com/stickerdaniel/linkedin-mcp-server/issues). +**January 2026 Tool Status:** -
-
+| Tool | Status | Notes | +|------|--------|-------| +| `get_person_profile` | Works | Minor data formatting quirks | +| `get_company_profile` | Works | Minor data formatting quirks | +| `search_jobs` | Works | | +| `close_session` | Works | | +| `get_recommended_jobs` | Fails | See #70 for details | +| `get_job_details` | Fails | See #71 for details | + +> [!NOTE] Authentication +> The most reliable way to authenticate is to clone this repository and run the server manually in `--no-headless` mode where you can see the process visually and solve login challenges in the automated browser. +> +> The server can login with your email and password first time, and extract the cookie from the session for subsequent runs. From time to time, you might need to get a new cookie. +> +> Please try this approach first before opening an authentication related issue. ## ๐Ÿณ Docker Setup (Recommended - Universal) @@ -53,6 +69,7 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c ### Installation **Client Configuration:** + ```json { "mcpServers": { @@ -72,12 +89,13 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c ``` ### Getting the LinkedIn Cookie +
๐ŸŒ Chrome DevTools Guide 1. Open LinkedIn and login 2. Open Chrome DevTools (F12 or right-click โ†’ Inspect) -3. Go to **Application** > **Storage** > **Cookies** > **https://www.linkedin.com** +3. Go to **Application** > **Storage** > **Cookies** > **** 4. Find the cookie named `li_at` 5. Copy the **Value** field (this is your LinkedIn session cookie) 6. Use this value as your `LINKEDIN_COOKIE` in the configuration @@ -87,26 +105,31 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c ๐Ÿณ Docker get-cookie method **Run the server with the `--get-cookie` flag:** + ```bash docker run -it --rm \ stickerdaniel/linkedin-mcp-server:latest \ --get-cookie ``` + Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method above.
-> [!NOTE] -> The cookie will expire during the next 30 days. Just get the new cookie and update your client config. There are also many cookie manager extensions that you can use to quickly copy the cookie. +> [!NOTE] Cookie Issues +> The cookie might expire after some time. Just get the new cookie and update your client config. If you encounter issues logging in, please try the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in `--no-headless` mode where you can debug the login process (solve captcha manually). ### Docker Setup Help +
๐Ÿ”ง Configuration **Transport Modes:** + - **Default (stdio)**: Standard communication for local MCP servers - **Streamable HTTP**: For a web-based MCP server **CLI Options:** + - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) - `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call - `--transport {stdio,streamable-http}` - Set transport mode @@ -118,6 +141,7 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c - `--user-agent {user_agent}` - Specify custom user agent string to prevent anti-scraping detection **HTTP Mode Example (for web-based MCP clients):** + ```bash docker run -it --rm \ -e LINKEDIN_COOKIE="li_at=YOUR_COOKIE_VALUE" \ @@ -127,6 +151,7 @@ docker run -it --rm \ ``` **Test with mcp inspector:** + 1. Install and run mcp inspector ```bunx @modelcontextprotocol/inspector``` 2. Click pre-filled token url to open the inspector in your browser 3. Select `Streamable HTTP` as `Transport Type` @@ -140,14 +165,17 @@ docker run -it --rm \ โ— Troubleshooting **Docker issues:** + - Make sure [Docker](https://www.docker.com/get-started/) is installed - Check if Docker is running: `docker ps` **Login issues:** + - Ensure your LinkedIn cookie is set and correct - Make sure you have only one active LinkedIn session per cookie at a time. Trying to open multiple sessions with the same cookie will result in a cookie invalid error. - LinkedIn may require a login confirmation in the LinkedIn mobile app for --get-cookie - You might get a captcha challenge if you logged in a lot of times in a short period of time, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode where you can debug the login process (solve captcha manually) +

@@ -158,17 +186,19 @@ docker run -it --rm \ **Prerequisites:** [Claude Desktop](https://claude.ai/download) and [Docker](https://www.docker.com/get-started/) installed **One-click installation** for Claude Desktop users: + 1. Download the [DXT extension](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest) 2. Double-click to install into Claude Desktop 3. Set your LinkedIn cookie in the extension settings ### Getting the LinkedIn Cookie +
๐ŸŒ Chrome DevTools Guide 1. Open LinkedIn and login 2. Open Chrome DevTools (F12 or right-click โ†’ Inspect) -3. Go to **Application** > **Storage** > **Cookies** > **https://www.linkedin.com** +3. Go to **Application** > **Storage** > **Cookies** > **** 4. Find the cookie named `li_at` 5. Copy the **Value** field (this is your LinkedIn session cookie) 6. Use this value as your `LINKEDIN_COOKIE` in the configuration @@ -178,30 +208,36 @@ docker run -it --rm \ ๐Ÿณ Docker get-cookie method **Run the server with the `--get-cookie` flag:** + ```bash docker run -it --rm \ stickerdaniel/linkedin-mcp-server:latest \ --get-cookie ``` + Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method above.
-> [!NOTE] -> The cookie will expire during the next 30 days. Just get the new cookie and update your client config. There are also many cookie manager extensions that you can use to quickly copy the cookie. +> [!NOTE] Cookie Issues +> The cookie might expire after some time. Just get the new cookie and update your client config. If you encounter issues logging in, please try the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in `--no-headless` mode where you can debug the login process (solve captcha manually). ### DXT Extension Setup Help +
โ— Troubleshooting **Docker issues:** + - Make sure [Docker](https://www.docker.com/get-started/) is installed - Check if Docker is running: `docker ps` **Login issues:** + - Ensure your LinkedIn cookie is set and correct - Make sure you have only one active LinkedIn session per cookie at a time. Trying to open multiple sessions with the same cookie will result in a cookie invalid error. - LinkedIn may require a login confirmation in the LinkedIn mobile app for --get-cookie - You might get a captcha challenge if you logged in a lot of times in a short period of time, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode where you can debug the login process (solve captcha manually) +

@@ -224,12 +260,13 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp ``` ### Getting the LinkedIn Cookie +
๐ŸŒ Chrome DevTools Guide 1. Open LinkedIn and login 2. Open Chrome DevTools (F12 or right-click โ†’ Inspect) -3. Go to **Application** > **Storage** > **Cookies** > **https://www.linkedin.com** +3. Go to **Application** > **Storage** > **Cookies** > **** 4. Find the cookie named `li_at` 5. Copy the **Value** field (this is your LinkedIn session cookie) 6. Use this value as your `LINKEDIN_COOKIE` in the configuration @@ -240,21 +277,25 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp ๐Ÿš€ uvx get-cookie method **Run the server with the `--get-cookie` flag:** + ```bash uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server \ linkedin-mcp-server --get-cookie ``` + Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method above.
-> [!NOTE] -> The cookie will expire during the next 30 days. Just get the new cookie and update your client config. There are also many cookie manager extensions that you can use to quickly copy the cookie. +> [!NOTE] Cookie Issues +> The cookie might expire after some time. Just get the new cookie and update your client config. If you encounter issues logging in, please try the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in `--no-headless` mode where you can debug the login process (solve captcha manually). ### uvx Setup Help +
๐Ÿ”ง Configuration **Client Configuration:** + ```json { "mcpServers": { @@ -274,10 +315,12 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c ``` **Transport Modes:** + - **Default (stdio)**: Standard communication for local MCP servers - **Streamable HTTP**: For web-based MCP server **CLI Options:** + - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) - `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call - `--transport {stdio,streamable-http}` - Set transport mode @@ -289,6 +332,7 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c - `--user-agent {user_agent}` - Specify custom user agent string to prevent anti-scraping detection **Basic Usage Examples:** + ```bash # Run with cookie from environment variable LINKEDIN_COOKIE="YOUR_COOKIE_VALUE" uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server @@ -304,12 +348,14 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp ``` **HTTP Mode Example (for web-based MCP clients):** + ```bash uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server \ --transport streamable-http --host 127.0.0.1 --port 8080 --path /mcp ``` **Test with mcp inspector:** + 1. Install and run mcp inspector ```bunx @modelcontextprotocol/inspector``` 2. Click pre-filled token url to open the inspector in your browser 3. 
Select `Streamable HTTP` as `Transport Type` @@ -323,17 +369,21 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp โ— Troubleshooting **Installation issues:** + - Ensure you have uv installed: `curl -LsSf https://astral.sh/uv/install.sh | sh` - Check uv version: `uv --version` (should be 0.4.0 or higher) **Cookie issues:** + - Ensure your LinkedIn cookie is set and correct - Cookie can be passed via `--cookie` flag or `LINKEDIN_COOKIE` environment variable - Make sure you have only one active LinkedIn session per cookie at a time **Login issues:** + - LinkedIn may require a login confirmation in the LinkedIn mobile app for --get-cookie - You might get a captcha challenge if you logged in a lot of times in a short period +

@@ -344,6 +394,7 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp **Prerequisites:** [Chrome browser](https://www.google.com/chrome/) and [Git](https://git-scm.com/downloads) installed **ChromeDriver Setup:** + 1. **Check Chrome version**: Chrome โ†’ menu (โ‹ฎ) โ†’ Help โ†’ About Google Chrome 2. **Download matching ChromeDriver**: [Chrome for Testing](https://googlechromelabs.github.io/chrome-for-testing/) 3. **Make it accessible**: @@ -376,10 +427,12 @@ uv run -m linkedin_mcp_server --no-headless --no-lazy-init ``` ### Local Setup Help +
๐Ÿ”ง Configuration **CLI Options:** + - `--no-headless` - Show browser window (debugging) - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) - `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call @@ -394,11 +447,13 @@ uv run -m linkedin_mcp_server --no-headless --no-lazy-init - `--help` - Show help **HTTP Mode Example (for web-based MCP clients):** + ```bash uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --port 8000 --path /mcp ``` **Claude Desktop:** + ```**json** { "mcpServers": { @@ -416,16 +471,19 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por โ— Troubleshooting **Login/Scraping issues:** + - Use `--no-headless` to see browser actions (captcha challenge, LinkedIn mobile app 2fa, ...) - Add `--no-lazy-init` to attempt to login to LinkedIn immediately instead of waiting for the first tool call - Add `--log-level DEBUG` to see more detailed logging - Make sure you have only one active LinkedIn session per cookie at a time. Trying to open multiple sessions with the same cookie will result in a cookie invalid error. E.g. if you have a logged in browser session with a docker container, you can't use the same cookie to login with the local setup while the docker container is running / session is not closed. **ChromeDriver issues:** + - Ensure Chrome and ChromeDriver versions match - Check ChromeDriver is in PATH or set `CHROMEDRIVER_PATH` in your env **Python issues:** + - Check Python version: `uv python --version` (should be 3.12+) - Reinstall dependencies: `uv sync --reinstall` @@ -433,12 +491,11 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por Feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) or [PR](https://github.com/stickerdaniel/linkedin-mcp-server/pulls)! -

- ## Acknowledgements + Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@joeyism](https://github.com/joeyism) and [FastMCP](https://gofastmcp.com/). โš ๏ธ Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. @@ -451,7 +508,6 @@ Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@ Star History Chart - ## License This project is licensed under the Apache 2.0 license. From 4dc2cbd8ab9c4aa8cfd19480d3046e25f8c18d63 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Sun, 4 Jan 2026 18:22:22 +0100 Subject: [PATCH 255/565] fix(docs): Refactor notes and tips formatting in README Updated formatting for notes and tips in README.md. --- README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 6213e39a..03fa1131 100644 --- a/README.md +++ b/README.md @@ -37,7 +37,7 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c ## Features & Tool Status > -> [!TIP] Available Tools +> [!TIP] > > - **Profile Scraping** (`get_person_profile`): Get detailed information from a LinkedIn profile including work history, education, skills, and connections > - **Company Analysis** (`get_company_profile`): Extract comprehensive company information from a LinkedIn company profile name @@ -55,7 +55,7 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c | `get_recommended_jobs` | Fails | See #70 for details | | `get_job_details` | Fails | See #71 for details | -> [!NOTE] Authentication +> [!NOTE] > The most reliable way to authenticate is to clone this repository and run the server manually in `--no-headless` mode where you can see the process visually and solve login challenges in the automated browser. 
> > The server can login with your email and password first time, and extract the cookie from the session for subsequent runs. From time to time, you might need to get a new cookie. @@ -115,7 +115,7 @@ docker run -it --rm \ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method above.
-> [!NOTE] Cookie Issues +> [!NOTE] > The cookie might expire after some time. Just get the new cookie and update your client config. If you encounter issues logging in, please try the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in `--no-headless` mode where you can debug the login process (solve captcha manually). ### Docker Setup Help @@ -218,7 +218,7 @@ docker run -it --rm \ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method above. -> [!NOTE] Cookie Issues +> [!NOTE] > The cookie might expire after some time. Just get the new cookie and update your client config. If you encounter issues logging in, please try the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in `--no-headless` mode where you can debug the login process (solve captcha manually). ### DXT Extension Setup Help @@ -286,7 +286,7 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server \ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method above. -> [!NOTE] Cookie Issues +> [!NOTE] > The cookie might expire after some time. Just get the new cookie and update your client config. If you encounter issues logging in, please try the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in `--no-headless` mode where you can debug the login process (solve captcha manually). 
### uvx Setup Help From fcb61e27fc4539b46531cd3f9d3ba44e52890801 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 4 Jan 2026 18:29:05 +0100 Subject: [PATCH 256/565] chore(release): bump version to 1.4.1 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 17873fd5..eb397edb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.4.0" +version = "1.4.1" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 311cb3bb..c22667bb 100644 --- a/uv.lock +++ b/uv.lock @@ -779,7 +779,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.4.0" +version = "1.4.1" source = { editable = "." } dependencies = [ { name = "fastmcp" }, From 22066f59462117b11238566a8dfb29784777b768 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 4 Jan 2026 17:30:06 +0000 Subject: [PATCH 257/565] chore(dxt): update manifest.json version to v1.4.1 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index c7e918b3..b79174db 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.4.0", + "version": "1.4.1", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-e", "LINKEDIN_COOKIE=${user_config.linkedin_cookie}", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:1.4.0" + "stickerdaniel/linkedin-mcp-server:1.4.1" ] } }, From 67b1c16ee669accfb5dcbc88293d090052a9489d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 4 Jan 2026 19:59:46 +0100 Subject: [PATCH 258/565] chore(renovate): enhance config with grouping and automerge - Upgrade to config:best-practices preset - Add group:allNonMajor to reduce PR noise - Enable vulnerability alerts with security label - Group MCP ecosystem packages (fastmcp, mcp) together - Automerge dev tool updates (pytest, ruff, pre-commit, ty) --- renovate.json | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/renovate.json b/renovate.json index 5db72dd6..6862bea6 100644 --- a/renovate.json +++ b/renovate.json @@ -1,6 +1,21 @@ { "$schema": "https://docs.renovatebot.com/renovate-schema.json", - "extends": [ - "config:recommended" + "extends": ["config:best-practices", "group:allNonMajor"], + "vulnerabilityAlerts": { + "enabled": true, + "labels": ["security"], + "schedule": ["at any time"] + }, + "packageRules": [ + { + "matchPackageNames": ["fastmcp", "mcp"], + "matchUpdateTypes": ["minor", 
"patch"], + "groupName": "MCP ecosystem" + }, + { + "matchPackageNames": ["pytest**", "ruff", "pre-commit", "ty"], + "matchUpdateTypes": ["minor", "patch"], + "automerge": true + } ] } From 9ee0c0a5ef28d65ae95a24be63698914c4a5f4da Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 9 Jan 2026 12:46:58 +0100 Subject: [PATCH 259/565] docs(references): add linkedin_scraper as a submodule for exploration --- .gitmodules | 3 +++ docs/references/linkedin_scraper | 1 + 2 files changed, 4 insertions(+) create mode 100644 .gitmodules create mode 160000 docs/references/linkedin_scraper diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..05892e55 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "docs/references/linkedin_scraper"] + path = docs/references/linkedin_scraper + url = https://github.com/joeyism/linkedin_scraper.git diff --git a/docs/references/linkedin_scraper b/docs/references/linkedin_scraper new file mode 160000 index 00000000..647e88ab --- /dev/null +++ b/docs/references/linkedin_scraper @@ -0,0 +1 @@ +Subproject commit 647e88abbd38719e48ac7a340ec905a8f6a69a06 From 4c7c97f6daf45589bb70ee921078ec68443679cb Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 9 Jan 2026 13:09:13 +0100 Subject: [PATCH 260/565] chore(dependencies): update linkedin-scraper to version 3.0.1 and add playwright as a dependency - Upgraded linkedin-scraper from version 2.11.5 to 3.0.1 with updated source. - Added playwright as a new dependency with a minimum version of 1.40.0. - Removed keyring from dependencies. - Updated pyproject.toml and uv.lock files accordingly. 
--- pyproject.toml | 7 +- uv.lock | 192 +++++++++++++++++++++++-------------------------- 2 files changed, 93 insertions(+), 106 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index eb397edb..1d630891 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,8 +7,8 @@ requires-python = ">=3.12" dependencies = [ "fastmcp>=2.14.0", "inquirer>=3.4.0", - "keyring>=25.6.0", - "linkedin-scraper", + "linkedin-scraper>=3.0.0", + "playwright>=1.40.0", "pyperclip>=1.9.0", ] @@ -26,9 +26,6 @@ exclude = ["assets*", "docs*", "tests*"] [tool.setuptools.package-data] linkedin_mcp_server = ["py.typed"] -[tool.uv.sources] -linkedin-scraper = { git = "https://github.com/stickerdaniel/linkedin_scraper.git" } - [dependency-groups] dev = [ "aiohttp>=3.12.13", diff --git a/uv.lock b/uv.lock index c22667bb..d5b8567d 100644 --- a/uv.lock +++ b/uv.lock @@ -2,6 +2,15 @@ version = 1 revision = 2 requires-python = ">=3.12" +[[package]] +name = "aiofiles" +version = "25.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/41/c3/534eac40372d8ee36ef40df62ec129bee4fdb5ad9706e58a29be53b2c970/aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2", size = 46354, upload-time = "2025-10-09T20:51:04.358Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size = 14668, upload-time = "2025-10-09T20:51:03.174Z" }, +] + [[package]] name = "aiohappyeyeballs" version = "2.6.1" @@ -565,6 +574,45 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, 
] +[[package]] +name = "greenlet" +version = "3.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/e5/40dbda2736893e3e53d25838e0f19a2b417dfc122b9989c91918db30b5d3/greenlet-3.3.0.tar.gz", hash = "sha256:a82bb225a4e9e4d653dd2fb7b8b2d36e4fb25bc0165422a11e48b88e9e6f78fb", size = 190651, upload-time = "2025-12-04T14:49:44.05Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/0a/a3871375c7b9727edaeeea994bfff7c63ff7804c9829c19309ba2e058807/greenlet-3.3.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b01548f6e0b9e9784a2c99c5651e5dc89ffcbe870bc5fb2e5ef864e9cc6b5dcb", size = 276379, upload-time = "2025-12-04T14:23:30.498Z" }, + { url = "https://files.pythonhosted.org/packages/43/ab/7ebfe34dce8b87be0d11dae91acbf76f7b8246bf9d6b319c741f99fa59c6/greenlet-3.3.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:349345b770dc88f81506c6861d22a6ccd422207829d2c854ae2af8025af303e3", size = 597294, upload-time = "2025-12-04T14:50:06.847Z" }, + { url = "https://files.pythonhosted.org/packages/a4/39/f1c8da50024feecd0793dbd5e08f526809b8ab5609224a2da40aad3a7641/greenlet-3.3.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e8e18ed6995e9e2c0b4ed264d2cf89260ab3ac7e13555b8032b25a74c6d18655", size = 607742, upload-time = "2025-12-04T14:57:42.349Z" }, + { url = "https://files.pythonhosted.org/packages/77/cb/43692bcd5f7a0da6ec0ec6d58ee7cddb606d055ce94a62ac9b1aa481e969/greenlet-3.3.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c024b1e5696626890038e34f76140ed1daf858e37496d33f2af57f06189e70d7", size = 622297, upload-time = "2025-12-04T15:07:13.552Z" }, + { url = "https://files.pythonhosted.org/packages/75/b0/6bde0b1011a60782108c01de5913c588cf51a839174538d266de15e4bf4d/greenlet-3.3.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:047ab3df20ede6a57c35c14bf5200fcf04039d50f908270d3f9a7a82064f543b", size = 609885, upload-time = "2025-12-04T14:26:02.368Z" }, + { url = "https://files.pythonhosted.org/packages/49/0e/49b46ac39f931f59f987b7cd9f34bfec8ef81d2a1e6e00682f55be5de9f4/greenlet-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d9ad37fc657b1102ec880e637cccf20191581f75c64087a549e66c57e1ceb53", size = 1567424, upload-time = "2025-12-04T15:04:23.757Z" }, + { url = "https://files.pythonhosted.org/packages/05/f5/49a9ac2dff7f10091935def9165c90236d8f175afb27cbed38fb1d61ab6b/greenlet-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83cd0e36932e0e7f36a64b732a6f60c2fc2df28c351bae79fbaf4f8092fe7614", size = 1636017, upload-time = "2025-12-04T14:27:29.688Z" }, + { url = "https://files.pythonhosted.org/packages/6c/79/3912a94cf27ec503e51ba493692d6db1e3cd8ac7ac52b0b47c8e33d7f4f9/greenlet-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7a34b13d43a6b78abf828a6d0e87d3385680eaf830cd60d20d52f249faabf39", size = 301964, upload-time = "2025-12-04T14:36:58.316Z" }, + { url = "https://files.pythonhosted.org/packages/02/2f/28592176381b9ab2cafa12829ba7b472d177f3acc35d8fbcf3673d966fff/greenlet-3.3.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a1e41a81c7e2825822f4e068c48cb2196002362619e2d70b148f20a831c00739", size = 275140, upload-time = "2025-12-04T14:23:01.282Z" }, + { url = "https://files.pythonhosted.org/packages/2c/80/fbe937bf81e9fca98c981fe499e59a3f45df2a04da0baa5c2be0dca0d329/greenlet-3.3.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f515a47d02da4d30caaa85b69474cec77b7929b2e936ff7fb853d42f4bf8808", size = 599219, upload-time = "2025-12-04T14:50:08.309Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ff/7c985128f0514271b8268476af89aee6866df5eec04ac17dcfbc676213df/greenlet-3.3.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7d2d9fd66bfadf230b385fdc90426fcd6eb64db54b40c495b72ac0feb5766c54", 
size = 610211, upload-time = "2025-12-04T14:57:43.968Z" }, + { url = "https://files.pythonhosted.org/packages/79/07/c47a82d881319ec18a4510bb30463ed6891f2ad2c1901ed5ec23d3de351f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30a6e28487a790417d036088b3bcb3f3ac7d8babaa7d0139edbaddebf3af9492", size = 624311, upload-time = "2025-12-04T15:07:14.697Z" }, + { url = "https://files.pythonhosted.org/packages/fd/8e/424b8c6e78bd9837d14ff7df01a9829fc883ba2ab4ea787d4f848435f23f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:087ea5e004437321508a8d6f20efc4cfec5e3c30118e1417ea96ed1d93950527", size = 612833, upload-time = "2025-12-04T14:26:03.669Z" }, + { url = "https://files.pythonhosted.org/packages/b5/ba/56699ff9b7c76ca12f1cdc27a886d0f81f2189c3455ff9f65246780f713d/greenlet-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab97cf74045343f6c60a39913fa59710e4bd26a536ce7ab2397adf8b27e67c39", size = 1567256, upload-time = "2025-12-04T15:04:25.276Z" }, + { url = "https://files.pythonhosted.org/packages/1e/37/f31136132967982d698c71a281a8901daf1a8fbab935dce7c0cf15f942cc/greenlet-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5375d2e23184629112ca1ea89a53389dddbffcf417dad40125713d88eb5f96e8", size = 1636483, upload-time = "2025-12-04T14:27:30.804Z" }, + { url = "https://files.pythonhosted.org/packages/7e/71/ba21c3fb8c5dce83b8c01f458a42e99ffdb1963aeec08fff5a18588d8fd7/greenlet-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:9ee1942ea19550094033c35d25d20726e4f1c40d59545815e1128ac58d416d38", size = 301833, upload-time = "2025-12-04T14:32:23.929Z" }, + { url = "https://files.pythonhosted.org/packages/d7/7c/f0a6d0ede2c7bf092d00bc83ad5bafb7e6ec9b4aab2fbdfa6f134dc73327/greenlet-3.3.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:60c2ef0f578afb3c8d92ea07ad327f9a062547137afe91f38408f08aacab667f", size = 275671, upload-time = "2025-12-04T14:23:05.267Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/06/dac639ae1a50f5969d82d2e3dd9767d30d6dbdbab0e1a54010c8fe90263c/greenlet-3.3.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a5d554d0712ba1de0a6c94c640f7aeba3f85b3a6e1f2899c11c2c0428da9365", size = 646360, upload-time = "2025-12-04T14:50:10.026Z" }, + { url = "https://files.pythonhosted.org/packages/e0/94/0fb76fe6c5369fba9bf98529ada6f4c3a1adf19e406a47332245ef0eb357/greenlet-3.3.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3a898b1e9c5f7307ebbde4102908e6cbfcb9ea16284a3abe15cab996bee8b9b3", size = 658160, upload-time = "2025-12-04T14:57:45.41Z" }, + { url = "https://files.pythonhosted.org/packages/93/79/d2c70cae6e823fac36c3bbc9077962105052b7ef81db2f01ec3b9bf17e2b/greenlet-3.3.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dcd2bdbd444ff340e8d6bdf54d2f206ccddbb3ccfdcd3c25bf4afaa7b8f0cf45", size = 671388, upload-time = "2025-12-04T15:07:15.789Z" }, + { url = "https://files.pythonhosted.org/packages/b8/14/bab308fc2c1b5228c3224ec2bf928ce2e4d21d8046c161e44a2012b5203e/greenlet-3.3.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5773edda4dc00e173820722711d043799d3adb4f01731f40619e07ea2750b955", size = 660166, upload-time = "2025-12-04T14:26:05.099Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d2/91465d39164eaa0085177f61983d80ffe746c5a1860f009811d498e7259c/greenlet-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ac0549373982b36d5fd5d30beb8a7a33ee541ff98d2b502714a09f1169f31b55", size = 1615193, upload-time = "2025-12-04T15:04:27.041Z" }, + { url = "https://files.pythonhosted.org/packages/42/1b/83d110a37044b92423084d52d5d5a3b3a73cafb51b547e6d7366ff62eff1/greenlet-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d198d2d977460358c3b3a4dc844f875d1adb33817f0613f663a656f463764ccc", size = 1683653, upload-time = "2025-12-04T14:27:32.366Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/9a/9030e6f9aa8fd7808e9c31ba4c38f87c4f8ec324ee67431d181fe396d705/greenlet-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:73f51dd0e0bdb596fb0417e475fa3c5e32d4c83638296e560086b8d7da7c4170", size = 305387, upload-time = "2025-12-04T14:26:51.063Z" }, + { url = "https://files.pythonhosted.org/packages/a0/66/bd6317bc5932accf351fc19f177ffba53712a202f9df10587da8df257c7e/greenlet-3.3.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d6ed6f85fae6cdfdb9ce04c9bf7a08d666cfcfb914e7d006f44f840b46741931", size = 282638, upload-time = "2025-12-04T14:25:20.941Z" }, + { url = "https://files.pythonhosted.org/packages/30/cf/cc81cb030b40e738d6e69502ccbd0dd1bced0588e958f9e757945de24404/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9125050fcf24554e69c4cacb086b87b3b55dc395a8b3ebe6487b045b2614388", size = 651145, upload-time = "2025-12-04T14:50:11.039Z" }, + { url = "https://files.pythonhosted.org/packages/9c/ea/1020037b5ecfe95ca7df8d8549959baceb8186031da83d5ecceff8b08cd2/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:87e63ccfa13c0a0f6234ed0add552af24cc67dd886731f2261e46e241608bee3", size = 654236, upload-time = "2025-12-04T14:57:47.007Z" }, + { url = "https://files.pythonhosted.org/packages/69/cc/1e4bae2e45ca2fa55299f4e85854606a78ecc37fead20d69322f96000504/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2662433acbca297c9153a4023fe2161c8dcfdcc91f10433171cf7e7d94ba2221", size = 662506, upload-time = "2025-12-04T15:07:16.906Z" }, + { url = "https://files.pythonhosted.org/packages/57/b9/f8025d71a6085c441a7eaff0fd928bbb275a6633773667023d19179fe815/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3c6e9b9c1527a78520357de498b0e709fb9e2f49c3a513afd5a249007261911b", size = 653783, upload-time = "2025-12-04T14:26:06.225Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/c7/876a8c7a7485d5d6b5c6821201d542ef28be645aa024cfe1145b35c120c1/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:286d093f95ec98fdd92fcb955003b8a3d054b4e2cab3e2707a5039e7b50520fd", size = 1614857, upload-time = "2025-12-04T15:04:28.484Z" }, + { url = "https://files.pythonhosted.org/packages/4f/dc/041be1dff9f23dac5f48a43323cd0789cb798342011c19a248d9c9335536/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c10513330af5b8ae16f023e8ddbfb486ab355d04467c4679c5cfe4659975dd9", size = 1676034, upload-time = "2025-12-04T14:27:33.531Z" }, +] + [[package]] name = "h11" version = "0.14.0" @@ -784,8 +832,8 @@ source = { editable = "." } dependencies = [ { name = "fastmcp" }, { name = "inquirer" }, - { name = "keyring" }, { name = "linkedin-scraper" }, + { name = "playwright" }, { name = "pyperclip" }, ] @@ -804,8 +852,8 @@ dev = [ requires-dist = [ { name = "fastmcp", specifier = ">=2.14.0" }, { name = "inquirer", specifier = ">=3.4.0" }, - { name = "keyring", specifier = ">=25.6.0" }, - { name = "linkedin-scraper", git = "https://github.com/stickerdaniel/linkedin_scraper.git" }, + { name = "linkedin-scraper", specifier = ">=3.0.0" }, + { name = "playwright", specifier = ">=1.40.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, ] @@ -822,13 +870,19 @@ dev = [ [[package]] name = "linkedin-scraper" -version = "2.11.5" -source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git#30f448df90af834bafb7d9e4caebfd0032605163" } +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "aiofiles" }, { name = "lxml" }, + { name = "playwright" }, + { name = "pydantic" }, { name = "python-dotenv" }, { name = "requests" }, - { name = "selenium" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/31/ac/af65e5359fcdd08d0cc194674e67106ce40027d5f55142243887681e0462/linkedin_scraper-3.0.1.tar.gz", hash = 
"sha256:6e9c54fd6b78003d0be370bbfacb69b52bb023c7c07bcc9d8b508d94048ea058", size = 39638, upload-time = "2026-01-07T03:09:52.482Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/c5/7fc84e2fca5608b6c8eec36db4f14e8dd4e59a059da84deba94c49faa875/linkedin_scraper-3.0.1-py3-none-any.whl", hash = "sha256:e121f963d17e0fc1503a4fd1b7c37fb9ccdcfc587dae4ca3defc073a81aff522", size = 44724, upload-time = "2026-01-07T03:09:51.478Z" }, ] [[package]] @@ -1131,18 +1185,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7a/5e/5958555e09635d09b75de3c4f8b9cae7335ca545d77392ffe7331534c402/opentelemetry_semantic_conventions-0.60b1-py3-none-any.whl", hash = "sha256:9fa8c8b0c110da289809292b0591220d3a7b53c1526a23021e977d68597893fb", size = 219982, upload-time = "2025-12-11T13:32:36.955Z" }, ] -[[package]] -name = "outcome" -version = "1.3.0.post0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/98/df/77698abfac98571e65ffeb0c1fba8ffd692ab8458d617a0eed7d9a8d38f2/outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8", size = 21060, upload-time = "2023-10-26T04:26:04.361Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/55/8b/5ab7257531a5d830fc8000c476e63c935488d74609b50f9384a643ec0a62/outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b", size = 10692, upload-time = "2023-10-26T04:26:02.532Z" }, -] - [[package]] name = "packaging" version = "25.0" @@ -1179,6 +1221,25 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, ] +[[package]] +name = "playwright" +version = 
"1.57.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet" }, + { name = "pyee" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/b6/e17543cea8290ae4dced10be21d5a43c360096aa2cce0aa7039e60c50df3/playwright-1.57.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:9351c1ac3dfd9b3820fe7fc4340d96c0d3736bb68097b9b7a69bd45d25e9370c", size = 41985039, upload-time = "2025-12-09T08:06:18.408Z" }, + { url = "https://files.pythonhosted.org/packages/8b/04/ef95b67e1ff59c080b2effd1a9a96984d6953f667c91dfe9d77c838fc956/playwright-1.57.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a4a9d65027bce48eeba842408bcc1421502dfd7e41e28d207e94260fa93ca67e", size = 40775575, upload-time = "2025-12-09T08:06:22.105Z" }, + { url = "https://files.pythonhosted.org/packages/60/bd/5563850322a663956c927eefcf1457d12917e8f118c214410e815f2147d1/playwright-1.57.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:99104771abc4eafee48f47dac2369e0015516dc1ce8c409807d2dd440828b9a4", size = 41985042, upload-time = "2025-12-09T08:06:25.357Z" }, + { url = "https://files.pythonhosted.org/packages/56/61/3a803cb5ae0321715bfd5247ea871d25b32c8f372aeb70550a90c5f586df/playwright-1.57.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:284ed5a706b7c389a06caa431b2f0ba9ac4130113c3a779767dda758c2497bb1", size = 45975252, upload-time = "2025-12-09T08:06:29.186Z" }, + { url = "https://files.pythonhosted.org/packages/83/d7/b72eb59dfbea0013a7f9731878df8c670f5f35318cedb010c8a30292c118/playwright-1.57.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a1bae6c0a07839cdeaddbc0756b3b2b85e476c07945f64ece08f1f956a86f1", size = 45706917, upload-time = "2025-12-09T08:06:32.549Z" }, + { url = "https://files.pythonhosted.org/packages/e4/09/3fc9ebd7c95ee54ba6a68d5c0bc23e449f7235f4603fc60534a364934c16/playwright-1.57.0-py3-none-win32.whl", hash = "sha256:1dd93b265688da46e91ecb0606d36f777f8eadcf7fbef12f6426b20bf0c9137c", size = 36553860, 
upload-time = "2025-12-09T08:06:35.864Z" }, + { url = "https://files.pythonhosted.org/packages/58/d4/dcdfd2a33096aeda6ca0d15584800443dd2be64becca8f315634044b135b/playwright-1.57.0-py3-none-win_amd64.whl", hash = "sha256:6caefb08ed2c6f29d33b8088d05d09376946e49a73be19271c8cd5384b82b14c", size = 36553864, upload-time = "2025-12-09T08:06:38.915Z" }, + { url = "https://files.pythonhosted.org/packages/6a/60/fe31d7e6b8907789dcb0584f88be741ba388413e4fbce35f1eba4e3073de/playwright-1.57.0-py3-none-win_arm64.whl", hash = "sha256:5f065f5a133dbc15e6e7c71e7bc04f258195755b1c32a432b792e28338c8335e", size = 32837940, upload-time = "2025-12-09T08:06:42.268Z" }, +] + [[package]] name = "pluggy" version = "1.6.0" @@ -1418,6 +1479,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2c/94/93b7f5981aa04f922e0d9ce7326a4587866ec7e39f7c180ffcf408e66ee8/pydocket-0.16.3-py3-none-any.whl", hash = "sha256:e2b50925356e7cd535286255195458ac7bba15f25293356651b36d223db5dd7c", size = 67087, upload-time = "2025-12-23T23:37:31.829Z" }, ] +[[package]] +name = "pyee" +version = "13.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/03/1fd98d5841cd7964a27d729ccf2199602fe05eb7a405c1462eb7277945ed/pyee-13.0.0.tar.gz", hash = "sha256:b391e3c5a434d1f5118a25615001dbc8f669cf410ab67d04c4d4e07c55481c37", size = 31250, upload-time = "2025-03-17T18:53:15.955Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/4d/b9add7c84060d4c1906abe9a7e5359f2a60f7a9a4f67268b2766673427d8/pyee-13.0.0-py3-none-any.whl", hash = "sha256:48195a3cddb3b1515ce0695ed76036b5ccc2ef3a9f963ff9f77aec0139845498", size = 15730, upload-time = "2025-03-17T18:53:14.532Z" }, +] + [[package]] name = "pygments" version = "2.19.1" @@ -1447,15 +1520,6 @@ version = "1.9.0" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/30/23/2f0a3efc4d6a32f3b63cdff36cd398d9701d26cda58e3ab97ac79fb5e60d/pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310", size = 20961, upload-time = "2024-06-18T20:38:48.401Z" } -[[package]] -name = "pysocks" -version = "1.7.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/11/293dd436aea955d45fc4e8a35b6ae7270f5b8e00b53cf6c024c83b657a11/PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0", size = 284429, upload-time = "2019-09-20T02:07:35.714Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/59/b4572118e098ac8e46e399a1dd0f2d85403ce8bbaad9ec79373ed6badaf9/PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5", size = 16725, upload-time = "2019-09-20T02:06:22.938Z" }, -] - [[package]] name = "pytest" version = "8.3.5" @@ -1773,23 +1837,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/24/b4293291fa1dd830f353d2cb163295742fa87f179fcc8a20a306a81978b7/SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99", size = 15221, upload-time = "2022-08-13T16:22:44.457Z" }, ] -[[package]] -name = "selenium" -version = "4.39.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "trio" }, - { name = "trio-websocket" }, - { name = "typing-extensions" }, - { name = "urllib3", extra = ["socks"] }, - { name = "websocket-client" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/af/19/27c1bf9eb1f7025632d35a956b50746efb4b10aa87f961b263fa7081f4c5/selenium-4.39.0.tar.gz", hash = "sha256:12f3325f02d43b6c24030fc9602b34a3c6865abbb1db9406641d13d108aa1889", size = 928575, upload-time = "2025-12-06T23:12:34.896Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/58/d0/55a6b7c6f35aad4c8a54be0eb7a52c1ff29a59542fc3e655f0ecbb14456d/selenium-4.39.0-py3-none-any.whl", hash = "sha256:c85f65d5610642ca0f47dae9d5cc117cd9e831f74038bc09fe1af126288200f9", size = 9655249, upload-time = "2025-12-06T23:12:33.085Z" }, -] - [[package]] name = "shellingham" version = "1.5.4" @@ -1851,37 +1898,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a0/4b/528ccf7a982216885a1ff4908e886b8fb5f19862d1962f56a3fce2435a70/starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227", size = 71995, upload-time = "2025-03-08T10:55:32.662Z" }, ] -[[package]] -name = "trio" -version = "0.32.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "cffi", marker = "implementation_name != 'pypy' and os_name == 'nt'" }, - { name = "idna" }, - { name = "outcome" }, - { name = "sniffio" }, - { name = "sortedcontainers" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d8/ce/0041ddd9160aac0031bcf5ab786c7640d795c797e67c438e15cfedf815c8/trio-0.32.0.tar.gz", hash = "sha256:150f29ec923bcd51231e1d4c71c7006e65247d68759dd1c19af4ea815a25806b", size = 605323, upload-time = "2025-10-31T07:18:17.466Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/41/bf/945d527ff706233636c73880b22c7c953f3faeb9d6c7e2e85bfbfd0134a0/trio-0.32.0-py3-none-any.whl", hash = "sha256:4ab65984ef8370b79a76659ec87aa3a30c5c7c83ff250b4de88c29a8ab6123c5", size = 512030, upload-time = "2025-10-31T07:18:15.885Z" }, -] - -[[package]] -name = "trio-websocket" -version = "0.12.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "outcome" }, - { name = "trio" }, - { name = "wsproto" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d1/3c/8b4358e81f2f2cfe71b66a267f023a91db20a817b9425dd964873796980a/trio_websocket-0.12.2.tar.gz", hash = 
"sha256:22c72c436f3d1e264d0910a3951934798dcc5b00ae56fc4ee079d46c7cf20fae", size = 33549, upload-time = "2025-02-25T05:16:58.947Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/19/eb640a397bba49ba49ef9dbe2e7e5c04202ba045b6ce2ec36e9cadc51e04/trio_websocket-0.12.2-py3-none-any.whl", hash = "sha256:df605665f1db533f4a386c94525870851096a223adcb97f72a07e8b4beba45b6", size = 21221, upload-time = "2025-02-25T05:16:57.545Z" }, -] - [[package]] name = "ty" version = "0.0.1a12" @@ -1952,11 +1968,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size = 131182, upload-time = "2025-12-11T15:56:38.584Z" }, ] -[package.optional-dependencies] -socks = [ - { name = "pysocks" }, -] - [[package]] name = "uvicorn" version = "0.40.0" @@ -1993,15 +2004,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, ] -[[package]] -name = "websocket-client" -version = "1.8.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648, upload-time = "2024-04-23T22:16:16.976Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload-time = "2024-04-23T22:16:14.422Z" }, -] - 
[[package]] name = "websockets" version = "15.0.1" @@ -2082,18 +2084,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, ] -[[package]] -name = "wsproto" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c9/4a/44d3c295350d776427904d73c189e10aeae66d7f555bb2feee16d1e4ba5a/wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065", size = 53425, upload-time = "2022-08-23T19:58:21.447Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/58/e860788190eba3bcce367f74d29c4675466ce8dddfba85f7827588416f01/wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736", size = 24226, upload-time = "2022-08-23T19:58:19.96Z" }, -] - [[package]] name = "xmod" version = "1.8.1" From b023081e27f8fc8a796e6ac5f42d9082c82da640 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 10 Jan 2026 01:16:40 +0100 Subject: [PATCH 261/565] refactor(scraper): migrate to Playwright for browser automation and enhance session management - Updated Dockerfile to use Playwright for browser automation instead of Chrome WebDriver. - Simplified authentication flow by implementing session file management. - Removed keyring dependency and adjusted configuration for environment variable usage. - Enhanced error handling and logging for better debugging. - Updated README to reflect changes in authentication and usage instructions. - Refactored tools for company, job, and person scraping to utilize new Playwright-based architecture. 
--- Dockerfile | 28 +- README.md | 264 +++++--------- linkedin_mcp_server/__init__.py | 6 +- linkedin_mcp_server/authentication.py | 139 ++++--- linkedin_mcp_server/callbacks.py | 38 ++ linkedin_mcp_server/cli.py | 17 - linkedin_mcp_server/cli_main.py | 383 +++++++------------- linkedin_mcp_server/config/__init__.py | 37 +- linkedin_mcp_server/config/loaders.py | 249 +++++-------- linkedin_mcp_server/config/messages.py | 101 ------ linkedin_mcp_server/config/providers.py | 195 ---------- linkedin_mcp_server/config/schema.py | 83 ++--- linkedin_mcp_server/config/secrets.py | 48 --- linkedin_mcp_server/drivers/__init__.py | 38 +- linkedin_mcp_server/drivers/browser.py | 155 ++++++++ linkedin_mcp_server/drivers/chrome.py | 458 ------------------------ linkedin_mcp_server/error_handler.py | 130 +++---- linkedin_mcp_server/exceptions.py | 29 +- linkedin_mcp_server/logging_config.py | 1 - linkedin_mcp_server/server.py | 38 +- linkedin_mcp_server/setup.py | 314 +++------------- linkedin_mcp_server/tools/company.py | 96 ++--- linkedin_mcp_server/tools/job.py | 107 +++--- linkedin_mcp_server/tools/person.py | 97 ++--- linkedin_mcp_server/utils/__init__.py | 5 + linkedin_mcp_server/utils/retry.py | 69 ++++ manifest.json | 12 +- pyproject.toml | 3 + 28 files changed, 978 insertions(+), 2162 deletions(-) create mode 100644 linkedin_mcp_server/callbacks.py delete mode 100644 linkedin_mcp_server/config/messages.py delete mode 100644 linkedin_mcp_server/config/providers.py delete mode 100644 linkedin_mcp_server/config/secrets.py create mode 100644 linkedin_mcp_server/drivers/browser.py delete mode 100644 linkedin_mcp_server/drivers/chrome.py create mode 100644 linkedin_mcp_server/utils/__init__.py create mode 100644 linkedin_mcp_server/utils/retry.py diff --git a/Dockerfile b/Dockerfile index 03393644..6c6e7012 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,28 +1,20 @@ -FROM python:3.13-alpine +FROM mcr.microsoft.com/playwright/python:v1.57.0-noble -# Install system dependencies 
including Chromium and ChromeDriver -RUN apk add --no-cache \ - git \ - curl \ - chromium \ - chromium-chromedriver - -# Install uv from official image +# Install uv COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ -# Set working directory +# Set working directory and fix ownership WORKDIR /app +RUN chown pwuser:pwuser /app -# Copy project files -COPY . /app +# Copy project files and set ownership +COPY --chown=pwuser:pwuser . /app -# Sync dependencies and install project -RUN --mount=type=cache,target=/root/.cache/uv \ - uv sync --frozen +# Switch to non-root user +USER pwuser -# Create a non-root user -RUN adduser -D -u 1000 mcpuser && chown -R mcpuser:mcpuser /app -USER mcpuser +# Sync dependencies and install project +RUN uv sync --frozen # Set entrypoint and default arguments ENTRYPOINT ["uv", "run", "-m", "linkedin_mcp_server"] diff --git a/README.md b/README.md index 03fa1131..ae3f20b4 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ License

-Through this LinkedIn MCP server, AI assistants like Claude can connect to your LinkedIn. Give access to profiles and companies, get your recommended jobs, or search for keywords. All from a Docker container on your machine. +Through this LinkedIn MCP server, AI assistants like Claude can connect to your LinkedIn. Give access to profiles and companies, search for jobs, or get job details. All from a Docker container on your machine. ## Installation Methods @@ -19,10 +19,6 @@ Through this LinkedIn MCP server, AI assistants like Claude can connect to your ## Usage Examples -``` -What are my recommended jobs I can apply to? -``` - ``` Research the background of this candidate https://www.linkedin.com/in/stickerdaniel/ ``` @@ -41,24 +37,24 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c > > - **Profile Scraping** (`get_person_profile`): Get detailed information from a LinkedIn profile including work history, education, skills, and connections > - **Company Analysis** (`get_company_profile`): Extract comprehensive company information from a LinkedIn company profile name -> - **Job Search** (`search_jobs`): Search for jobs with filters like keywords and location +> - **Job Search** (`search_jobs`): Search for jobs with keywords and location filters +> - **Job Details** (`get_job_details`): Get detailed information about a specific job posting > - **Session Management** (`close_session`): Properly close browser session and clean up resources -**January 2026 Tool Status:** +**Tool Status:** | Tool | Status | Notes | |------|--------|-------| -| `get_person_profile` | Works | Minor data formatting quirks | -| `get_company_profile` | Works | Minor data formatting quirks | -| `search_jobs` | Works | | -| `close_session` | Works | | -| `get_recommended_jobs` | Fails | See #70 for details | -| `get_job_details` | Fails | See #71 for details | +| `get_person_profile` | โœ… Works | | +| `get_company_profile` | โœ… Works | | +| `search_jobs` | 
โœ… Works | Returns job URLs | +| `get_job_details` | โœ… Works | | +| `close_session` | โœ… Works | | > [!NOTE] > The most reliable way to authenticate is to clone this repository and run the server manually in `--no-headless` mode where you can see the process visually and solve login challenges in the automated browser. > -> The server can login with your email and password first time, and extract the cookie from the session for subsequent runs. From time to time, you might need to get a new cookie. +> The server saves your session to `~/.linkedin-mcp/session.json` after successful login. Sessions may expire over time, requiring re-authentication. > > Please try this approach first before opening an authentication related issue. @@ -66,57 +62,67 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c **Prerequisites:** Make sure you have [Docker](https://www.docker.com/get-started/) installed and running. -### Installation +### Authentication Options -**Client Configuration:** +Docker runs headless (no browser window), so you need to authenticate using one of these methods: + +#### Option 1: Cookie Authentication (Simplest) + +Get your LinkedIn `li_at` cookie and pass it to Docker: ```json { "mcpServers": { "linkedin": { "command": "docker", - "args": [ - "run", "--rm", "-i", - "-e", "LINKEDIN_COOKIE", - "stickerdaniel/linkedin-mcp-server:latest" - ], + "args": ["run", "-i", "--rm", "-e", "LINKEDIN_COOKIE", "stickerdaniel/linkedin-mcp-server"], "env": { - "LINKEDIN_COOKIE": "li_at=YOUR_COOKIE_VALUE" + "LINKEDIN_COOKIE": "your_li_at_cookie_value" } } } } ``` -### Getting the LinkedIn Cookie - -
-๐ŸŒ Chrome DevTools Guide +**To get your `li_at` cookie:** +1. Open LinkedIn in your browser and log in +2. Open DevTools (F12) โ†’ Application โ†’ Cookies โ†’ linkedin.com +3. Copy the `li_at` cookie value -1. Open LinkedIn and login -2. Open Chrome DevTools (F12 or right-click โ†’ Inspect) -3. Go to **Application** > **Storage** > **Cookies** > **** -4. Find the cookie named `li_at` -5. Copy the **Value** field (this is your LinkedIn session cookie) -6. Use this value as your `LINKEDIN_COOKIE` in the configuration +#### Option 2: Session File (More Reliable) -
-
-๐Ÿณ Docker get-cookie method +Create a session file locally, then mount it into Docker. -**Run the server with the `--get-cookie` flag:** +**Step 1: Create session using uvx (one-time setup)** ```bash -docker run -it --rm \ - stickerdaniel/linkedin-mcp-server:latest \ - --get-cookie +uvx linkedin-mcp-server --get-session ``` -Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method above. -
+This opens a browser window where you log in manually. The session is saved to `~/.linkedin-mcp/session.json`. + +**Step 2: Configure Claude Desktop with Docker** + +```json +{ + "mcpServers": { + "linkedin": { + "command": "docker", + "args": [ + "run", "--rm", "-i", + "-v", "~/.linkedin-mcp:/home/pwuser/.linkedin-mcp", + "stickerdaniel/linkedin-mcp-server:latest" + ] + } + } +} +``` > [!NOTE] -> The cookie might expire after some time. Just get the new cookie and update your client config. If you encounter issues logging in, please try the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in `--no-headless` mode where you can debug the login process (solve captcha manually). +> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-mcp-server --get-session` again locally, or use a fresh `li_at` cookie. + +> [!NOTE] +> **Why can't I run `--get-session` in Docker?** Docker containers don't have a display server, so Playwright can't show a browser window. You must create the session on your host machine first, then mount it into Docker. 
### Docker Setup Help @@ -130,21 +136,20 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c **CLI Options:** +- `--no-headless` - Show browser window (useful for login and debugging) - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) -- `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call - `--transport {stdio,streamable-http}` - Set transport mode - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) -- `--get-cookie` - Attempt to login with email and password and extract the LinkedIn cookie -- `--cookie {cookie}` - Pass a specific LinkedIn cookie for login -- `--user-agent {user_agent}` - Specify custom user agent string to prevent anti-scraping detection +- `--get-session [PATH]` - Login interactively and save session (default: ~/.linkedin-mcp/session.json) +- `--clear-session` - Clear stored LinkedIn session file **HTTP Mode Example (for web-based MCP clients):** ```bash docker run -it --rm \ - -e LINKEDIN_COOKIE="li_at=YOUR_COOKIE_VALUE" \ + -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp \ -p 8080:8080 \ stickerdaniel/linkedin-mcp-server:latest \ --transport streamable-http --host 0.0.0.0 --port 8080 --path /mcp @@ -171,9 +176,8 @@ docker run -it --rm \ **Login issues:** -- Ensure your LinkedIn cookie is set and correct -- Make sure you have only one active LinkedIn session per cookie at a time. Trying to open multiple sessions with the same cookie will result in a cookie invalid error. 
-- LinkedIn may require a login confirmation in the LinkedIn mobile app for --get-cookie +- Make sure you have only one active LinkedIn session at a time +- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` - You might get a captcha challenge if you logged in a lot of times in a short period of time, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode where you can debug the login process (solve captcha manually) @@ -189,37 +193,10 @@ docker run -it --rm \ 1. Download the [DXT extension](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest) 2. Double-click to install into Claude Desktop -3. Set your LinkedIn cookie in the extension settings - -### Getting the LinkedIn Cookie - -
-๐ŸŒ Chrome DevTools Guide - -1. Open LinkedIn and login -2. Open Chrome DevTools (F12 or right-click โ†’ Inspect) -3. Go to **Application** > **Storage** > **Cookies** > **** -4. Find the cookie named `li_at` -5. Copy the **Value** field (this is your LinkedIn session cookie) -6. Use this value as your `LINKEDIN_COOKIE` in the configuration - -
-
-๐Ÿณ Docker get-cookie method - -**Run the server with the `--get-cookie` flag:** - -```bash -docker run -it --rm \ - stickerdaniel/linkedin-mcp-server:latest \ - --get-cookie -``` - -Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method above. -
+3. Create a session using `--get-session` (see Docker instructions above) > [!NOTE] -> The cookie might expire after some time. Just get the new cookie and update your client config. If you encounter issues logging in, please try the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in `--no-headless` mode where you can debug the login process (solve captcha manually). +> Sessions may expire over time. If you encounter authentication issues, run `--get-session` again. For debugging login issues, use the [local setup](#-local-setup-develop--contribute) with `--no-headless` mode. ### DXT Extension Setup Help @@ -233,10 +210,9 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c **Login issues:** -- Ensure your LinkedIn cookie is set and correct -- Make sure you have only one active LinkedIn session per cookie at a time. Trying to open multiple sessions with the same cookie will result in a cookie invalid error. -- LinkedIn may require a login confirmation in the LinkedIn mobile app for --get-cookie -- You might get a captcha challenge if you logged in a lot of times in a short period of time, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode where you can debug the login process (solve captcha manually) +- Make sure you have only one active LinkedIn session at a time +- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` +- You might get a captcha challenge if you logged in frequently, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode @@ -249,45 +225,23 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c ### Installation -Run directly from GitHub without cloning: +**Step 1: Create a session (first time only)** ```bash -# Run directly from 
GitHub (latest version) -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --help - -# Run with your LinkedIn cookie -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --cookie "li_at=YOUR_COOKIE_VALUE" +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server \ + linkedin-mcp-server --get-session ``` -### Getting the LinkedIn Cookie - -
-๐ŸŒ Chrome DevTools Guide - -1. Open LinkedIn and login -2. Open Chrome DevTools (F12 or right-click โ†’ Inspect) -3. Go to **Application** > **Storage** > **Cookies** > **** -4. Find the cookie named `li_at` -5. Copy the **Value** field (this is your LinkedIn session cookie) -6. Use this value as your `LINKEDIN_COOKIE` in the configuration +This opens a browser for you to log in manually. The session is saved to `~/.linkedin-mcp/session.json`. -
- -
-๐Ÿš€ uvx get-cookie method - -**Run the server with the `--get-cookie` flag:** +**Step 2: Run the server** ```bash -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server \ - linkedin-mcp-server --get-cookie +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server ``` -Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client configuration. If this fails with a captcha challenge, use the method above. -
- > [!NOTE] -> The cookie might expire after some time. Just get the new cookie and update your client config. If you encounter issues logging in, please try the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in `--no-headless` mode where you can debug the login process (solve captcha manually). +> Sessions may expire over time. If you encounter authentication issues, run `--get-session` again. For debugging login issues, use `--no-headless` to see the browser window. ### uvx Setup Help @@ -305,10 +259,7 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c "--from", "git+https://github.com/stickerdaniel/linkedin-mcp-server", "linkedin-mcp-server" - ], - "env": { - "LINKEDIN_COOKIE": "li_at=YOUR_COOKIE_VALUE" - } + ] } } } @@ -321,30 +272,23 @@ Copy the cookie from the output and set it as `LINKEDIN_COOKIE` in your client c **CLI Options:** +- `--no-headless` - Show browser window (useful for login and debugging) - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) -- `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call - `--transport {stdio,streamable-http}` - Set transport mode - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) -- `--get-cookie` - Attempt to login with email and password and extract the LinkedIn cookie -- `--cookie {cookie}` - Pass a specific LinkedIn cookie for login -- `--user-agent {user_agent}` - Specify custom user agent string to prevent anti-scraping detection +- `--get-session [PATH]` - Login interactively and save session (default: ~/.linkedin-mcp/session.json) +- `--clear-session` - Clear stored LinkedIn session file **Basic Usage Examples:** ```bash -# Run with cookie from environment variable -LINKEDIN_COOKIE="YOUR_COOKIE_VALUE" uvx --from 
git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server - -# Run with cookie via flag -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --cookie "YOUR_COOKIE_VALUE" +# Create a session interactively +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session # Run with debug logging uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --log-level DEBUG - -# Extract cookie with credentials -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-cookie ``` **HTTP Mode Example (for web-based MCP clients):** @@ -373,16 +317,15 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp - Ensure you have uv installed: `curl -LsSf https://astral.sh/uv/install.sh | sh` - Check uv version: `uv --version` (should be 0.4.0 or higher) -**Cookie issues:** +**Session issues:** -- Ensure your LinkedIn cookie is set and correct -- Cookie can be passed via `--cookie` flag or `LINKEDIN_COOKIE` environment variable -- Make sure you have only one active LinkedIn session per cookie at a time +- Session is stored at `~/.linkedin-mcp/session.json` +- Make sure you have only one active LinkedIn session at a time **Login issues:** -- LinkedIn may require a login confirmation in the LinkedIn mobile app for --get-cookie -- You might get a captcha challenge if you logged in a lot of times in a short period +- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` +- You might get a captcha challenge if you logged in frequently @@ -391,16 +334,7 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp ## ๐Ÿ Local Setup (Develop & Contribute) -**Prerequisites:** [Chrome browser](https://www.google.com/chrome/) and [Git](https://git-scm.com/downloads) installed - -**ChromeDriver Setup:** - -1. 
**Check Chrome version**: Chrome โ†’ menu (โ‹ฎ) โ†’ Help โ†’ About Google Chrome -2. **Download matching ChromeDriver**: [Chrome for Testing](https://googlechromelabs.github.io/chrome-for-testing/) -3. **Make it accessible**: - - Place ChromeDriver in PATH (`/usr/local/bin` on macOS/Linux) - - Or set: `export CHROMEDRIVER_PATH=/path/to/chromedriver` - - if no CHROMEDRIVER_PATH is set, the server will try to find it automatically by checking common locations +**Prerequisites:** [Git](https://git-scm.com/downloads) and [uv](https://docs.astral.sh/uv/) installed ### Installation @@ -409,21 +343,22 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp git clone https://github.com/stickerdaniel/linkedin-mcp-server cd linkedin-mcp-server -# 2. Install UV package manager +# 2. Install UV package manager (if not already installed) curl -LsSf https://astral.sh/uv/install.sh | sh -uv python # install python if you don't have it -# 3. Install dependencies and dev dependencies +# 3. Install dependencies uv sync uv sync --group dev -# 4. Install pre-commit hooks +# 4. Install Playwright browser +uv run playwright install chromium + +# 5. Install pre-commit hooks uv run pre-commit install -# 5. Start the server once manually -# You will be prompted to enter your LinkedIn credentials, and they will be securely stored in your OS keychain -# Once logged in, your cookie will be stored in your OS keychain and used for subsequent runs until it expires -uv run -m linkedin_mcp_server --no-headless --no-lazy-init +# 6. 
Start the server (first run opens browser for manual login) +# Login in the browser window - session will be saved to ~/.linkedin-mcp/session.json +uv run -m linkedin_mcp_server --no-headless ``` ### Local Setup Help @@ -433,17 +368,14 @@ uv run -m linkedin_mcp_server --no-headless --no-lazy-init **CLI Options:** -- `--no-headless` - Show browser window (debugging) +- `--no-headless` - Show browser window (useful for login and debugging) - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) -- `--no-lazy-init` - Login to LinkedIn immediately instead of waiting for the first tool call -- `--get-cookie` - Login with email and password and extract the LinkedIn cookie -- `--clear-keychain` - Clear all stored LinkedIn credentials and cookies from system keychain -- `--cookie {cookie}` - Pass a specific LinkedIn cookie for login -- `--user-agent {user_agent}` - Specify custom user agent string to prevent anti-scraping detection - `--transport {stdio,streamable-http}` - Set transport mode - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) +- `--get-session [PATH]` - Login interactively and save session (default: ~/.linkedin-mcp/session.json) +- `--clear-session` - Clear stored LinkedIn session file - `--help` - Show help **HTTP Mode Example (for web-based MCP clients):** @@ -454,7 +386,7 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por **Claude Desktop:** -```**json** +```json { "mcpServers": { "linkedin": { @@ -473,18 +405,18 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por **Login/Scraping issues:** - Use `--no-headless` to see browser actions (captcha challenge, LinkedIn mobile app 2fa, ...) 
-- Add `--no-lazy-init` to attempt to login to LinkedIn immediately instead of waiting for the first tool call - Add `--log-level DEBUG` to see more detailed logging -- Make sure you have only one active LinkedIn session per cookie at a time. Trying to open multiple sessions with the same cookie will result in a cookie invalid error. E.g. if you have a logged in browser session with a docker container, you can't use the same cookie to login with the local setup while the docker container is running / session is not closed. +- Make sure you have only one active LinkedIn session at a time -**ChromeDriver issues:** +**Session issues:** -- Ensure Chrome and ChromeDriver versions match -- Check ChromeDriver is in PATH or set `CHROMEDRIVER_PATH` in your env +- Session is stored in `~/.linkedin-mcp/session.json` +- Use `--clear-session` to clear the session and start fresh -**Python issues:** +**Python/Playwright issues:** -- Check Python version: `uv python --version` (should be 3.12+) +- Check Python version: `python --version` (should be 3.12+) +- Reinstall Playwright: `uv run playwright install chromium` - Reinstall dependencies: `uv sync --reinstall` diff --git a/linkedin_mcp_server/__init__.py b/linkedin_mcp_server/__init__.py index 30c9a9f1..2c28679d 100644 --- a/linkedin_mcp_server/__init__.py +++ b/linkedin_mcp_server/__init__.py @@ -7,17 +7,17 @@ data scraping through a standardized MCP interface. 
Key Features: -- Secure LinkedIn authentication via session cookies +- Secure LinkedIn authentication via session files - LinkedIn profile, company, and job data scraping - MCP-compliant server implementation using FastMCP -- Chrome WebDriver automation with session persistence +- Playwright browser automation with session persistence - Layered configuration system with secure credential storage - Docker containerization for easy deployment - Claude Desktop DXT extension support Architecture: - Clean separation between authentication, driver management, and MCP server -- Singleton pattern for WebDriver session management +- Singleton pattern for browser session management - Comprehensive error handling and logging - Cross-platform compatibility (macOS, Windows, Linux) """ diff --git a/linkedin_mcp_server/authentication.py b/linkedin_mcp_server/authentication.py index 0d94d1a8..92fd3d02 100644 --- a/linkedin_mcp_server/authentication.py +++ b/linkedin_mcp_server/authentication.py @@ -1,105 +1,86 @@ -# linkedin_mcp_server/authentication.py """ -Pure authentication logic for LinkedIn MCP Server. +Authentication logic for LinkedIn MCP Server. -Handles LinkedIn session cookie management with secure storage and retrieval. -Provides layered authentication resolution from configuration, keyring, and user input. -Implements proper error handling with context-aware messaging. +Handles LinkedIn session management with file-based session persistence +and cookie-based authentication for Docker headless mode. 
""" import logging +import os +from pathlib import Path +from typing import Literal, Optional -from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.config.messages import ErrorMessages, InfoMessages -from linkedin_mcp_server.config.providers import ( - clear_cookie_from_keyring, - get_cookie_from_keyring, - save_cookie_to_keyring, +from linkedin_mcp_server.drivers.browser import ( + DEFAULT_SESSION_PATH, + session_exists, ) from linkedin_mcp_server.exceptions import CredentialsNotFoundError -# Constants for cookie validation -MIN_RAW_COOKIE_LENGTH = 110 -MIN_COOKIE_LENGTH = MIN_RAW_COOKIE_LENGTH + len("li_at=") - logger = logging.getLogger(__name__) +AuthSource = Literal["session", "cookie"] -def get_authentication() -> str: - """ - Get LinkedIn cookie from available sources. - - Returns: - str: LinkedIn session cookie - - Raises: - CredentialsNotFoundError: If no authentication is available - """ - config = get_config() - - # First, try environment variable or command line - if config.linkedin.cookie: - logger.info(InfoMessages.using_cookie_from("configuration")) - return config.linkedin.cookie - # Second, try keyring - cookie = get_cookie_from_keyring() - if cookie: - logger.info(InfoMessages.using_cookie_from("keyring")) - return cookie +def get_linkedin_cookie() -> Optional[str]: + """Get LinkedIn cookie from environment variable.""" + return os.environ.get("LINKEDIN_COOKIE") - # No authentication available - raise CredentialsNotFoundError("No LinkedIn cookie found") - -def store_authentication(cookie: str) -> bool: +def get_authentication_source() -> AuthSource: """ - Store LinkedIn cookie securely. + Check available authentication methods in priority order. - Args: - cookie: LinkedIn session cookie to store + Priority: + 1. Session file (most reliable) + 2. 
LINKEDIN_COOKIE env var (Docker headless) Returns: - bool: True if storage was successful, False otherwise - """ - success = save_cookie_to_keyring(cookie) - if success: - logger.info(InfoMessages.cookie_stored_securely()) - else: - logger.warning(InfoMessages.keyring_storage_failed()) - return success - + String indicating auth source: "session" or "cookie" -def clear_authentication() -> bool: + Raises: + CredentialsNotFoundError: If no authentication method available """ - Clear stored authentication. - - Returns: - bool: True if clearing was successful, False otherwise + # Priority 1: Session file + if session_exists(): + logger.info(f"Using session from {DEFAULT_SESSION_PATH}") + return "session" + + # Priority 2: Cookie from environment + if get_linkedin_cookie(): + logger.info("Using LINKEDIN_COOKIE from environment") + return "cookie" + + raise CredentialsNotFoundError( + "No LinkedIn authentication found.\n\n" + "Options:\n" + " 1. Run with --get-session to create a session file (recommended)\n" + " 2. Set LINKEDIN_COOKIE environment variable with your li_at cookie\n" + " 3. Run with --no-headless to login interactively\n\n" + "For Docker users:\n" + " docker run -it -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp \\\n" + " stickerdaniel/linkedin-mcp-server:latest --get-session" + ) + + +def clear_session(session_path: Optional[Path] = None) -> bool: """ - success = clear_cookie_from_keyring() - if success: - logger.info("Authentication cleared from keyring") - else: - logger.warning("Could not clear authentication from keyring") - return success + Clear stored session file. - -def ensure_authentication() -> str: - """ - Ensure authentication is available with clear error messages. 
+ Args: + session_path: Path to session file Returns: - str: Valid LinkedIn session cookie - - Raises: - CredentialsNotFoundError: If no authentication is available with clear instructions + True if clearing was successful """ - try: - return get_authentication() - except CredentialsNotFoundError: - config = get_config() - - raise CredentialsNotFoundError( - ErrorMessages.no_cookie_found(config.is_interactive) - ) + if session_path is None: + session_path = DEFAULT_SESSION_PATH + + if session_path.exists(): + try: + session_path.unlink() + logger.info(f"Session cleared from {session_path}") + return True + except OSError as e: + logger.warning(f"Could not clear session: {e}") + return False + return True diff --git a/linkedin_mcp_server/callbacks.py b/linkedin_mcp_server/callbacks.py new file mode 100644 index 00000000..2f83fa3a --- /dev/null +++ b/linkedin_mcp_server/callbacks.py @@ -0,0 +1,38 @@ +""" +Progress callbacks for MCP tools. + +Provides callback implementations that log progress for LinkedIn scraping operations. 
+""" + +import logging +from typing import Any + +from linkedin_scraper.callbacks import ProgressCallback + +logger = logging.getLogger(__name__) + + +class MCPProgressCallback(ProgressCallback): + """Callback that logs progress for MCP tools.""" + + async def on_start(self, scraper_type: str, url: str) -> None: + """Log when scraping starts.""" + logger.info(f"Starting {scraper_type} scrape: {url}") + + async def on_progress(self, message: str, percent: int) -> None: + """Log progress updates.""" + logger.debug(f"Progress ({percent}%): {message}") + + async def on_complete(self, scraper_type: str, result: Any) -> None: + """Log when scraping completes.""" + logger.info(f"Completed {scraper_type} scrape") + + async def on_error(self, error: Exception) -> None: + """Log errors during scraping.""" + logger.error(f"Scrape error: {error}") + + +class SilentCallback(ProgressCallback): + """Callback that produces no output - useful for background operations.""" + + pass diff --git a/linkedin_mcp_server/cli.py b/linkedin_mcp_server/cli.py index 5757c88b..67eae7fa 100644 --- a/linkedin_mcp_server/cli.py +++ b/linkedin_mcp_server/cli.py @@ -14,8 +14,6 @@ import pyperclip # type: ignore -from linkedin_mcp_server.config import get_config - logger = logging.getLogger(__name__) @@ -26,7 +24,6 @@ def print_claude_config() -> None: This function generates the configuration needed for Claude Desktop and copies it to the clipboard for easy pasting. 
""" - config = get_config() current_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) # Find the full path to uv executable @@ -48,18 +45,8 @@ def print_claude_config() -> None: "run", "-m", "linkedin_mcp_server", - "--no-setup", ] - # Add environment variables to the configuration - env_vars: Dict[str, str] = {} - if config.linkedin.email: - env_vars["LINKEDIN_EMAIL"] = config.linkedin.email - if config.linkedin.password: - env_vars["LINKEDIN_PASSWORD"] = config.linkedin.password - if config.chrome.chromedriver_path: - env_vars["CHROMEDRIVER"] = config.chrome.chromedriver_path - config_json: Dict[str, Any] = { "mcpServers": { "linkedin-scraper": { @@ -76,10 +63,6 @@ def print_claude_config() -> None: } } - # Add environment variables if available - if env_vars: - config_json["mcpServers"]["linkedin-scraper"]["env"] = env_vars - # Convert to string for clipboard config_str = json.dumps(config_json, indent=2) diff --git a/linkedin_mcp_server/cli_main.py b/linkedin_mcp_server/cli_main.py index 4d345821..4b34c3c0 100644 --- a/linkedin_mcp_server/cli_main.py +++ b/linkedin_mcp_server/cli_main.py @@ -1,41 +1,39 @@ -# linkedin_mcp_server/cli_main.py """ LinkedIn MCP Server - Main CLI application entry point. -Implements a three-phase startup: -1. Authentication Setup Phase - Credential validation and session establishment -2. Driver Management Phase - Chrome WebDriver initialization with LinkedIn login -3. Server Runtime Phase - MCP server startup with transport selection - +Implements a simplified two-phase startup: +1. Authentication Check - Verify session file is available +2. 
Server Runtime - MCP server startup with transport selection """ +import asyncio import io import logging import sys from typing import Literal -import inquirer # type: ignore -from linkedin_scraper.exceptions import ( - CaptchaRequiredError, - InvalidCredentialsError, - LoginTimeoutError, - RateLimitError, - SecurityChallengeError, - TwoFactorAuthError, -) +import inquirer + +from linkedin_scraper import is_logged_in +from linkedin_scraper.core.exceptions import AuthenticationError, RateLimitError +from linkedin_mcp_server.authentication import ( + clear_session, + get_authentication_source, +) from linkedin_mcp_server.cli import print_claude_config -from linkedin_mcp_server.config import ( - check_keychain_data_exists, - clear_all_keychain_data, - get_config, - get_keyring_name, +from linkedin_mcp_server.config import get_config +from linkedin_mcp_server.drivers.browser import ( + DEFAULT_SESSION_PATH, + close_browser, + get_or_create_browser, + session_exists, + set_headless, ) -from linkedin_mcp_server.drivers.chrome import close_all_drivers, get_or_create_driver -from linkedin_mcp_server.exceptions import CredentialsNotFoundError, LinkedInMCPError +from linkedin_mcp_server.exceptions import CredentialsNotFoundError from linkedin_mcp_server.logging_config import configure_logging -from linkedin_mcp_server.server import create_mcp_server, shutdown_handler -from linkedin_mcp_server.setup import run_cookie_extraction_setup, run_interactive_setup +from linkedin_mcp_server.server import create_mcp_server +from linkedin_mcp_server.setup import run_interactive_setup, run_session_creation sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8") @@ -63,57 +61,28 @@ def choose_transport_interactive() -> Literal["stdio", "streamable-http"]: return answers["transport"] -def clear_keychain_and_exit() -> None: - """Clear LinkedIn keychain data and exit.""" +def clear_session_and_exit() -> None: + """Clear LinkedIn session and exit.""" config = get_config() - # 
Configure logging configure_logging( log_level=config.server.log_level, json_format=not config.is_interactive and config.server.log_level != "DEBUG", ) - # Get version for logging version = get_version() + logger.info(f"LinkedIn MCP Server v{version} - Session Clear mode") - logger.info(f"LinkedIn MCP Server v{version} - Keychain Clear mode started") - - # Check what exists in keychain - existing = check_keychain_data_exists() - - # If nothing exists, inform user and exit - if not existing["has_any"]: - print("โ„น๏ธ No LinkedIn data found in keychain") + if not session_exists(): + print("โ„น๏ธ No session file found") print("Nothing to clear.") sys.exit(0) - # Show confirmation prompt for existing items only - keyring_name = get_keyring_name() - print(f"๐Ÿ”‘ Clear LinkedIn data from {keyring_name}?") - print("This will remove:") - - items_to_remove = [] - if existing["has_credentials"]: - credential_parts = [] - if existing["has_email"]: - credential_parts.append("email") - if existing["has_password"]: - credential_parts.append("password") - items_to_remove.append(f" โ€ข LinkedIn {' and '.join(credential_parts)}") - - if existing["has_cookie"]: - items_to_remove.append(" โ€ข LinkedIn session cookie") + print(f"๐Ÿ”‘ Clear LinkedIn session from {DEFAULT_SESSION_PATH}?") - for item in items_to_remove: - print(item) - print() - - # Get user confirmation try: confirmation = ( - input("Are you sure you want to clear this keychain data? (y/N): ") - .strip() - .lower() + input("Are you sure you want to clear the session? 
(y/N): ").strip().lower() ) if confirmation not in ("y", "yes"): print("โŒ Operation cancelled") @@ -122,154 +91,109 @@ def clear_keychain_and_exit() -> None: print("\nโŒ Operation cancelled") sys.exit(0) - try: - # Clear all keychain data - success = clear_all_keychain_data() - - if success: - logger.info("Keychain data cleared successfully") - print("โœ… LinkedIn keychain data cleared successfully!") - else: - logger.error("Failed to clear keychain data") - print("โŒ Failed to clear some keychain data - check logs for details") - sys.exit(1) - - except Exception as e: - logger.error(f"Error clearing keychain: {e}") - print(f"โŒ Error clearing keychain: {e}") + if clear_session(): + print("โœ… LinkedIn session cleared successfully!") + else: + print("โŒ Failed to clear session") sys.exit(1) sys.exit(0) -def get_cookie_and_exit() -> None: - """Get LinkedIn cookie and exit (for Docker setup).""" +def get_session_and_exit() -> None: + """Create session interactively and exit.""" config = get_config() - # Configure logging configure_logging( log_level=config.server.log_level, json_format=not config.is_interactive and config.server.log_level != "DEBUG", ) - # Get version for logging version = get_version() + logger.info(f"LinkedIn MCP Server v{version} - Session Creation mode") - logger.info(f"LinkedIn MCP Server v{version} - Cookie Extraction mode started") + output_path = config.server.session_output_path + success = run_session_creation(output_path) - try: - # Run cookie extraction setup - cookie = run_cookie_extraction_setup() - - logger.info("Cookie extraction successful") - print("โœ… Login successful!") - print("๐Ÿช LinkedIn Cookie extracted:") - print(cookie) + sys.exit(0 if success else 1) - # Try to copy to clipboard - clipboard_success = False - try: - import pyperclip - pyperclip.copy(cookie) - clipboard_success = True - print("๐Ÿ“‹ Cookie copied to clipboard!") - except Exception as e: - logger.debug(f"pyperclip clipboard failed: {e}") +def 
session_info_and_exit() -> None: + """Check session validity and display info, then exit.""" + config = get_config() - if not clipboard_success: - print( - "๐Ÿ’ก Set this cookie as an environment variable in your config or pass it with --cookie flag" - ) + configure_logging( + log_level=config.server.log_level, + json_format=not config.is_interactive and config.server.log_level != "DEBUG", + ) - except Exception as e: - logger.error(f"Error getting cookie: {e}") + version = get_version() + logger.info(f"LinkedIn MCP Server v{version} - Session Info mode") - # Provide specific guidance for security challenges - error_msg = str(e).lower() - if "security challenge" in error_msg or "captcha" in error_msg: - print("โŒ LinkedIn security challenge detected") - print("๐Ÿ’ก Try one of these solutions:") - print( - " 1. Use an existing LinkedIn cookie from your browser instead (see instructions below)" - ) - print( - " 2. Use --no-headless flag (manual installation required, does not work with Docker) and solve the security challenge manually" - ) - print("\n๐Ÿช To get your LinkedIn cookie manually:") - print(" 1. Login to LinkedIn in your browser") - print(" 2. Open Developer Tools (F12)") - print(" 3. Go to Application/Storage > Cookies > www.linkedin.com") - print(" 4. Copy the 'li_at' cookie value") - print(" 5. 
Set LINKEDIN_COOKIE environment variable or use --cookie flag") - elif "invalid credentials" in error_msg: - print("โŒ Invalid LinkedIn credentials") - print("๐Ÿ’ก Please check your email and password") - else: - print("โŒ Failed to obtain cookie - check your credentials") + # Check if session file exists first + if not session_exists(): + print(f"โŒ No session file found at {DEFAULT_SESSION_PATH}") + print(" Run with --get-session to create a session") sys.exit(1) - sys.exit(0) + # Check if session is valid by testing login status + async def check_session() -> bool: + try: + set_headless(True) # Always check headless + browser = await get_or_create_browser() + valid = await is_logged_in(browser.page) + await close_browser() + return valid + except Exception as e: + logger.error(f"Error checking session: {e}") + return False + valid = asyncio.run(check_session()) -def ensure_authentication_ready() -> str: + if valid: + print(f"โœ… Session is valid: {DEFAULT_SESSION_PATH}") + sys.exit(0) + else: + print(f"โŒ Session expired or invalid: {DEFAULT_SESSION_PATH}") + print(" Run with --get-session to re-authenticate") + sys.exit(1) + + +def ensure_authentication_ready() -> None: """ - Phase 1: Ensure authentication is ready before any drivers are created. + Phase 1: Ensure authentication is ready. - Returns: - str: Valid LinkedIn session cookie + Checks for existing session file. + If not found, runs interactive setup in interactive mode. 
Raises: CredentialsNotFoundError: If authentication setup fails """ config = get_config() - # Check if we already have a cookie in config (from keyring, env, or args) - if config.linkedin.cookie: - logger.info("Using LinkedIn cookie from configuration") - return config.linkedin.cookie + # Check for existing session + try: + get_authentication_source() + return + + except CredentialsNotFoundError: + pass - # If in non-interactive mode and no cookie, fail immediately + # No authentication found - try interactive setup if possible if not config.is_interactive: raise CredentialsNotFoundError( - "No LinkedIn cookie found for non-interactive mode. You can:\n" - " 1. Run with --get-cookie to extract a cookie using email/password\n" - " 2. Set LINKEDIN_COOKIE environment variable with a valid LinkedIn session cookie" + "No LinkedIn session found.\n" + "Options:\n" + " 1. Run with --get-session to create a session\n" + " 2. Run with --no-headless to login interactively" ) - # Run interactive setup to get credentials and obtain cookie - logger.info("Setting up LinkedIn authentication...") - return run_interactive_setup() - + # Run interactive setup + logger.info("No authentication found, starting interactive setup...") + success = run_interactive_setup() -def initialize_driver_with_auth(authentication: str) -> None: - """ - Phase 2: Initialize driver using existing authentication. 
- - Args: - authentication: LinkedIn session cookie - - Raises: - Various exceptions if driver creation or login fails - """ - config = get_config() - - if config.server.lazy_init: - logger.info( - "Using lazy initialization - driver will be created on first tool call" - ) - return - - logger.info("Initializing Chrome WebDriver and logging in...") - - try: - # Create driver and login with provided authentication - get_or_create_driver(authentication) - logger.info("โœ… Web driver initialized and authenticated successfully") - - except Exception as e: - logger.error(f"Failed to initialize driver: {e}") - raise e + if not success: + raise CredentialsNotFoundError("Interactive setup was cancelled or failed") def get_version() -> str: @@ -289,131 +213,90 @@ def get_version() -> str: def main() -> None: - """Main application entry point with clear phase separation.""" - - # Get configuration (this sets config.is_interactive) + """Main application entry point.""" config = get_config() - # Configure logging FIRST (before any logger usage) + # Configure logging configure_logging( log_level=config.server.log_level, json_format=not config.is_interactive and config.server.log_level != "DEBUG", ) - # Get version for logging/display version = get_version() - # Only print banner in interactive mode (to avoid interfering with MCP protocol) + # Print banner in interactive mode if config.is_interactive: print(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") print("=" * 40) - # Always log version (this goes to stderr/logging, not stdout) - logger.info(f"๐Ÿ”— LinkedIn MCP Server v{version} ๐Ÿ”—") + logger.info(f"LinkedIn MCP Server v{version}") + + # Set headless mode from config + set_headless(config.browser.headless) + + # Handle --clear-session flag + if config.server.clear_session: + clear_session_and_exit() - # Handle --clear-keychain flag immediately - if config.server.clear_keychain: - clear_keychain_and_exit() + # Handle --get-session flag + if config.server.get_session: + 
get_session_and_exit() - # Handle --get-cookie flag immediately - if config.server.get_cookie: - get_cookie_and_exit() + # Handle --session-info flag + if config.server.session_info: + session_info_and_exit() logger.debug(f"Server configuration: {config}") # Phase 1: Ensure Authentication is Ready try: - authentication = ensure_authentication_ready() + ensure_authentication_ready() print("โœ… Authentication ready") logger.info("Authentication ready") + except CredentialsNotFoundError as e: logger.error(f"Authentication setup failed: {e}") if config.is_interactive: - print( - "\nโŒ Authentication required - please provide LinkedIn's li_at cookie" - ) + print("\nโŒ Authentication required") + print(str(e)) else: - # TODO: make claude desktop handle this without terminating - print("\nโŒ Cookie required for Docker/non-interactive mode") - + print("\nโŒ Authentication required for non-interactive mode") sys.exit(1) + except KeyboardInterrupt: print("\n\n๐Ÿ‘‹ Setup cancelled by user") sys.exit(0) - except Exception as e: - logger.error(f"Unexpected error during authentication setup: {e}") - print("\nโŒ Setup failed - please try again") - sys.exit(1) - - # Phase 2: Initialize Driver (if not lazy) - try: - initialize_driver_with_auth(authentication) - except InvalidCredentialsError as e: - logger.error(f"Driver initialization failed with invalid credentials: {e}") - # Cookie was already cleared in driver layer - # In interactive mode, try setup again - if config.is_interactive: - print(f"\nโŒ {str(e)}") - print("๐Ÿ”„ Starting interactive setup for new authentication...") - try: - new_authentication = run_interactive_setup() - # Try again with new authentication - initialize_driver_with_auth(new_authentication) - logger.info("โœ… Successfully authenticated with new credentials") - except Exception as setup_error: - logger.error(f"Setup failed: {setup_error}") - print(f"\nโŒ Setup failed: {setup_error}") - sys.exit(1) - else: - print(f"\nโŒ {str(e)}") - if not 
config.server.lazy_init: - sys.exit(1) - except ( - LinkedInMCPError, - CaptchaRequiredError, - SecurityChallengeError, - TwoFactorAuthError, - RateLimitError, - LoginTimeoutError, - ) as e: - logger.error(f"Driver initialization failed: {e}") + except (AuthenticationError, RateLimitError) as e: + logger.error(f"LinkedIn error during setup: {e}") print(f"\nโŒ {str(e)}") - if not config.server.lazy_init: - sys.exit(1) + sys.exit(1) + except Exception as e: - logger.error(f"Unexpected error during driver initialization: {e}") - print(f"\nโŒ Driver initialization failed: {e}") - if not config.server.lazy_init: - sys.exit(1) + logger.error(f"Unexpected error during authentication setup: {e}") + print(f"\nโŒ Setup failed: {e}") + sys.exit(1) - # Phase 3: Server Runtime + # Phase 2: Server Runtime try: - # Decide transport using the new config system transport = config.server.transport - # Only show transport prompt if: - # a) running in interactive environment AND - # b) transport wasn't explicitly set via CLI/env + # Prompt for transport in interactive mode if not explicitly set if config.is_interactive and not config.server.transport_explicitly_set: print("\n๐Ÿš€ Server ready! 
Choose transport mode:") transport = choose_transport_interactive() - elif not config.is_interactive and not config.server.transport_explicitly_set: - # If non-interactive and no transport explicitly set, use default (stdio) - transport = config.server.transport - # Print configuration for Claude if in interactive mode and using stdio transport + # Print Claude config in interactive stdio mode if config.is_interactive and transport == "stdio": print_claude_config() # Create and run the MCP server mcp = create_mcp_server() - # Start server print(f"\n๐Ÿš€ Running LinkedIn MCP server ({transport.upper()} mode)...") if transport == "streamable-http": print( - f"๐Ÿ“ก HTTP server will be available at http://{config.server.host}:{config.server.port}{config.server.path}" + f"๐Ÿ“ก HTTP server at http://{config.server.host}:{config.server.port}{config.server.path}" ) mcp.run( transport=transport, @@ -427,6 +310,7 @@ def main() -> None: except KeyboardInterrupt: print("\nโน๏ธ Server stopped by user") exit_gracefully(0) + except Exception as e: logger.error(f"Server runtime error: {e}") print(f"\nโŒ Server error: {e}") @@ -434,15 +318,8 @@ def main() -> None: def exit_gracefully(exit_code: int = 0) -> None: - """Exit the application gracefully, cleaning up resources.""" + """Exit the application gracefully.""" print("๐Ÿ‘‹ Shutting down LinkedIn MCP server...") - - # Clean up drivers - close_all_drivers() - - # Clean up server - shutdown_handler() - sys.exit(exit_code) diff --git a/linkedin_mcp_server/config/__init__.py b/linkedin_mcp_server/config/__init__.py index 5e332645..e69da0d2 100644 --- a/linkedin_mcp_server/config/__init__.py +++ b/linkedin_mcp_server/config/__init__.py @@ -1,33 +1,15 @@ -# src/linkedin_mcp_server/config/__init__.py """ -Configuration system initialization and management for LinkedIn MCP Server. +Configuration system for LinkedIn MCP Server. 
-This module provides the main configuration interface and implements the singleton -pattern for configuration management. It orchestrates the loading of configuration -from multiple sources and provides a unified API for accessing configuration -throughout the application. - -Key Features: -- Singleton pattern for global configuration access -- Integration with all configuration providers and loaders -- Unified API for configuration access across the application -- Proper initialization and lifecycle management -- Support for configuration reloading and updates +Provides a singleton pattern for configuration management with +loading from CLI arguments and environment variables. """ import logging from typing import Optional from .loaders import load_config -from .providers import ( - clear_credentials_from_keyring, - clear_all_keychain_data, - check_keychain_data_exists, - get_credentials_from_keyring, - get_keyring_name, - save_credentials_to_keyring, -) -from .schema import AppConfig, ChromeConfig, LinkedInConfig, ServerConfig +from .schema import AppConfig, BrowserConfig, ServerConfig logger = logging.getLogger(__name__) @@ -41,7 +23,6 @@ def get_config() -> AppConfig: if _config is None: _config = load_config() logger.debug("Configuration loaded") - # At this point _config is guaranteed to be AppConfig, not None return _config # type: ignore[return-value] @@ -52,18 +33,10 @@ def reset_config() -> None: logger.debug("Configuration reset") -# Export schema classes for type annotations __all__ = [ "AppConfig", - "ChromeConfig", - "LinkedInConfig", + "BrowserConfig", "ServerConfig", "get_config", "reset_config", - "get_credentials_from_keyring", - "save_credentials_to_keyring", - "clear_credentials_from_keyring", - "clear_all_keychain_data", - "check_keychain_data_exists", - "get_keyring_name", ] diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index 0333539a..d96a2e08 100644 --- a/linkedin_mcp_server/config/loaders.py +++ 
b/linkedin_mcp_server/config/loaders.py @@ -1,31 +1,14 @@ -# src/linkedin_mcp_server/config/loaders.py """ Configuration loading and argument parsing for LinkedIn MCP Server. -This module implements the layered configuration system that loads settings from -multiple sources in priority order: CLI arguments โ†’ environment variables โ†’ keyring -โ†’ defaults. It provides the main configuration loading logic and argument parsing -for the MCP server. - -Key Functions: -- Command-line argument parsing with comprehensive options -- Environment variable parsing with type conversion -- Integration with keyring providers for secure credential loading -- Chrome driver path auto-detection and validation -- Layered configuration with proper priority handling +Loads settings from CLI arguments and environment variables. """ import argparse import logging import os import sys -from typing import Any, Dict, Optional -from .providers import ( - get_chromedriver_paths, - get_cookie_from_keyring, - get_credentials_from_keyring, -) from .schema import AppConfig logger = logging.getLogger(__name__) @@ -38,35 +21,10 @@ class EnvironmentKeys: """Environment variable names used by the application.""" - # LinkedIn configuration - LINKEDIN_EMAIL = "LINKEDIN_EMAIL" - LINKEDIN_PASSWORD = "LINKEDIN_PASSWORD" - LINKEDIN_COOKIE = "LINKEDIN_COOKIE" - - # Chrome configuration - CHROMEDRIVER = "CHROMEDRIVER" HEADLESS = "HEADLESS" - USER_AGENT = "USER_AGENT" - - # Server configuration LOG_LEVEL = "LOG_LEVEL" - LAZY_INIT = "LAZY_INIT" TRANSPORT = "TRANSPORT" - - -def find_chromedriver() -> Optional[str]: - """Find the ChromeDriver executable in common locations.""" - # First check environment variable - if path := os.getenv("CHROMEDRIVER"): - if os.path.exists(path): - return path - - # Check common locations - for path in get_chromedriver_paths(): - if os.path.exists(path) and (os.access(path, os.X_OK) or path.endswith(".exe")): - return path - - return None + LINKEDIN_COOKIE = "LINKEDIN_COOKIE" 
def is_interactive_environment() -> bool: @@ -74,56 +32,17 @@ def is_interactive_environment() -> bool: Detect if running in an interactive environment (TTY). Returns: - bool: True if both stdin and stdout are TTY devices + True if both stdin and stdout are TTY devices """ try: return sys.stdin.isatty() and sys.stdout.isatty() except (AttributeError, OSError): - # Handle cases where stdin/stdout might not have isatty() or fail - # This can happen in some containers, test environments, or non-standard setups return False -def load_from_keyring(config: AppConfig) -> AppConfig: - """Load configuration from system keyring.""" - # Load LinkedIn cookie first (higher priority) - if cookie := get_cookie_from_keyring(): - config.linkedin.cookie = cookie - logger.debug("LinkedIn cookie loaded from keyring") - - # Load LinkedIn credentials if cookie not available - if not config.linkedin.cookie: - credentials = get_credentials_from_keyring() - if credentials["email"]: - config.linkedin.email = credentials["email"] - logger.debug("LinkedIn email loaded from keyring") - if credentials["password"]: - config.linkedin.password = credentials["password"] - logger.debug("LinkedIn password loaded from keyring") - - return config - - def load_from_env(config: AppConfig) -> AppConfig: """Load configuration from environment variables.""" - # LinkedIn credentials - if email := os.environ.get(EnvironmentKeys.LINKEDIN_EMAIL): - config.linkedin.email = email - - if password := os.environ.get(EnvironmentKeys.LINKEDIN_PASSWORD): - config.linkedin.password = password - - if cookie := os.environ.get(EnvironmentKeys.LINKEDIN_COOKIE): - config.linkedin.cookie = cookie - - # ChromeDriver configuration - if chromedriver := os.environ.get(EnvironmentKeys.CHROMEDRIVER): - config.chrome.chromedriver_path = chromedriver - - if user_agent := os.environ.get(EnvironmentKeys.USER_AGENT): - config.chrome.user_agent = user_agent - # Log level if log_level_env := os.environ.get(EnvironmentKeys.LOG_LEVEL): 
log_level_upper = log_level_env.upper() @@ -132,15 +51,9 @@ def load_from_env(config: AppConfig) -> AppConfig: # Headless mode if os.environ.get(EnvironmentKeys.HEADLESS) in FALSY_VALUES: - config.chrome.headless = False + config.browser.headless = False elif os.environ.get(EnvironmentKeys.HEADLESS) in TRUTHY_VALUES: - config.chrome.headless = True - - # Lazy initialization - if os.environ.get(EnvironmentKeys.LAZY_INIT) in TRUTHY_VALUES: - config.server.lazy_init = True - elif os.environ.get(EnvironmentKeys.LAZY_INIT) in FALSY_VALUES: - config.server.lazy_init = False + config.browser.headless = True # Transport mode if transport_env := os.environ.get(EnvironmentKeys.TRANSPORT): @@ -150,6 +63,10 @@ def load_from_env(config: AppConfig) -> AppConfig: elif transport_env == "streamable-http": config.server.transport = "streamable-http" + # LinkedIn cookie for headless auth + if cookie := os.environ.get(EnvironmentKeys.LINKEDIN_COOKIE): + config.server.linkedin_cookie = cookie + return config @@ -162,7 +79,7 @@ def load_from_args(config: AppConfig) -> AppConfig: parser.add_argument( "--no-headless", action="store_true", - help="Run Chrome with a visible browser window (useful for debugging)", + help="Run browser with a visible window (useful for login and debugging)", ) parser.add_argument( @@ -171,12 +88,6 @@ def load_from_args(config: AppConfig) -> AppConfig: help="Set logging level (default: WARNING)", ) - parser.add_argument( - "--no-lazy-init", - action="store_true", - help="Initialize Chrome driver and login immediately", - ) - parser.add_argument( "--transport", choices=["stdio", "streamable-http"], @@ -205,49 +116,78 @@ def load_from_args(config: AppConfig) -> AppConfig: help="HTTP server path (default: /mcp)", ) + # Browser configuration parser.add_argument( - "--chromedriver", + "--slow-mo", + type=int, + default=0, + metavar="MS", + help="Slow down browser actions by N milliseconds (debugging)", + ) + + parser.add_argument( + "--user-agent", type=str, - 
help="Specify the path to the ChromeDriver executable", + default=None, + help="Custom browser user agent", ) parser.add_argument( - "--get-cookie", - action="store_true", - help="Login with credentials and display cookie for Docker setup", + "--viewport", + type=str, + default="1280x720", + metavar="WxH", + help="Browser viewport size (default: 1280x720)", ) + # Retry configuration parser.add_argument( - "--clear-keychain", - action="store_true", - help="Clear all stored LinkedIn credentials and cookies from system keychain", + "--retry-attempts", + type=int, + default=3, + metavar="N", + help="Max retry attempts for transient failures (default: 3)", ) parser.add_argument( - "--cookie", - type=str, - help="Specify LinkedIn cookie directly", + "--retry-backoff", + type=float, + default=2.0, + metavar="S", + help="Backoff multiplier between retries (default: 2.0)", ) + # Session management parser.add_argument( - "--user-agent", - type=str, - help="Specify custom user agent string to prevent anti-scraping detection", + "--get-session", + nargs="?", + const="~/.linkedin-mcp/session.json", + default=None, + metavar="PATH", + help="Login interactively and save session (default: ~/.linkedin-mcp/session.json)", + ) + + parser.add_argument( + "--session-info", + action="store_true", + help="Check if current session is valid and exit", + ) + + parser.add_argument( + "--clear-session", + action="store_true", + help="Clear stored LinkedIn session file", ) args = parser.parse_args() # Update configuration with parsed arguments if args.no_headless: - config.chrome.headless = False + config.browser.headless = False - # Handle log level argument if args.log_level: config.server.log_level = args.log_level - if args.no_lazy_init: - config.server.lazy_init = False - if args.transport: config.server.transport = args.transport config.server.transport_explicitly_set = True @@ -261,33 +201,37 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.path: config.server.path = 
args.path - if args.chromedriver: - config.chrome.chromedriver_path = args.chromedriver - - if args.get_cookie: - config.server.get_cookie = True - if args.clear_keychain: - config.server.clear_keychain = True - if args.cookie: - config.linkedin.cookie = args.cookie + # Browser configuration + if args.slow_mo: + config.browser.slow_mo = args.slow_mo if args.user_agent: - config.chrome.user_agent = args.user_agent + config.browser.user_agent = args.user_agent - return config + if args.viewport: + try: + width, height = args.viewport.lower().split("x") + config.browser.viewport_width = int(width) + config.browser.viewport_height = int(height) + except ValueError: + logger.warning(f"Invalid viewport format: {args.viewport}, using default") + # Retry configuration + config.server.retry_attempts = args.retry_attempts + config.server.retry_backoff = args.retry_backoff -def detect_environment() -> Dict[str, Any]: - """ - Detect environment settings without side effects. + # Session management + if args.get_session is not None: + config.server.get_session = True + config.server.session_output_path = args.get_session - Returns: - Dict containing detected environment settings - """ - return { - "chromedriver_path": find_chromedriver(), - "is_interactive": is_interactive_environment(), - } + if args.session_info: + config.server.session_info = True + + if args.clear_session: + config.server.clear_session = True + + return config def load_config() -> AppConfig: @@ -297,34 +241,17 @@ def load_config() -> AppConfig: Configuration is loaded in the following priority order: 1. Command line arguments (highest priority) 2. Environment variables - 3. System keyring - 4. Auto-detection (ChromeDriver, interactive mode) - 5. Defaults (lowest priority) + 3. 
Defaults (lowest priority) Returns: - AppConfig: Fully configured application settings - - Raises: - ConfigurationError: If configuration validation fails + Fully configured application settings """ # Start with default configuration config = AppConfig() - # Apply environment detection - env_settings = detect_environment() - - # Set detected values if not already configured - if env_settings["chromedriver_path"] and not config.chrome.chromedriver_path: - config.chrome.chromedriver_path = env_settings["chromedriver_path"] - logger.debug( - f"Auto-detected ChromeDriver found at: {env_settings['chromedriver_path']}" - ) - - config.is_interactive = env_settings["is_interactive"] - logger.debug(f"Auto-detected interactive mode: {config.is_interactive}") - - # Load from keyring (lowest override priority) - config = load_from_keyring(config) + # Set interactive mode + config.is_interactive = is_interactive_environment() + logger.debug(f"Interactive mode: {config.is_interactive}") # Override with environment variables config = load_from_env(config) diff --git a/linkedin_mcp_server/config/messages.py b/linkedin_mcp_server/config/messages.py deleted file mode 100644 index 2008530e..00000000 --- a/linkedin_mcp_server/config/messages.py +++ /dev/null @@ -1,101 +0,0 @@ -# linkedin_mcp_server/config/messages.py -""" -Centralized message formatting for consistent user communication across contexts. - -Provides structured error and informational messages with context-aware formatting -for interactive vs non-interactive modes and different authentication scenarios. -""" - - -class ErrorMessages: - """Centralized error message formatting for consistent communication.""" - - @staticmethod - def no_cookie_found(is_interactive: bool) -> str: - """ - Generate appropriate error message when no LinkedIn cookie is found. 
- - Args: - is_interactive: Whether the application is running in interactive mode - - Returns: - str: Formatted error message with appropriate instructions - """ - if is_interactive: - return "No LinkedIn authentication found. Please run setup to configure authentication." - else: - return ( - "No LinkedIn cookie found. You can:\n" - " 1. Run with --get-cookie to extract a cookie using email/password\n" - " 2. Set LINKEDIN_COOKIE environment variable with a valid LinkedIn session cookie" - ) - - @staticmethod - def no_credentials_found() -> str: - """Error message when credentials are required but not found.""" - return ( - "No LinkedIn credentials found. Please provide credentials via " - "environment variables (LINKEDIN_EMAIL, LINKEDIN_PASSWORD) for setup." - ) - - @staticmethod - def invalid_cookie_format(cookie_sample: str) -> str: - """ - Error message for invalid cookie format. - - Args: - cookie_sample: Sample of the invalid cookie (truncated for security) - - Returns: - str: Formatted error message - """ - # Only show first 20 characters for security - safe_sample = ( - cookie_sample[:20] + "..." if len(cookie_sample) > 20 else cookie_sample - ) - return ( - f"Invalid LinkedIn cookie format: '{safe_sample}'. " - "Cookie should be a LinkedIn session token (li_at=...) or raw token value." - ) - - @staticmethod - def authentication_setup_instructions() -> str: - """Instructions for setting up authentication.""" - return ( - "To set up LinkedIn authentication:\n" - " 1. Run with --get-cookie flag to extract a session cookie\n" - " 2. Or set LINKEDIN_COOKIE environment variable\n" - " 3. 
Or run interactively to enter credentials" - ) - - -class InfoMessages: - """Centralized informational message formatting.""" - - @staticmethod - def credentials_stored_securely() -> str: - """Message when credentials are successfully stored.""" - return "Credentials stored securely in system keyring" - - @staticmethod - def cookie_stored_securely() -> str: - """Message when cookie is successfully stored.""" - return "Cookie stored securely in system keyring" - - @staticmethod - def keyring_storage_failed() -> str: - """Warning when keyring storage fails.""" - return "Could not store credentials in system keyring" - - @staticmethod - def using_cookie_from(source: str) -> str: - """ - Message indicating cookie source. - - Args: - source: Source of the cookie (e.g., "environment", "keyring", "configuration") - - Returns: - str: Formatted message - """ - return f"Using LinkedIn cookie from {source}" diff --git a/linkedin_mcp_server/config/providers.py b/linkedin_mcp_server/config/providers.py deleted file mode 100644 index 0a05c7c1..00000000 --- a/linkedin_mcp_server/config/providers.py +++ /dev/null @@ -1,195 +0,0 @@ -# src/linkedin_mcp_server/config/providers.py -""" -Configuration providers for LinkedIn MCP Server. - -This module provides secure credential storage and retrieval using the system keyring, -as well as utility functions for Chrome driver path detection. It abstracts the -complexity of different keyring backends across macOS, Windows, and Linux. 
- -Key Functions: -- System keyring integration for LinkedIn credentials and cookies -- Chrome driver path detection across different operating systems -- Secure credential management with proper error handling -- Cross-platform compatibility with appropriate keyring backends -""" - -import logging -import os -import platform -from typing import Dict, List, Optional - -import keyring -from keyring.errors import KeyringError - -# Constants -SERVICE_NAME = "linkedin_mcp_server" -EMAIL_KEY = "linkedin_email" -PASSWORD_KEY = "linkedin_password" -COOKIE_KEY = "linkedin_cookie" - -logger = logging.getLogger(__name__) - - -def get_keyring_name() -> str: - """Get the name of the current keyring backend.""" - system = platform.system() - if system == "Darwin": - return "macOS Keychain" - elif system == "Windows": - return "Windows Credential Locker" - else: - return keyring.get_keyring().__class__.__name__ - - -def get_secret_from_keyring(key: str) -> Optional[str]: - """Retrieve a secret from system keyring.""" - try: - secret = keyring.get_password(SERVICE_NAME, key) - return secret - except KeyringError as e: - logger.error(f"Error accessing keyring for {key}: {e}") - return None - - -def set_secret_in_keyring(key: str, value: str) -> bool: - """Store a secret in system keyring.""" - try: - keyring.set_password(SERVICE_NAME, key, value) - logger.debug(f"Secret '{key}' stored successfully in {get_keyring_name()}") - return True - except KeyringError as e: - logger.error(f"Error storing secret '{key}': {e}") - return False - - -def get_credentials_from_keyring() -> Dict[str, Optional[str]]: - """Retrieve LinkedIn credentials from system keyring.""" - email = get_secret_from_keyring(EMAIL_KEY) - password = get_secret_from_keyring(PASSWORD_KEY) - - return {"email": email, "password": password} - - -def save_credentials_to_keyring(email: str, password: str) -> bool: - """Save LinkedIn credentials to system keyring.""" - email_saved = set_secret_in_keyring(EMAIL_KEY, email) - 
password_saved = set_secret_in_keyring(PASSWORD_KEY, password) - - return email_saved and password_saved - - -def clear_credentials_from_keyring() -> bool: - """Clear stored credentials from the keyring.""" - try: - keyring.delete_password(SERVICE_NAME, EMAIL_KEY) - keyring.delete_password(SERVICE_NAME, PASSWORD_KEY) - logger.info(f"Credentials removed from {get_keyring_name()}") - return True - except KeyringError as e: - logger.error(f"Error clearing credentials: {e}") - return False - - -def get_cookie_from_keyring() -> Optional[str]: - """Retrieve LinkedIn cookie from system keyring.""" - return get_secret_from_keyring(COOKIE_KEY) - - -def save_cookie_to_keyring(cookie: str) -> bool: - """Save LinkedIn cookie to system keyring.""" - return set_secret_in_keyring(COOKIE_KEY, cookie) - - -def clear_cookie_from_keyring() -> bool: - """Clear stored cookie from the keyring.""" - try: - keyring.delete_password(SERVICE_NAME, COOKIE_KEY) - logger.info(f"Cookie removed from {get_keyring_name()}") - return True - except KeyringError as e: - logger.error(f"Error clearing cookie: {e}") - return False - - -def check_keychain_data_exists() -> Dict[str, bool]: - """Check what LinkedIn data exists in the keyring.""" - credentials = get_credentials_from_keyring() - cookie = get_cookie_from_keyring() - - return { - "has_email": credentials["email"] is not None, - "has_password": credentials["password"] is not None, - "has_cookie": cookie is not None, - "has_credentials": credentials["email"] is not None - or credentials["password"] is not None, - "has_any": credentials["email"] is not None - or credentials["password"] is not None - or cookie is not None, - } - - -def clear_existing_keychain_data() -> Dict[str, bool]: - """Clear only existing LinkedIn data from the keyring.""" - existing = check_keychain_data_exists() - results = {"credentials_cleared": False, "cookie_cleared": False} - - # Only try to clear credentials if they exist - if existing["has_credentials"]: - try: - if 
existing["has_email"]: - keyring.delete_password(SERVICE_NAME, EMAIL_KEY) - if existing["has_password"]: - keyring.delete_password(SERVICE_NAME, PASSWORD_KEY) - results["credentials_cleared"] = True - logger.info(f"Credentials removed from {get_keyring_name()}") - except KeyringError as e: - logger.error(f"Error clearing credentials: {e}") - else: - results["credentials_cleared"] = True # Nothing to clear = success - - # Only try to clear cookie if it exists - if existing["has_cookie"]: - try: - keyring.delete_password(SERVICE_NAME, COOKIE_KEY) - results["cookie_cleared"] = True - logger.info(f"Cookie removed from {get_keyring_name()}") - except KeyringError as e: - logger.error(f"Error clearing cookie: {e}") - else: - results["cookie_cleared"] = True # Nothing to clear = success - - return results - - -def clear_all_keychain_data() -> bool: - """Clear all stored LinkedIn data from the keyring (credentials + cookie).""" - results = clear_existing_keychain_data() - - if results["credentials_cleared"] and results["cookie_cleared"]: - logger.info(f"All LinkedIn data cleared from {get_keyring_name()}") - return True - else: - logger.error("Failed to clear some LinkedIn data from keyring") - return False - - -def get_chromedriver_paths() -> List[str]: - """Get possible ChromeDriver paths based on the platform.""" - paths = [ - os.path.join(os.path.dirname(__file__), "../../../../drivers/chromedriver"), - os.path.join(os.path.expanduser("~"), "chromedriver"), - "/usr/local/bin/chromedriver", - "/usr/bin/chromedriver", - "/opt/homebrew/bin/chromedriver", - "/Applications/chromedriver", - ] - - if platform.system() == "Windows": - paths.extend( - [ - "C:\\Program Files\\chromedriver.exe", - "C:\\Program Files (x86)\\chromedriver.exe", - ] - ) - - return paths diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index 77f94639..d535381f 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -1,21 
+1,12 @@ -# src/linkedin_mcp_server/config/schema.py """ Configuration schema definitions for LinkedIn MCP Server. -This module defines the dataclass schemas that represent the application's configuration -structure. It provides type-safe configuration objects with validation and default values -for all aspects of the server including Chrome driver settings, LinkedIn credentials, -and MCP server parameters. - -Key Components: -- ChromeConfig: Chrome driver and browser configuration -- LinkedInConfig: LinkedIn authentication and connection settings -- ServerConfig: MCP server transport and operational settings -- AppConfig: Main application configuration combining all components +Defines the dataclass schemas that represent the application's configuration +structure with type-safe configuration objects and default values. """ from dataclasses import dataclass, field -from typing import List, Literal, Optional +from typing import Literal, Optional class ConfigurationError(Exception): @@ -25,22 +16,14 @@ class ConfigurationError(Exception): @dataclass -class ChromeConfig: - """Configuration for Chrome driver.""" +class BrowserConfig: + """Configuration for browser settings.""" headless: bool = True - chromedriver_path: Optional[str] = None - browser_args: List[str] = field(default_factory=list) - user_agent: Optional[str] = None - - -@dataclass -class LinkedInConfig: - """LinkedIn connection configuration.""" - - email: Optional[str] = None - password: Optional[str] = None - cookie: Optional[str] = None + slow_mo: int = 0 # Milliseconds between browser actions (debugging) + user_agent: Optional[str] = None # Custom browser user agent + viewport_width: int = 1280 + viewport_height: int = 720 @dataclass @@ -48,39 +31,44 @@ class ServerConfig: """MCP server configuration.""" transport: Literal["stdio", "streamable-http"] = "stdio" - transport_explicitly_set: bool = False # Track if transport was explicitly set - lazy_init: bool = True + transport_explicitly_set: bool = 
False log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR"] = "WARNING" - get_cookie: bool = False - clear_keychain: bool = False + get_session: bool = False + session_output_path: Optional[str] = None + session_info: bool = False # Check session validity and exit + clear_session: bool = False + # Retry configuration + retry_attempts: int = 3 + retry_backoff: float = 2.0 # HTTP transport configuration host: str = "127.0.0.1" port: int = 8000 path: str = "/mcp" + # Cookie authentication + linkedin_cookie: Optional[str] = None @dataclass class AppConfig: """Main application configuration.""" - chrome: ChromeConfig = field(default_factory=ChromeConfig) - linkedin: LinkedInConfig = field(default_factory=LinkedInConfig) + browser: BrowserConfig = field(default_factory=BrowserConfig) server: ServerConfig = field(default_factory=ServerConfig) is_interactive: bool = field(default=False) def __post_init__(self) -> None: """Validate configuration after initialization.""" - self._validate_transport_config() + if self.server.transport == "streamable-http": + self._validate_transport_config() + self._validate_path_format() self._validate_port_range() - self._validate_path_format() def _validate_transport_config(self) -> None: """Validate transport configuration is consistent.""" - if self.server.transport == "streamable-http": - if not self.server.host: - raise ConfigurationError("HTTP transport requires a valid host") - if not self.server.port: - raise ConfigurationError("HTTP transport requires a valid port") + if not self.server.host: + raise ConfigurationError("HTTP transport requires a valid host") + if not self.server.port: + raise ConfigurationError("HTTP transport requires a valid port") def _validate_port_range(self) -> None: """Validate port is in valid range.""" @@ -91,12 +79,11 @@ def _validate_port_range(self) -> None: def _validate_path_format(self) -> None: """Validate path format for HTTP transport.""" - if self.server.transport == "streamable-http": - if not 
self.server.path.startswith("/"): - raise ConfigurationError( - f"HTTP path '{self.server.path}' must start with '/'" - ) - if len(self.server.path) < 2: - raise ConfigurationError( - f"HTTP path '{self.server.path}' must be at least 2 characters" - ) + if not self.server.path.startswith("/"): + raise ConfigurationError( + f"HTTP path '{self.server.path}' must start with '/'" + ) + if len(self.server.path) < 2: + raise ConfigurationError( + f"HTTP path '{self.server.path}' must be at least 2 characters" + ) diff --git a/linkedin_mcp_server/config/secrets.py b/linkedin_mcp_server/config/secrets.py deleted file mode 100644 index be7a2bd1..00000000 --- a/linkedin_mcp_server/config/secrets.py +++ /dev/null @@ -1,48 +0,0 @@ -# src/linkedin_mcp_server/config/secrets.py -""" -Interactive credential prompting and secure storage for LinkedIn MCP Server. - -This module handles interactive credential collection from users and securely stores -them in the system keyring. It provides a user-friendly interface for credential -input while ensuring security through proper keyring integration. 
- -Key Functions: -- Interactive credential prompting with secure password input -- Automatic storage of credentials in system keyring -- User-friendly error handling and feedback -- Integration with the keyring providers for secure storage -""" - -import logging -from typing import Dict - -import inquirer # type: ignore - - -from .providers import ( - get_keyring_name, - save_credentials_to_keyring, -) - -logger = logging.getLogger(__name__) - - -def prompt_for_credentials() -> Dict[str, str]: - """Prompt user for LinkedIn credentials and store them securely.""" - print(f"๐Ÿ”‘ LinkedIn credentials required (will be stored in {get_keyring_name()})") - questions = [ - inquirer.Text("email", message="LinkedIn Email"), - inquirer.Password("password", message="LinkedIn Password"), - ] - credentials: Dict[str, str] = inquirer.prompt(questions) - - if not credentials: - raise KeyboardInterrupt("Credential input was cancelled") - - # Store credentials securely in keyring - if save_credentials_to_keyring(credentials["email"], credentials["password"]): - logger.info("Credentials stored securely in keyring") - else: - logger.warning("Could not store credentials in system keyring") - - return credentials diff --git a/linkedin_mcp_server/drivers/__init__.py b/linkedin_mcp_server/drivers/__init__.py index 3d123cab..69efafc0 100644 --- a/linkedin_mcp_server/drivers/__init__.py +++ b/linkedin_mcp_server/drivers/__init__.py @@ -1,16 +1,36 @@ -# src/linkedin_mcp_server/drivers/__init__.py """ -Driver management package for LinkedIn scraping. +Browser management package for LinkedIn scraping. -This package provides Chrome WebDriver management and automation capabilities -for LinkedIn scraping. It implements a singleton pattern for driver instances +This package provides Playwright browser management using linkedin_scraper v3's +BrowserManager. 
It implements a singleton pattern for browser instances to ensure session persistence across multiple tool calls while handling authentication, session management, and proper resource cleanup. Key Components: -- Chrome WebDriver initialization and configuration -- LinkedIn authentication and session management -- Singleton pattern for driver reuse across tools -- Automatic driver cleanup and resource management -- Cross-platform Chrome driver detection and setup +- Playwright browser initialization via BrowserManager +- LinkedIn authentication with session persistence +- Singleton pattern for browser reuse across tools +- Automatic cleanup and resource management """ + +from linkedin_mcp_server.drivers.browser import ( + DEFAULT_SESSION_PATH, + check_rate_limit, + close_browser, + ensure_authenticated, + get_or_create_browser, + session_exists, + set_headless, + validate_session, +) + +__all__ = [ + "DEFAULT_SESSION_PATH", + "check_rate_limit", + "close_browser", + "ensure_authenticated", + "get_or_create_browser", + "session_exists", + "set_headless", + "validate_session", +] diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py new file mode 100644 index 00000000..9653b65b --- /dev/null +++ b/linkedin_mcp_server/drivers/browser.py @@ -0,0 +1,155 @@ +""" +Playwright browser management for LinkedIn scraping. + +This module provides async browser lifecycle management using linkedin_scraper v3's +BrowserManager. Implements a singleton pattern for browser reuse across tool calls +with session persistence via JSON files. 
+""" + +import logging +from pathlib import Path +from typing import Optional, cast + +from linkedin_scraper import ( + AuthenticationError, + BrowserManager, + is_logged_in, + login_with_cookie, +) +from linkedin_scraper.core import detect_rate_limit, warm_up_browser + +logger = logging.getLogger(__name__) + + +def _get_linkedin_cookie() -> Optional[str]: + """Get LinkedIn cookie from environment variable.""" + import os + + return os.environ.get("LINKEDIN_COOKIE") + + +# Default session file location +DEFAULT_SESSION_PATH = Path.home() / ".linkedin-mcp" / "session.json" + +# Global browser instance (singleton) +_browser: Optional[BrowserManager] = None +_headless: bool = True + + +async def get_or_create_browser( + headless: Optional[bool] = None, + session_path: Optional[Path] = None, +) -> BrowserManager: + """ + Get existing browser or create and initialize a new one. + + Uses a singleton pattern to reuse the browser across tool calls. + Loads session from file if available. + + Args: + headless: Run browser in headless mode. Defaults to config value. + session_path: Path to session file. 
Defaults to ~/.linkedin-mcp/session.json + + Returns: + Initialized BrowserManager instance + """ + global _browser, _headless + + if headless is not None: + _headless = headless + + if session_path is None: + session_path = DEFAULT_SESSION_PATH + + if _browser is not None: + return cast(BrowserManager, _browser) + + logger.info(f"Creating new browser (headless={_headless})") + _browser = BrowserManager(headless=_headless) + await _browser.start() + + # Priority 1: Load session file if available + if session_path.exists(): + try: + await _browser.load_session(str(session_path)) + logger.info(f"Loaded session from {session_path}") + return _browser + except Exception as e: + logger.warning(f"Failed to load session: {e}") + + # Priority 2: Use cookie from environment + if cookie := _get_linkedin_cookie(): + try: + await login_with_cookie(_browser.page, cookie) + logger.info("Authenticated using LINKEDIN_COOKIE") + return _browser + except Exception as e: + logger.warning(f"Cookie authentication failed: {e}") + + # No auth available - warm up for manual login + logger.info("No authentication found, warming up browser...") + await warm_up_browser(_browser.page) + + return _browser + + +async def close_browser() -> None: + """Close the browser and cleanup resources.""" + global _browser + + if _browser is not None: + browser = cast(BrowserManager, _browser) + logger.info("Closing browser...") + await browser.close() + _browser = None + logger.info("Browser closed") + + +def session_exists(session_path: Optional[Path] = None) -> bool: + """Check if a session file exists.""" + if session_path is None: + session_path = DEFAULT_SESSION_PATH + return session_path.exists() + + +def set_headless(headless: bool) -> None: + """Set headless mode for future browser creation.""" + global _headless + _headless = headless + + +async def validate_session() -> bool: + """ + Check if the current session is still valid (logged in). 
+ + Returns: + True if session is valid and user is logged in + """ + browser = await get_or_create_browser() + return await is_logged_in(browser.page) + + +async def ensure_authenticated() -> None: + """ + Validate session and raise if expired. + + Raises: + AuthenticationError: If session is expired or invalid + """ + if not await validate_session(): + raise AuthenticationError( + "Session expired or invalid. Run with --get-session to re-authenticate." + ) + + +async def check_rate_limit() -> None: + """ + Proactively check for rate limiting. + + Should be called after navigation to detect if LinkedIn is blocking requests. + + Raises: + RateLimitError: If rate limiting is detected + """ + browser = await get_or_create_browser() + await detect_rate_limit(browser.page) diff --git a/linkedin_mcp_server/drivers/chrome.py b/linkedin_mcp_server/drivers/chrome.py deleted file mode 100644 index 56a14bf4..00000000 --- a/linkedin_mcp_server/drivers/chrome.py +++ /dev/null @@ -1,458 +0,0 @@ -# linkedin_mcp_server/drivers/chrome.py -""" -Chrome WebDriver management for LinkedIn scraping with session persistence. - -Handles Chrome WebDriver creation, configuration, authentication, and lifecycle management. -Implements singleton pattern for driver reuse across tools with automatic cleanup. -Provides cookie-based authentication and comprehensive error handling. 
-""" - -import logging -import os -import platform -from typing import Dict, Optional - -from linkedin_scraper.exceptions import ( - CaptchaRequiredError, - InvalidCredentialsError, - LoginTimeoutError, - RateLimitError, - SecurityChallengeError, - TwoFactorAuthError, -) -from selenium import webdriver -from selenium.common.exceptions import WebDriverException -from selenium.webdriver.chrome.options import Options -from selenium.webdriver.chrome.service import Service - -from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.exceptions import DriverInitializationError - - -# Constants -def get_default_user_agent() -> str: - """Get platform-specific default user agent to reduce fingerprinting.""" - system = platform.system() - - if system == "Windows": - return "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36" - elif system == "Darwin": # macOS - return "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36" - else: # Linux and others - return "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36" - - -# Global driver storage to reuse sessions -active_drivers: Dict[str, webdriver.Chrome] = {} - - -logger = logging.getLogger(__name__) - - -def create_chrome_options(config) -> Options: - """ - Create Chrome options with all necessary configuration for LinkedIn scraping. 
- - Args: - config: AppConfig instance with Chrome configuration - - Returns: - Options: Configured Chrome options object - """ - chrome_options = Options() - - logger.info( - f"Running browser in {'headless' if config.chrome.headless else 'visible'} mode" - ) - if config.chrome.headless: - chrome_options.add_argument("--headless=new") - - # Add essential options for stability - chrome_options.add_argument("--no-sandbox") - chrome_options.add_argument("--disable-dev-shm-usage") - chrome_options.add_argument("--disable-gpu") - chrome_options.add_argument("--window-size=1920,1080") - chrome_options.add_argument("--disable-extensions") - chrome_options.add_argument("--disable-background-timer-throttling") - chrome_options.add_argument("--disable-background-networking") - chrome_options.add_argument("--disable-default-apps") - chrome_options.add_argument("--disable-sync") - chrome_options.add_argument("--metrics-recording-only") - chrome_options.add_argument("--no-default-browser-check") - chrome_options.add_argument("--no-first-run") - chrome_options.add_argument("--disable-features=TranslateUI,BlinkGenPropertyTrees") - chrome_options.add_argument("--aggressive-cache-discard") - chrome_options.add_argument("--disable-ipc-flooding-protection") - - # Set user agent (configurable with platform-specific default) - user_agent = config.chrome.user_agent or get_default_user_agent() - chrome_options.add_argument(f"--user-agent={user_agent}") - - # Add any custom browser arguments from config - for arg in config.chrome.browser_args: - chrome_options.add_argument(arg) - - return chrome_options - - -def create_chrome_service(config): - """ - Create Chrome service with ChromeDriver path resolution. 
- - Args: - config: AppConfig instance with Chrome configuration - - Returns: - Service or None: Chrome service if path is configured, None for auto-detection - """ - # Use ChromeDriver path from environment or config - chromedriver_path = ( - os.environ.get("CHROMEDRIVER_PATH") or config.chrome.chromedriver_path - ) - - if chromedriver_path: - logger.info(f"Using ChromeDriver at path: {chromedriver_path}") - return Service(executable_path=chromedriver_path) - else: - logger.info("Using auto-detected ChromeDriver") - return None - - -def create_temporary_chrome_driver() -> webdriver.Chrome: - """ - Create a temporary Chrome WebDriver instance for one-off operations. - - This driver is NOT stored in the global active_drivers dict and should be - manually cleaned up by the caller. - - Returns: - webdriver.Chrome: Configured Chrome WebDriver instance - - Raises: - WebDriverException: If driver creation fails - """ - config = get_config() - - logger.info("Creating temporary Chrome WebDriver...") - - # Create Chrome options using shared function - chrome_options = create_chrome_options(config) - - # Create Chrome service using shared function - service = create_chrome_service(config) - - # Initialize Chrome driver - if service: - driver = webdriver.Chrome(service=service, options=chrome_options) - else: - driver = webdriver.Chrome(options=chrome_options) - - logger.info("Temporary Chrome WebDriver created successfully") - - # Add a page load timeout for safety - driver.set_page_load_timeout(60) - - # Set shorter implicit wait for faster operations - driver.implicitly_wait(10) - - return driver - - -def create_chrome_driver() -> webdriver.Chrome: - """ - Create a new Chrome WebDriver instance with proper configuration. 
- - Returns: - webdriver.Chrome: Configured Chrome WebDriver instance - - Raises: - WebDriverException: If driver creation fails - """ - config = get_config() - - logger.info("Initializing Chrome WebDriver...") - - # Create Chrome options using shared function - chrome_options = create_chrome_options(config) - - # Create Chrome service using shared function - service = create_chrome_service(config) - - # Initialize Chrome driver - if service: - driver = webdriver.Chrome(service=service, options=chrome_options) - else: - driver = webdriver.Chrome(options=chrome_options) - - logger.info("Chrome WebDriver initialized successfully") - - # Add a page load timeout for safety - driver.set_page_load_timeout(60) - - # Set shorter implicit wait for faster cookie validation - driver.implicitly_wait(10) - - return driver - - -def login_with_cookie(driver: webdriver.Chrome, cookie: str) -> bool: - """ - Log in to LinkedIn using session cookie. - - Args: - driver: Chrome WebDriver instance - cookie: LinkedIn session cookie - - Returns: - bool: True if login was successful, False otherwise - """ - import time - - try: - from linkedin_scraper import actions # type: ignore - from selenium.common.exceptions import TimeoutException - - logger.info("Attempting cookie authentication...") - - # Set longer timeout to handle slow LinkedIn loading - # Invalid cookies cause indefinite loading, so timeout is our detection mechanism - driver.set_page_load_timeout(45) - - # Attempt login - retry_count = 0 - max_retries = 1 - - while retry_count <= max_retries: - try: - actions.login(driver, cookie=cookie) - # If we reach here without timeout, login attempt completed - break - except TimeoutException: - # Timeout indicates invalid cookie (page loads forever) - logger.warning( - "Cookie authentication failed - page load timeout (likely invalid cookie)" - ) - return False - except Exception as e: - # Handle InvalidCredentialsError from linkedin-scraper - # This library sometimes incorrectly 
reports failure even when login succeeds - if "InvalidCredentialsError" in str( - type(e) - ) or "Cookie login failed" in str(e): - logger.info( - "LinkedIn-scraper reported InvalidCredentialsError - verifying actual authentication status..." - ) - # Give LinkedIn time to complete redirect - time.sleep(2) - break - else: - logger.warning(f"Login attempt failed: {e}") - if retry_count < max_retries: - retry_count += 1 - logger.info( - f"Retrying authentication (attempt {retry_count + 1}/{max_retries + 1})" - ) - time.sleep(2) - continue - else: - return False - - # Check authentication status by examining the current URL - try: - current_url = driver.current_url - - # Check if we're on login page (authentication failed) - if "login" in current_url or "uas/login" in current_url: - logger.warning( - "Cookie authentication failed - redirected to login page" - ) - return False - - # Check if we're on authenticated pages (authentication succeeded) - elif any( - indicator in current_url - for indicator in ["feed", "mynetwork", "linkedin.com/in/", "/feed/"] - ): - logger.info("Cookie authentication successful") - return True - - # Unexpected page - wait briefly and check again - else: - logger.info( - "Unexpected page after login, checking authentication status..." 
- ) - time.sleep(2) - - final_url = driver.current_url - if "login" in final_url or "uas/login" in final_url: - logger.warning("Cookie authentication failed - ended on login page") - return False - elif any( - indicator in final_url - for indicator in ["feed", "mynetwork", "linkedin.com/in/", "/feed/"] - ): - logger.info("Cookie authentication successful after verification") - return True - else: - logger.warning( - f"Cookie authentication uncertain - unexpected final page: {final_url}" - ) - return False - - except Exception as e: - logger.error(f"Error checking authentication status: {e}") - return False - - except Exception as e: - logger.error(f"Cookie authentication failed with error: {e}") - return False - finally: - # Restore normal timeout - driver.set_page_load_timeout(60) - - -def login_to_linkedin(driver: webdriver.Chrome, authentication: str) -> None: - """ - Log in to LinkedIn using provided authentication. - - Args: - driver: Chrome WebDriver instance - authentication: LinkedIn session cookie - - Raises: - Various login-related errors from linkedin-scraper or this module - """ - # Try cookie authentication - if login_with_cookie(driver, authentication): - logger.info("Successfully logged in to LinkedIn using cookie") - return - - # If we get here, cookie authentication failed - logger.error("Cookie authentication failed") - - # Clear invalid cookie from keyring - from linkedin_mcp_server.authentication import clear_authentication - - clear_authentication() - logger.info("Cleared invalid cookie from authentication storage") - - # Check current page to determine the issue - try: - current_url: str = driver.current_url - - if "checkpoint/challenge" in current_url: - if "security check" in driver.page_source.lower(): - raise SecurityChallengeError( - challenge_url=current_url, - message="LinkedIn requires a security challenge. 
Please complete it manually and restart the application.", - ) - else: - raise CaptchaRequiredError(captcha_url=current_url) - else: - raise InvalidCredentialsError( - "Cookie authentication failed - cookie may be expired or invalid" - ) - - except Exception as e: - # If we can't determine the specific error, raise a generic one - raise LoginTimeoutError(f"Login failed: {str(e)}") - - -def get_or_create_driver(authentication: str) -> webdriver.Chrome: - """ - Get existing driver or create a new one and login. - - Args: - authentication: LinkedIn session cookie for login - - Returns: - webdriver.Chrome: Chrome WebDriver instance, logged in and ready - - Raises: - DriverInitializationError: If driver creation fails - Various login-related errors: If login fails - """ - session_id = "default" # We use a single session for simplicity - - # Return existing driver if available - if session_id in active_drivers: - logger.info("Using existing Chrome WebDriver session") - return active_drivers[session_id] - - try: - # Create new driver - driver = create_chrome_driver() - - # Login to LinkedIn - login_to_linkedin(driver, authentication) - - # Store successful driver - active_drivers[session_id] = driver - logger.info("Chrome WebDriver session created and authenticated successfully") - - return driver - - except WebDriverException as e: - error_msg = f"Error creating web driver: {e}" - logger.error(error_msg) - raise DriverInitializationError(error_msg) - except ( - CaptchaRequiredError, - InvalidCredentialsError, - SecurityChallengeError, - TwoFactorAuthError, - RateLimitError, - LoginTimeoutError, - ) as e: - # Login-related errors - clean up driver if it was created - if session_id in active_drivers: - active_drivers[session_id].quit() - del active_drivers[session_id] - raise e - - -def close_all_drivers() -> None: - """Close all active drivers and clean up resources.""" - global active_drivers - - for session_id, driver in active_drivers.items(): - try: - 
logger.info(f"Closing Chrome WebDriver session: {session_id}") - driver.quit() - except Exception as e: - logger.warning(f"Error closing driver {session_id}: {e}") - - active_drivers.clear() - logger.info("All Chrome WebDriver sessions closed") - - -def get_active_driver() -> Optional[webdriver.Chrome]: - """ - Get the currently active driver without creating a new one. - - Returns: - Optional[webdriver.Chrome]: Active driver if available, None otherwise - """ - session_id = "default" - return active_drivers.get(session_id) - - -def capture_session_cookie(driver: webdriver.Chrome) -> Optional[str]: - """ - Capture LinkedIn session cookie from driver. - - Args: - driver: Chrome WebDriver instance - - Returns: - Optional[str]: Session cookie if found, None otherwise - """ - try: - # Get li_at cookie which is the main LinkedIn session cookie - cookie = driver.get_cookie("li_at") - if cookie and cookie.get("value"): - return f"li_at={cookie['value']}" - return None - except Exception as e: - logger.warning(f"Failed to capture session cookie: {e}") - return None diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index e2f4cc02..679c3aab 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -1,29 +1,32 @@ -# src/linkedin_mcp_server/error_handler.py """ Centralized error handling for LinkedIn MCP Server with structured responses. Provides DRY approach to error handling across all tools with consistent MCP response format, specific LinkedIn error categorization, and proper logging integration. -Eliminates code duplication while ensuring user-friendly error messages. 
""" import logging from typing import Any, Dict, List -from linkedin_scraper.exceptions import ( - CaptchaRequiredError, - InvalidCredentialsError, - LoginTimeoutError, +from linkedin_scraper.core.exceptions import ( + AuthenticationError, + ElementNotFoundError, + LinkedInScraperException, + NetworkError, + ProfileNotFoundError, RateLimitError, - SecurityChallengeError, - TwoFactorAuthError, + ScrapingError, ) from linkedin_mcp_server.exceptions import ( + CookieAuthenticationError, CredentialsNotFoundError, LinkedInMCPError, + SessionExpiredError, ) +logger = logging.getLogger(__name__) + def handle_tool_error(exception: Exception, context: str = "") -> Dict[str, Any]: """ @@ -52,7 +55,7 @@ def handle_tool_error_list( Returns: List containing structured error response dictionary """ - return convert_exception_to_list_response(exception, context) + return [convert_exception_to_response(exception, context)] def convert_exception_to_response( @@ -72,59 +75,81 @@ def convert_exception_to_response( return { "error": "authentication_not_found", "message": str(exception), - "resolution": "Provide LinkedIn cookie via LINKEDIN_COOKIE environment variable or run setup", + "resolution": "Run with --get-session to create a session file", } - elif isinstance(exception, InvalidCredentialsError): + elif isinstance(exception, SessionExpiredError): return { - "error": "invalid_credentials", + "error": "session_expired", "message": str(exception), - "resolution": "Check your LinkedIn email and password", + "resolution": "Run with --get-session to create a new session", } - elif isinstance(exception, CaptchaRequiredError): + elif isinstance(exception, CookieAuthenticationError): return { - "error": "captcha_required", + "error": "cookie_auth_failed", "message": str(exception), - "captcha_url": exception.captcha_url, - "resolution": "Complete the captcha challenge manually", + "resolution": "Check your LINKEDIN_COOKIE value or create a session file", } - elif isinstance(exception, 
SecurityChallengeError): + elif isinstance(exception, AuthenticationError): return { - "error": "security_challenge_required", + "error": "authentication_failed", "message": str(exception), - "challenge_url": getattr(exception, "challenge_url", None), - "resolution": "Complete the security challenge manually", + "resolution": "Check your LinkedIn session. You may need to re-authenticate or complete a security challenge.", } - elif isinstance(exception, TwoFactorAuthError): + elif isinstance(exception, RateLimitError): + wait_time = getattr(exception, "suggested_wait_time", 300) return { - "error": "two_factor_auth_required", + "error": "rate_limit", "message": str(exception), - "resolution": "Complete 2FA verification", + "suggested_wait_seconds": wait_time, + "resolution": f"LinkedIn rate limit detected. Wait {wait_time} seconds before trying again.", } - elif isinstance(exception, RateLimitError): + elif isinstance(exception, ProfileNotFoundError): return { - "error": "rate_limit", + "error": "profile_not_found", "message": str(exception), - "resolution": "Wait before attempting to login again", + "resolution": "Check the profile URL is correct and the profile exists.", } - elif isinstance(exception, LoginTimeoutError): + elif isinstance(exception, ElementNotFoundError): return { - "error": "login_timeout", + "error": "element_not_found", + "message": str(exception), + "resolution": "LinkedIn page structure may have changed. Please report this issue.", + } + + elif isinstance(exception, NetworkError): + return { + "error": "network_error", + "message": str(exception), + "resolution": "Check your network connection and try again.", + } + + elif isinstance(exception, ScrapingError): + return { + "error": "scraping_error", + "message": str(exception), + "resolution": "Failed to extract data from LinkedIn. 
The page structure may have changed.", + } + + elif isinstance(exception, LinkedInScraperException): + return { + "error": "linkedin_scraper_error", "message": str(exception), - "resolution": "Check network connection and try again", } elif isinstance(exception, LinkedInMCPError): - return {"error": "linkedin_error", "message": str(exception)} + return { + "error": "linkedin_mcp_error", + "message": str(exception), + } else: # Generic error handling with structured logging - logger = logging.getLogger(__name__) logger.error( f"Error in {context}: {exception}", extra={ @@ -137,44 +162,3 @@ def convert_exception_to_response( "error": "unknown_error", "message": f"Failed to execute {context}: {str(exception)}", } - - -def convert_exception_to_list_response( - exception: Exception, context: str = "" -) -> List[Dict[str, Any]]: - """ - Convert an exception to a list-formatted structured MCP response. - - Some tools return lists, so this provides the same error handling - but wrapped in a list format. - - Args: - exception: The exception to convert - context: Additional context about where the error occurred - - Returns: - List containing single structured error response dictionary - """ - return [convert_exception_to_response(exception, context)] - - -def safe_get_driver(): - """ - Safely get or create a driver with proper error handling. 
- - Returns: - Driver instance - - Raises: - LinkedInMCPError: If driver initialization fails - """ - from linkedin_mcp_server.authentication import ensure_authentication - from linkedin_mcp_server.drivers.chrome import get_or_create_driver - - # Get authentication first - authentication = ensure_authentication() - - # Create driver with authentication - driver = get_or_create_driver(authentication) - - return driver diff --git a/linkedin_mcp_server/exceptions.py b/linkedin_mcp_server/exceptions.py index d3f48425..54dffe79 100644 --- a/linkedin_mcp_server/exceptions.py +++ b/linkedin_mcp_server/exceptions.py @@ -3,8 +3,7 @@ Custom exceptions for LinkedIn MCP Server with specific error categorization. Defines hierarchical exception types for different error scenarios including -authentication failures, driver initialization issues, and MCP client reporting. -Provides structured error handling for better debugging and user experience. +authentication failures and MCP client reporting. """ @@ -20,7 +19,27 @@ class CredentialsNotFoundError(LinkedInMCPError): pass -class DriverInitializationError(LinkedInMCPError): - """Failed to initialize Chrome WebDriver.""" +class SessionExpiredError(LinkedInMCPError): + """Session has expired and needs to be refreshed.""" - pass + def __init__(self, message: str | None = None): + default_msg = ( + "LinkedIn session has expired.\n\n" + "To fix this:\n" + " 1. Run with --get-session to create a new session\n" + " 2. Or set a fresh LINKEDIN_COOKIE environment variable" + ) + super().__init__(message or default_msg) + + +class CookieAuthenticationError(LinkedInMCPError): + """Cookie-based authentication failed.""" + + def __init__(self, message: str | None = None): + default_msg = ( + "Cookie authentication failed. 
The cookie may be:\n" + " - Expired (cookies typically last 1-7 days)\n" + " - Invalid (check the format)\n" + " - From a different account" + ) + super().__init__(message or default_msg) diff --git a/linkedin_mcp_server/logging_config.py b/linkedin_mcp_server/logging_config.py index a3448260..285e4cb3 100644 --- a/linkedin_mcp_server/logging_config.py +++ b/linkedin_mcp_server/logging_config.py @@ -108,7 +108,6 @@ def configure_logging(log_level: str = "WARNING", json_format: bool = False) -> root_logger.addHandler(console_handler) # Set specific loggers to reduce noise - logging.getLogger("selenium").setLevel(logging.ERROR) logging.getLogger("urllib3").setLevel(logging.ERROR) logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR) logging.getLogger("fakeredis").setLevel(logging.WARNING) diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index 2f488445..39126fd8 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -1,18 +1,17 @@ -# src/linkedin_mcp_server/server.py """ FastMCP server implementation for LinkedIn integration with tool registration. Creates and configures the MCP server with comprehensive LinkedIn tool suite including person profiles, company data, job information, and session management capabilities. -Provides clean shutdown handling and resource cleanup. 
""" import logging -from typing import Any, Dict +from contextlib import asynccontextmanager +from typing import Any, AsyncIterator, Dict from fastmcp import FastMCP -from mcp.types import ToolAnnotations +from linkedin_mcp_server.drivers.browser import close_browser from linkedin_mcp_server.tools.company import register_company_tools from linkedin_mcp_server.tools.job import register_job_tools from linkedin_mcp_server.tools.person import register_person_tools @@ -20,9 +19,18 @@ logger = logging.getLogger(__name__) +@asynccontextmanager +async def lifespan(app: FastMCP) -> AsyncIterator[None]: + """Manage server lifecycle - cleanup browser on shutdown.""" + logger.info("LinkedIn MCP Server starting...") + yield + logger.info("LinkedIn MCP Server shutting down...") + await close_browser() + + def create_mcp_server() -> FastMCP: """Create and configure the MCP server with all LinkedIn tools.""" - mcp = FastMCP("linkedin_scraper") + mcp = FastMCP("linkedin_scraper", lifespan=lifespan) # Register all tools register_person_tools(mcp) @@ -30,20 +38,11 @@ def create_mcp_server() -> FastMCP: register_job_tools(mcp) # Register session management tool - @mcp.tool( - annotations=ToolAnnotations( - title="Close Session", - readOnlyHint=False, - destructiveHint=False, - openWorldHint=False, - ) - ) + @mcp.tool() async def close_session() -> Dict[str, Any]: """Close the current browser session and clean up resources.""" - from linkedin_mcp_server.drivers.chrome import close_all_drivers - try: - close_all_drivers() + await close_browser() return { "status": "success", "message": "Successfully closed the browser session and cleaned up resources", @@ -55,10 +54,3 @@ async def close_session() -> Dict[str, Any]: } return mcp - - -def shutdown_handler() -> None: - """Clean up resources on shutdown.""" - from linkedin_mcp_server.drivers.chrome import close_all_drivers - - close_all_drivers() diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index 
408478e0..ae6a2aad 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -1,305 +1,99 @@ -# linkedin_mcp_server/setup.py """ -Interactive setup flows for LinkedIn MCP Server authentication configuration. +Interactive setup flows for LinkedIn MCP Server authentication. -Handles credential collection, cookie extraction, validation, and secure storage -with multiple authentication methods including cookie input and credential-based login. -Provides temporary driver management and comprehensive retry logic. +Handles session creation through interactive browser login using Playwright. +Uses linkedin_scraper v3's wait_for_manual_login for authentication. """ +import asyncio import logging -from contextlib import contextmanager -from typing import Dict, Iterator +from pathlib import Path +from typing import Optional -import inquirer -from selenium import webdriver +from linkedin_scraper import BrowserManager, wait_for_manual_login -from linkedin_mcp_server.authentication import store_authentication -from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.config.messages import ErrorMessages, InfoMessages -from linkedin_mcp_server.config.providers import ( - get_credentials_from_keyring, - save_credentials_to_keyring, -) -from linkedin_mcp_server.config.schema import AppConfig -from linkedin_mcp_server.exceptions import CredentialsNotFoundError +from linkedin_mcp_server.drivers.browser import DEFAULT_SESSION_PATH logger = logging.getLogger(__name__) -def get_credentials_for_setup() -> Dict[str, str]: +async def interactive_login_and_save(session_path: Optional[Path] = None) -> bool: """ - Get LinkedIn credentials for setup purposes. 
- - Returns: - Dict[str, str]: Dictionary with email and password - - Raises: - CredentialsNotFoundError: If credentials cannot be obtained - """ - config = get_config() - - # First, try configuration (includes environment variables) - if config.linkedin.email and config.linkedin.password: - logger.info("Using LinkedIn credentials from configuration") - return {"email": config.linkedin.email, "password": config.linkedin.password} - - # Second, try keyring - credentials = get_credentials_from_keyring() - if credentials["email"] and credentials["password"]: - logger.info("Using LinkedIn credentials from keyring") - return {"email": credentials["email"], "password": credentials["password"]} - - # If in non-interactive mode and no credentials found, raise error - if not config.is_interactive: - raise CredentialsNotFoundError(ErrorMessages.no_credentials_found()) - - # Otherwise, prompt for credentials - return prompt_for_credentials() + Open browser for manual LinkedIn login and save session. + Opens a non-headless browser, navigates to LinkedIn login page, + and waits for user to complete authentication (including 2FA, captcha, etc.). -def prompt_for_credentials() -> Dict[str, str]: - """ - Prompt user for LinkedIn credentials. + Args: + session_path: Path to save session. 
Defaults to ~/.linkedin-mcp/session.json Returns: - Dict[str, str]: Dictionary with email and password + True if login was successful and session was saved Raises: - KeyboardInterrupt: If user cancels input + Exception: If login fails or times out """ - print("๐Ÿ”‘ LinkedIn credentials required for setup") - questions = [ - inquirer.Text("email", message="LinkedIn Email"), - inquirer.Password("password", message="LinkedIn Password"), - ] - credentials: Dict[str, str] = inquirer.prompt(questions) + if session_path is None: + session_path = DEFAULT_SESSION_PATH - if not credentials: - raise KeyboardInterrupt("Credential input was cancelled") + print("๐Ÿ”— Opening browser for LinkedIn login...") + print(" Please log in manually. You have 5 minutes to complete authentication.") + print(" (This handles 2FA, captcha, and any security challenges)") - # Store credentials securely in keyring - if save_credentials_to_keyring(credentials["email"], credentials["password"]): - logger.info(InfoMessages.credentials_stored_securely()) - else: - logger.warning(InfoMessages.keyring_storage_failed()) + async with BrowserManager(headless=False) as browser: + # Navigate to LinkedIn login + await browser.page.goto("https://www.linkedin.com/login") - return credentials + # Wait for manual login completion (5 minute timeout) + await wait_for_manual_login(browser.page, timeout=300000) + # Save session for future use + session_path.parent.mkdir(parents=True, exist_ok=True) + await browser.save_session(str(session_path)) -@contextmanager -def temporary_chrome_driver() -> Iterator[webdriver.Chrome]: - """ - Context manager for creating temporary Chrome driver with automatic cleanup. 
+ print(f"โœ… Session saved to {session_path}") + return True - Yields: - webdriver.Chrome: Configured Chrome WebDriver instance - Raises: - Exception: If driver creation fails +def run_session_creation(output_path: Optional[str] = None) -> bool: """ - from linkedin_mcp_server.drivers.chrome import create_temporary_chrome_driver - - driver = None - try: - # Create temporary driver using shared function - driver = create_temporary_chrome_driver() - yield driver - finally: - if driver: - driver.quit() - - -def capture_cookie_from_credentials(email: str, password: str) -> str: - """ - Login with credentials and capture session cookie using temporary driver. + Create session via interactive login and save to file. Args: - email: LinkedIn email - password: LinkedIn password + output_path: Path to save session file. Defaults to ~/.linkedin-mcp/session.json Returns: - str: Captured session cookie - - Raises: - Exception: If login or cookie capture fails + True if session was created successfully """ - with temporary_chrome_driver() as driver: - # Login using linkedin-scraper - from linkedin_scraper import actions - - config: AppConfig = get_config() - interactive: bool = config.is_interactive - logger.info(f"Logging in to LinkedIn... Interactive: {interactive}") - actions.login( - driver, - email, - password, - timeout=60, # longer timeout for login (captcha, mobile verification, etc.) 
- interactive=interactive, # type: ignore # Respect configuration setting - ) - - # Capture cookie - cookie_obj: Dict[str, str] = driver.get_cookie("li_at") - if cookie_obj and cookie_obj.get("value"): - cookie: str = cookie_obj["value"] - logger.info("Successfully captured session cookie") - return cookie - else: - raise Exception("Failed to capture session cookie from browser") - + # Expand ~ in path + if output_path: + session_path = Path(output_path).expanduser() + else: + session_path = DEFAULT_SESSION_PATH -def test_cookie_validity(cookie: str) -> bool: - """ - Test if a cookie is valid by attempting to use it with a temporary driver. + print("๐Ÿ”— LinkedIn MCP Server - Session Creation") + print(f" Session will be saved to: {session_path}") - Args: - cookie: LinkedIn session cookie to test - - Returns: - bool: True if cookie is valid, False otherwise - """ try: - with temporary_chrome_driver() as driver: - from linkedin_mcp_server.drivers.chrome import login_with_cookie - - return login_with_cookie(driver, cookie) + success = asyncio.run(interactive_login_and_save(session_path)) + return success except Exception as e: - logger.warning(f"Cookie validation failed: {e}") + print(f"โŒ Session creation failed: {e}") return False -def prompt_for_cookie() -> str: +def run_interactive_setup() -> bool: """ - Prompt user to input LinkedIn cookie directly. + Run interactive setup - browser login only. Returns: - str: LinkedIn session cookie - - Raises: - KeyboardInterrupt: If user cancels input - ValueError: If cookie format is invalid - """ - print("๐Ÿช Please provide your LinkedIn session cookie") - cookie = inquirer.text("LinkedIn Cookie") - - if not cookie: - raise KeyboardInterrupt("Cookie input was cancelled") - - # Normalize cookie format - if cookie.startswith("li_at="): - cookie: str = cookie.split("li_at=")[1] - - return cookie - - -def run_interactive_setup() -> str: - """ - Run interactive setup to configure authentication. 
- - Returns: - str: Configured LinkedIn session cookie - - Raises: - Exception: If setup fails + True if setup completed successfully """ print("๐Ÿ”— LinkedIn MCP Server Setup") - print("Choose how you'd like to authenticate:") - - # Ask user for setup method - setup_method = inquirer.list_input( - "Setup method", - choices=[ - ("I have a LinkedIn cookie", "cookie"), - ("Login with email/password to get cookie", "credentials"), - ], - default="cookie", - ) - - if setup_method == "cookie": - # User provides cookie directly - cookie = prompt_for_cookie() - - # Test the cookie with a temporary driver - print("๐Ÿ” Testing provided cookie...") - if test_cookie_validity(cookie): - # Store the valid cookie - store_authentication(cookie) - logger.info("โœ… Authentication configured successfully") - return cookie - else: - print("โŒ The provided cookie is invalid or expired") - retry = inquirer.confirm( - "Would you like to try with email/password instead?", default=True - ) - if not retry: - raise Exception("Setup cancelled - invalid cookie provided") - - # Fall through to credentials flow - setup_method = "credentials" - - if setup_method == "credentials": - # Get credentials and attempt login with retry - max_retries = 3 - for attempt in range(max_retries): - try: - credentials = get_credentials_for_setup() - - print("๐Ÿ”‘ Logging in to capture session cookie...") - cookie = capture_cookie_from_credentials( - credentials["email"], credentials["password"] - ) - - # Store the captured cookie - store_authentication(cookie) - logger.info("โœ… Authentication configured successfully") - return cookie - - except Exception as e: - logger.error(f"Login failed: {e}") - print(f"โŒ Login failed: {e}") + print(" Opening browser for manual login...") - if attempt < max_retries - 1: - retry = inquirer.confirm( - "Would you like to try with different credentials?", - default=True, - ) - if not retry: - break - # Clear stored credentials to prompt for new ones - from 
linkedin_mcp_server.config.providers import ( - clear_credentials_from_keyring, - ) - - clear_credentials_from_keyring() - else: - raise Exception(f"Setup failed after {max_retries} attempts") - - raise Exception("Setup cancelled by user") - - # This should never be reached, but ensures type checker knows all paths are covered - raise Exception("Unexpected setup flow completion") - - -def run_cookie_extraction_setup() -> str: - """ - Run setup specifically for cookie extraction (--get-cookie mode). - - Returns: - str: Captured LinkedIn session cookie for display - - Raises: - Exception: If setup fails - """ - logger.info("๐Ÿ”— LinkedIn MCP Server - Cookie Extraction mode started") - print("๐Ÿ”— LinkedIn MCP Server - Cookie Extraction") - - # Get credentials - credentials: Dict[str, str] = get_credentials_for_setup() - - # Capture cookie - cookie: str = capture_cookie_from_credentials( - credentials["email"], credentials["password"] - ) - - return cookie + try: + return asyncio.run(interactive_login_and_save()) + except Exception as e: + print(f"โŒ Login failed: {e}") + return False diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index c2a70077..5bd99dad 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -1,19 +1,23 @@ -# src/linkedin_mcp_server/tools/company.py """ -LinkedIn company profile scraping tools with employee data extraction. +LinkedIn company profile scraping tools. -Provides MCP tools for extracting company information, employee lists, and company -insights from LinkedIn with configurable depth and comprehensive error handling. +Provides MCP tools for extracting company information from LinkedIn +with comprehensive error handling. 
""" import logging -from typing import Any, Dict, List +from typing import Any, Dict from fastmcp import FastMCP -from linkedin_scraper import Company +from linkedin_scraper import CompanyScraper from mcp.types import ToolAnnotations -from linkedin_mcp_server.error_handler import handle_tool_error, safe_get_driver +from linkedin_mcp_server.callbacks import MCPProgressCallback +from linkedin_mcp_server.drivers.browser import ( + ensure_authenticated, + get_or_create_browser, +) +from linkedin_mcp_server.error_handler import handle_tool_error logger = logging.getLogger(__name__) @@ -23,7 +27,7 @@ def register_company_tools(mcp: FastMCP) -> None: Register all company-related tools with the MCP server. Args: - mcp (FastMCP): The MCP server instance + mcp: The MCP server instance """ @mcp.tool( @@ -34,77 +38,31 @@ def register_company_tools(mcp: FastMCP) -> None: openWorldHint=True, ) ) - async def get_company_profile( - company_name: str, get_employees: bool = False - ) -> Dict[str, Any]: + async def get_company_profile(company_name: str) -> Dict[str, Any]: """ Get a specific company's LinkedIn profile. Args: - company_name (str): LinkedIn company name (e.g., "docker", "anthropic", "microsoft") - get_employees (bool): Whether to scrape the company's employees (slower) + company_name: LinkedIn company name (e.g., "docker", "anthropic", "microsoft") Returns: - Dict[str, Any]: Structured data from the company's profile + Structured data from the company's profile including name, about, + headquarters, industry, size, and more. 
""" try: - # Construct clean LinkedIn URL from company name - linkedin_url = f"https://www.linkedin.com/company/{company_name}/" + # Validate session before scraping + await ensure_authenticated() - driver = safe_get_driver() + # Construct LinkedIn URL from company name + linkedin_url = f"https://www.linkedin.com/company/{company_name}/" logger.info(f"Scraping company: {linkedin_url}") - if get_employees: - logger.info("Fetching employees may take a while...") - - company = Company( - linkedin_url, - driver=driver, - get_employees=get_employees, - close_on_complete=False, - ) - - # Convert showcase pages to structured dictionaries - showcase_pages: List[Dict[str, Any]] = [ - { - "name": page.name, - "linkedin_url": page.linkedin_url, - "followers": page.followers, - } - for page in company.showcase_pages - ] - - # Convert affiliated companies to structured dictionaries - affiliated_companies: List[Dict[str, Any]] = [ - { - "name": affiliated.name, - "linkedin_url": affiliated.linkedin_url, - "followers": affiliated.followers, - } - for affiliated in company.affiliated_companies - ] - - # Build the result dictionary - result: Dict[str, Any] = { - "name": company.name, - "about_us": company.about_us, - "website": company.website, - "phone": company.phone, - "headquarters": company.headquarters, - "founded": company.founded, - "industry": company.industry, - "company_type": company.company_type, - "company_size": company.company_size, - "specialties": company.specialties, - "showcase_pages": showcase_pages, - "affiliated_companies": affiliated_companies, - "headcount": company.headcount, - } - - # Add employees if requested and available - if get_employees and company.employees: - result["employees"] = company.employees - - return result + + browser = await get_or_create_browser() + scraper = CompanyScraper(browser.page, callback=MCPProgressCallback()) + company = await scraper.scrape(linkedin_url) + + return company.to_dict() + except Exception as e: return 
handle_tool_error(e, "get_company_profile") diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 973a4b37..a069121d 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -1,23 +1,23 @@ -# src/linkedin_mcp_server/tools/job.py """ -LinkedIn job scraping tools with search and detail extraction capabilities. +LinkedIn job scraping tools with search and detail extraction. -Provides MCP tools for job posting details, job searches, and recommendations +Provides MCP tools for job posting details and job searches with comprehensive filtering and structured data extraction. """ import logging -from typing import Any, Dict, List +from typing import Any, Dict, List, Optional from fastmcp import FastMCP -from linkedin_scraper import Job, JobSearch +from linkedin_scraper import JobScraper, JobSearchScraper from mcp.types import ToolAnnotations -from linkedin_mcp_server.error_handler import ( - handle_tool_error, - handle_tool_error_list, - safe_get_driver, +from linkedin_mcp_server.callbacks import MCPProgressCallback +from linkedin_mcp_server.drivers.browser import ( + ensure_authenticated, + get_or_create_browser, ) +from linkedin_mcp_server.error_handler import handle_tool_error, handle_tool_error_list logger = logging.getLogger(__name__) @@ -27,7 +27,7 @@ def register_job_tools(mcp: FastMCP) -> None: Register all job-related tools with the MCP server. Args: - mcp (FastMCP): The MCP server instance + mcp: The MCP server instance """ @mcp.tool( @@ -40,26 +40,30 @@ def register_job_tools(mcp: FastMCP) -> None: ) async def get_job_details(job_id: str) -> Dict[str, Any]: """ - Get job details for a specific job posting on LinkedIn + Get job details for a specific job posting on LinkedIn. 
Args: - job_id (str): LinkedIn job ID (e.g., "4252026496", "3856789012") + job_id: LinkedIn job ID (e.g., "4252026496", "3856789012") Returns: - Dict[str, Any]: Structured job data including title, company, location, posting date, - application count, and job description (may be empty if content is protected) + Structured job data including title, company, location, + posting date, and job description. """ try: - # Construct clean LinkedIn URL from job ID - job_url = f"https://www.linkedin.com/jobs/view/{job_id}/" + # Validate session before scraping + await ensure_authenticated() - driver = safe_get_driver() + # Construct LinkedIn URL from job ID + job_url = f"https://www.linkedin.com/jobs/view/{job_id}/" logger.info(f"Scraping job: {job_url}") - job = Job(job_url, driver=driver, close_on_complete=False) - # Convert job object to a dictionary + browser = await get_or_create_browser() + scraper = JobScraper(browser.page, callback=MCPProgressCallback()) + job = await scraper.scrape(job_url) + return job.to_dict() + except Exception as e: return handle_tool_error(e, "get_job_details") @@ -71,57 +75,38 @@ async def get_job_details(job_id: str) -> Dict[str, Any]: openWorldHint=True, ) ) - async def search_jobs(search_term: str) -> List[Dict[str, Any]]: + async def search_jobs( + keywords: str, + location: Optional[str] = None, + limit: int = 25, + ) -> List[str] | List[Dict[str, Any]]: """ - Search for jobs on LinkedIn using a search term. + Search for jobs on LinkedIn. Args: - search_term (str): Search term to use for the job search. + keywords: Search keywords (e.g., "software engineer", "data scientist") + location: Optional location filter (e.g., "San Francisco", "Remote") + limit: Maximum number of job URLs to return (default: 25) Returns: - List[Dict[str, Any]]: List of job search results + List of job posting URLs. Use get_job_details to get full details + for specific jobs. 
""" try: - driver = safe_get_driver() + # Validate session before scraping + await ensure_authenticated() - logger.info(f"Searching jobs: {search_term}") - job_search = JobSearch(driver=driver, close_on_complete=False, scrape=False) - jobs = job_search.search(search_term) + logger.info(f"Searching jobs: keywords='{keywords}', location='{location}'") - # Convert job objects to dictionaries - return [job.to_dict() for job in jobs] - except Exception as e: - return handle_tool_error_list(e, "search_jobs") - - @mcp.tool( - annotations=ToolAnnotations( - title="Get Recommended Jobs", - readOnlyHint=True, - destructiveHint=False, - openWorldHint=True, - ) - ) - async def get_recommended_jobs() -> List[Dict[str, Any]]: - """ - Get your personalized recommended jobs from LinkedIn - - Returns: - List[Dict[str, Any]]: List of recommended jobs - """ - try: - driver = safe_get_driver() - - logger.info("Getting recommended jobs") - job_search = JobSearch( - driver=driver, - close_on_complete=False, - scrape=True, # Enable scraping to get recommended jobs - scrape_recommended_jobs=True, + browser = await get_or_create_browser() + scraper = JobSearchScraper(browser.page, callback=MCPProgressCallback()) + job_urls = await scraper.search( + keywords=keywords, + location=location, + limit=limit, ) - if hasattr(job_search, "recommended_jobs") and job_search.recommended_jobs: - return [job.to_dict() for job in job_search.recommended_jobs] - else: - return [] + return job_urls + except Exception as e: - return handle_tool_error_list(e, "get_recommended_jobs") + return handle_tool_error_list(e, "search_jobs") diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index a5c1b0a2..1524c415 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -1,19 +1,23 @@ -# src/linkedin_mcp_server/tools/person.py """ -LinkedIn person profile scraping tools with structured data extraction. +LinkedIn person profile scraping tools. 
Provides MCP tools for extracting comprehensive LinkedIn profile information including -experience, education, skills, and contact details with proper error handling. +experience, education, skills, and contact details. """ import logging -from typing import Any, Dict, List +from typing import Any, Dict from fastmcp import FastMCP -from linkedin_scraper import Person +from linkedin_scraper import PersonScraper from mcp.types import ToolAnnotations -from linkedin_mcp_server.error_handler import handle_tool_error, safe_get_driver +from linkedin_mcp_server.callbacks import MCPProgressCallback +from linkedin_mcp_server.drivers.browser import ( + ensure_authenticated, + get_or_create_browser, +) +from linkedin_mcp_server.error_handler import handle_tool_error logger = logging.getLogger(__name__) @@ -23,7 +27,7 @@ def register_person_tools(mcp: FastMCP) -> None: Register all person-related tools with the MCP server. Args: - mcp (FastMCP): The MCP server instance + mcp: The MCP server instance """ @mcp.tool( @@ -39,77 +43,26 @@ async def get_person_profile(linkedin_username: str) -> Dict[str, Any]: Get a specific person's LinkedIn profile. Args: - linkedin_username (str): LinkedIn username (e.g., "stickerdaniel", "anistji") + linkedin_username: LinkedIn username (e.g., "stickerdaniel", "williamhgates") Returns: - Dict[str, Any]: Structured data from the person's profile + Structured data from the person's profile including name, about, + experiences, educations, and more. 
""" try: - # Construct clean LinkedIn URL from username - linkedin_url = f"https://www.linkedin.com/in/{linkedin_username}/" + # Validate session before scraping + await ensure_authenticated() - driver = safe_get_driver() + # Construct LinkedIn URL from username + linkedin_url = f"https://www.linkedin.com/in/{linkedin_username}/" logger.info(f"Scraping profile: {linkedin_url}") - person = Person(linkedin_url, driver=driver, close_on_complete=False) - - # Convert experiences to structured dictionaries - experiences: List[Dict[str, Any]] = [ - { - "position_title": exp.position_title, - "company": exp.institution_name, - "from_date": exp.from_date, - "to_date": exp.to_date, - "duration": exp.duration, - "location": exp.location, - "description": exp.description, - } - for exp in person.experiences - ] - - # Convert educations to structured dictionaries - educations: List[Dict[str, Any]] = [ - { - "institution": edu.institution_name, - "degree": edu.degree, - "from_date": edu.from_date, - "to_date": edu.to_date, - "description": edu.description, - } - for edu in person.educations - ] - - # Convert interests to list of titles - interests: List[str] = [interest.title for interest in person.interests] - - # Convert accomplishments to structured dictionaries - accomplishments: List[Dict[str, str]] = [ - {"category": acc.category, "title": acc.title} - for acc in person.accomplishments - ] - - # Convert contacts to structured dictionaries - contacts: List[Dict[str, str]] = [ - { - "name": contact.name, - "occupation": contact.occupation, - "url": contact.url, - } - for contact in person.contacts - ] - - # Return the complete profile data - return { - "name": person.name, - "about": person.about, - "experiences": experiences, - "educations": educations, - "interests": interests, - "accomplishments": accomplishments, - "contacts": contacts, - "company": person.company, - "job_title": person.job_title, - "open_to_work": getattr(person, "open_to_work", False), - } + + browser 
= await get_or_create_browser() + scraper = PersonScraper(browser.page, callback=MCPProgressCallback()) + person = await scraper.scrape(linkedin_url) + + return person.to_dict() + except Exception as e: return handle_tool_error(e, "get_person_profile") diff --git a/linkedin_mcp_server/utils/__init__.py b/linkedin_mcp_server/utils/__init__.py new file mode 100644 index 00000000..4e54edd5 --- /dev/null +++ b/linkedin_mcp_server/utils/__init__.py @@ -0,0 +1,5 @@ +"""Utility functions for LinkedIn MCP Server.""" + +from linkedin_mcp_server.utils.retry import retry_async + +__all__ = ["retry_async"] diff --git a/linkedin_mcp_server/utils/retry.py b/linkedin_mcp_server/utils/retry.py new file mode 100644 index 00000000..ee3c7742 --- /dev/null +++ b/linkedin_mcp_server/utils/retry.py @@ -0,0 +1,69 @@ +""" +Retry utilities for handling transient failures. + +Provides exponential backoff retry decorator for async functions. +""" + +import asyncio +import logging +from functools import wraps +from typing import Any, Callable, Tuple, Type, TypeVar + +from playwright.async_api import TimeoutError as PlaywrightTimeoutError + +logger = logging.getLogger(__name__) + +F = TypeVar("F", bound=Callable[..., Any]) + + +def retry_async( + max_attempts: int = 3, + backoff: float = 2.0, + exceptions: Tuple[Type[Exception], ...] = (PlaywrightTimeoutError,), +) -> Callable[[F], F]: + """ + Decorator for retrying async functions with exponential backoff. + + Args: + max_attempts: Maximum number of retry attempts (default: 3) + backoff: Backoff multiplier - wait time doubles each retry (default: 2.0) + exceptions: Tuple of exception types to retry on + + Returns: + Decorated function with retry logic + + Example: + @retry_async(max_attempts=3, backoff=2.0) + async def scrape_profile(url: str): + ... 
+ """ + + def decorator(func: F) -> F: + @wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + last_exception: Exception | None = None + + for attempt in range(max_attempts): + try: + return await func(*args, **kwargs) + except exceptions as e: + last_exception = e + if attempt < max_attempts - 1: + wait_time = backoff**attempt + logger.warning( + f"Attempt {attempt + 1}/{max_attempts} failed: {e}. " + f"Retrying in {wait_time:.1f}s..." + ) + await asyncio.sleep(wait_time) + else: + logger.error( + f"All {max_attempts} attempts failed for {getattr(func, '__name__', repr(func))}" + ) + + if last_exception: + raise last_exception + raise RuntimeError("Unexpected state in retry logic") + + return wrapper # type: ignore[return-value] + + return decorator diff --git a/manifest.json b/manifest.json index b79174db..9a448816 100644 --- a/manifest.json +++ b/manifest.json @@ -24,7 +24,7 @@ "command": "docker", "args": [ "run", "--rm", "-i", - "-e", "LINKEDIN_COOKIE=${user_config.linkedin_cookie}", + "-v", "~/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", "stickerdaniel/linkedin-mcp-server:1.4.1" ] @@ -56,15 +56,7 @@ "description": "Properly close browser session and clean up resources" } ], - "user_config": { - "linkedin_cookie": { - "title": "LinkedIn Cookie", - "description": "LinkedIn li_at session cookie. 
Follow the instructions in the README to get it.", - "type": "string", - "required": true, - "sensitive": true - } - }, + "user_config": {}, "compatibility": { "claude_desktop": ">=0.10.0", "dxt_version": ">=0.1", diff --git a/pyproject.toml b/pyproject.toml index 1d630891..5689988d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,3 +36,6 @@ dev = [ "ruff>=0.11.11", "ty>=0.0.1a12", ] + +[tool.ty.src] +exclude = ["docs/references/"] From 6b3c31ce614755bcf5565382c7e921a0b5254b70 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 11 Jan 2026 22:19:19 +0100 Subject: [PATCH 262/565] refactor(tools): remove dead retry code and add FastMCP progress reporting - Remove unused retry utility (linkedin-scraper v3 has built-in retry) - Add MCPContextProgressCallback to report progress to MCP clients - Wire FastMCP Context into all scraping tools - Remove claude-code-review workflow --- .github/workflows/claude-code-review.yml | 56 ------------------- linkedin_mcp_server/callbacks.py | 31 ++++++++++- linkedin_mcp_server/config/loaders.py | 21 -------- linkedin_mcp_server/config/schema.py | 3 -- linkedin_mcp_server/tools/company.py | 11 ++-- linkedin_mcp_server/tools/job.py | 15 ++++-- linkedin_mcp_server/tools/person.py | 13 +++-- linkedin_mcp_server/utils/__init__.py | 4 -- linkedin_mcp_server/utils/retry.py | 69 ------------------------ 9 files changed, 55 insertions(+), 168 deletions(-) delete mode 100644 .github/workflows/claude-code-review.yml delete mode 100644 linkedin_mcp_server/utils/retry.py diff --git a/.github/workflows/claude-code-review.yml b/.github/workflows/claude-code-review.yml deleted file mode 100644 index ed31f986..00000000 --- a/.github/workflows/claude-code-review.yml +++ /dev/null @@ -1,56 +0,0 @@ -name: Claude Code Review - -on: - pull_request: - types: [opened, synchronize] - # Optional: Only run on specific file changes - # paths: - # - "src/**/*.ts" - # - "src/**/*.tsx" - # - "src/**/*.js" - # - "src/**/*.jsx" - -jobs: - claude-review: - 
# Optional: Filter by PR author - # if: | - # github.event.pull_request.user.login == 'external-contributor' || - # github.event.pull_request.user.login == 'new-developer' || - # github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' - - runs-on: ubuntu-latest - permissions: - contents: read - pull-requests: read - issues: read - id-token: write - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - with: - fetch-depth: 1 - - - name: Run Claude Code Review - id: claude-review - uses: anthropics/claude-code-action@v1 - with: - claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} - prompt: | - REPO: ${{ github.repository }} - PR NUMBER: ${{ github.event.pull_request.number }} - - Please review this pull request and provide feedback on: - - Code quality and best practices - - Potential bugs or issues - - Performance considerations - - Security concerns - - Test coverage - - Use the repository's CLAUDE.md for guidance on style and conventions. Be constructive and helpful in your feedback. - - Use `gh pr comment` with your Bash tool to leave your review as a comment on the PR. - - # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md - # or https://code.claude.com/docs/en/cli-reference for available options - claude_args: '--allowed-tools "Bash(gh issue view:*),Bash(gh search:*),Bash(gh issue list:*),Bash(gh pr comment:*),Bash(gh pr diff:*),Bash(gh pr view:*),Bash(gh pr list:*)"' diff --git a/linkedin_mcp_server/callbacks.py b/linkedin_mcp_server/callbacks.py index 2f83fa3a..b6b63414 100644 --- a/linkedin_mcp_server/callbacks.py +++ b/linkedin_mcp_server/callbacks.py @@ -1,19 +1,21 @@ """ Progress callbacks for MCP tools. -Provides callback implementations that log progress for LinkedIn scraping operations. +Provides callback implementations that log progress for LinkedIn scraping operations +and report progress to MCP clients via FastMCP Context. 
""" import logging from typing import Any +from fastmcp import Context from linkedin_scraper.callbacks import ProgressCallback logger = logging.getLogger(__name__) class MCPProgressCallback(ProgressCallback): - """Callback that logs progress for MCP tools.""" + """Callback that logs progress for MCP tools (server-side only).""" async def on_start(self, scraper_type: str, url: str) -> None: """Log when scraping starts.""" @@ -32,6 +34,31 @@ async def on_error(self, error: Exception) -> None: logger.error(f"Scrape error: {error}") +class MCPContextProgressCallback(ProgressCallback): + """Callback that reports progress to MCP clients via FastMCP Context.""" + + def __init__(self, ctx: Context): + self.ctx = ctx + + async def on_start(self, scraper_type: str, url: str) -> None: + """Report start to MCP client.""" + await self.ctx.report_progress( + progress=0, total=100, message=f"Starting {scraper_type}" + ) + + async def on_progress(self, message: str, percent: int) -> None: + """Report progress to MCP client.""" + await self.ctx.report_progress(progress=percent, total=100, message=message) + + async def on_complete(self, scraper_type: str, result: Any) -> None: + """Report completion to MCP client.""" + await self.ctx.report_progress(progress=100, total=100, message="Complete") + + async def on_error(self, error: Exception) -> None: + """Log errors (errors are handled by tool error handling).""" + logger.error(f"Scrape error: {error}") + + class SilentCallback(ProgressCallback): """Callback that produces no output - useful for background operations.""" diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index d96a2e08..96882fb7 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -140,23 +140,6 @@ def load_from_args(config: AppConfig) -> AppConfig: help="Browser viewport size (default: 1280x720)", ) - # Retry configuration - parser.add_argument( - "--retry-attempts", - type=int, - 
default=3, - metavar="N", - help="Max retry attempts for transient failures (default: 3)", - ) - - parser.add_argument( - "--retry-backoff", - type=float, - default=2.0, - metavar="S", - help="Backoff multiplier between retries (default: 2.0)", - ) - # Session management parser.add_argument( "--get-session", @@ -216,10 +199,6 @@ def load_from_args(config: AppConfig) -> AppConfig: except ValueError: logger.warning(f"Invalid viewport format: {args.viewport}, using default") - # Retry configuration - config.server.retry_attempts = args.retry_attempts - config.server.retry_backoff = args.retry_backoff - # Session management if args.get_session is not None: config.server.get_session = True diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index d535381f..ad3d3439 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -37,9 +37,6 @@ class ServerConfig: session_output_path: Optional[str] = None session_info: bool = False # Check session validity and exit clear_session: bool = False - # Retry configuration - retry_attempts: int = 3 - retry_backoff: float = 2.0 # HTTP transport configuration host: str = "127.0.0.1" port: int = 8000 diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 5bd99dad..c16910f0 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -8,11 +8,11 @@ import logging from typing import Any, Dict -from fastmcp import FastMCP +from fastmcp import Context, FastMCP from linkedin_scraper import CompanyScraper from mcp.types import ToolAnnotations -from linkedin_mcp_server.callbacks import MCPProgressCallback +from linkedin_mcp_server.callbacks import MCPContextProgressCallback from linkedin_mcp_server.drivers.browser import ( ensure_authenticated, get_or_create_browser, @@ -38,12 +38,13 @@ def register_company_tools(mcp: FastMCP) -> None: openWorldHint=True, ) ) - async def 
get_company_profile(company_name: str) -> Dict[str, Any]: + async def get_company_profile(company_name: str, ctx: Context) -> Dict[str, Any]: """ Get a specific company's LinkedIn profile. Args: company_name: LinkedIn company name (e.g., "docker", "anthropic", "microsoft") + ctx: FastMCP context for progress reporting Returns: Structured data from the company's profile including name, about, @@ -59,7 +60,9 @@ async def get_company_profile(company_name: str) -> Dict[str, Any]: logger.info(f"Scraping company: {linkedin_url}") browser = await get_or_create_browser() - scraper = CompanyScraper(browser.page, callback=MCPProgressCallback()) + scraper = CompanyScraper( + browser.page, callback=MCPContextProgressCallback(ctx) + ) company = await scraper.scrape(linkedin_url) return company.to_dict() diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index a069121d..1dee895f 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -8,11 +8,11 @@ import logging from typing import Any, Dict, List, Optional -from fastmcp import FastMCP +from fastmcp import Context, FastMCP from linkedin_scraper import JobScraper, JobSearchScraper from mcp.types import ToolAnnotations -from linkedin_mcp_server.callbacks import MCPProgressCallback +from linkedin_mcp_server.callbacks import MCPContextProgressCallback from linkedin_mcp_server.drivers.browser import ( ensure_authenticated, get_or_create_browser, @@ -38,12 +38,13 @@ def register_job_tools(mcp: FastMCP) -> None: openWorldHint=True, ) ) - async def get_job_details(job_id: str) -> Dict[str, Any]: + async def get_job_details(job_id: str, ctx: Context) -> Dict[str, Any]: """ Get job details for a specific job posting on LinkedIn. 
Args: job_id: LinkedIn job ID (e.g., "4252026496", "3856789012") + ctx: FastMCP context for progress reporting Returns: Structured job data including title, company, location, @@ -59,7 +60,7 @@ async def get_job_details(job_id: str) -> Dict[str, Any]: logger.info(f"Scraping job: {job_url}") browser = await get_or_create_browser() - scraper = JobScraper(browser.page, callback=MCPProgressCallback()) + scraper = JobScraper(browser.page, callback=MCPContextProgressCallback(ctx)) job = await scraper.scrape(job_url) return job.to_dict() @@ -77,6 +78,7 @@ async def get_job_details(job_id: str) -> Dict[str, Any]: ) async def search_jobs( keywords: str, + ctx: Context, location: Optional[str] = None, limit: int = 25, ) -> List[str] | List[Dict[str, Any]]: @@ -85,6 +87,7 @@ async def search_jobs( Args: keywords: Search keywords (e.g., "software engineer", "data scientist") + ctx: FastMCP context for progress reporting location: Optional location filter (e.g., "San Francisco", "Remote") limit: Maximum number of job URLs to return (default: 25) @@ -99,7 +102,9 @@ async def search_jobs( logger.info(f"Searching jobs: keywords='{keywords}', location='{location}'") browser = await get_or_create_browser() - scraper = JobSearchScraper(browser.page, callback=MCPProgressCallback()) + scraper = JobSearchScraper( + browser.page, callback=MCPContextProgressCallback(ctx) + ) job_urls = await scraper.search( keywords=keywords, location=location, diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 1524c415..8f0a030a 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -8,11 +8,11 @@ import logging from typing import Any, Dict -from fastmcp import FastMCP +from fastmcp import Context, FastMCP from linkedin_scraper import PersonScraper from mcp.types import ToolAnnotations -from linkedin_mcp_server.callbacks import MCPProgressCallback +from linkedin_mcp_server.callbacks import MCPContextProgressCallback from 
linkedin_mcp_server.drivers.browser import ( ensure_authenticated, get_or_create_browser, @@ -38,12 +38,15 @@ def register_person_tools(mcp: FastMCP) -> None: openWorldHint=True, ) ) - async def get_person_profile(linkedin_username: str) -> Dict[str, Any]: + async def get_person_profile( + linkedin_username: str, ctx: Context + ) -> Dict[str, Any]: """ Get a specific person's LinkedIn profile. Args: linkedin_username: LinkedIn username (e.g., "stickerdaniel", "williamhgates") + ctx: FastMCP context for progress reporting Returns: Structured data from the person's profile including name, about, @@ -59,7 +62,9 @@ async def get_person_profile(linkedin_username: str) -> Dict[str, Any]: logger.info(f"Scraping profile: {linkedin_url}") browser = await get_or_create_browser() - scraper = PersonScraper(browser.page, callback=MCPProgressCallback()) + scraper = PersonScraper( + browser.page, callback=MCPContextProgressCallback(ctx) + ) person = await scraper.scrape(linkedin_url) return person.to_dict() diff --git a/linkedin_mcp_server/utils/__init__.py b/linkedin_mcp_server/utils/__init__.py index 4e54edd5..2785acad 100644 --- a/linkedin_mcp_server/utils/__init__.py +++ b/linkedin_mcp_server/utils/__init__.py @@ -1,5 +1 @@ """Utility functions for LinkedIn MCP Server.""" - -from linkedin_mcp_server.utils.retry import retry_async - -__all__ = ["retry_async"] diff --git a/linkedin_mcp_server/utils/retry.py b/linkedin_mcp_server/utils/retry.py deleted file mode 100644 index ee3c7742..00000000 --- a/linkedin_mcp_server/utils/retry.py +++ /dev/null @@ -1,69 +0,0 @@ -""" -Retry utilities for handling transient failures. - -Provides exponential backoff retry decorator for async functions. 
-""" - -import asyncio -import logging -from functools import wraps -from typing import Any, Callable, Tuple, Type, TypeVar - -from playwright.async_api import TimeoutError as PlaywrightTimeoutError - -logger = logging.getLogger(__name__) - -F = TypeVar("F", bound=Callable[..., Any]) - - -def retry_async( - max_attempts: int = 3, - backoff: float = 2.0, - exceptions: Tuple[Type[Exception], ...] = (PlaywrightTimeoutError,), -) -> Callable[[F], F]: - """ - Decorator for retrying async functions with exponential backoff. - - Args: - max_attempts: Maximum number of retry attempts (default: 3) - backoff: Backoff multiplier - wait time doubles each retry (default: 2.0) - exceptions: Tuple of exception types to retry on - - Returns: - Decorated function with retry logic - - Example: - @retry_async(max_attempts=3, backoff=2.0) - async def scrape_profile(url: str): - ... - """ - - def decorator(func: F) -> F: - @wraps(func) - async def wrapper(*args: Any, **kwargs: Any) -> Any: - last_exception: Exception | None = None - - for attempt in range(max_attempts): - try: - return await func(*args, **kwargs) - except exceptions as e: - last_exception = e - if attempt < max_attempts - 1: - wait_time = backoff**attempt - logger.warning( - f"Attempt {attempt + 1}/{max_attempts} failed: {e}. " - f"Retrying in {wait_time:.1f}s..." - ) - await asyncio.sleep(wait_time) - else: - logger.error( - f"All {max_attempts} attempts failed for {getattr(func, '__name__', repr(func))}" - ) - - if last_exception: - raise last_exception - raise RuntimeError("Unexpected state in retry logic") - - return wrapper # type: ignore[return-value] - - return decorator From 5fbb6223d5a2e8701b431a130ccd92041b815ff8 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 11 Jan 2026 23:19:17 +0100 Subject: [PATCH 263/565] refactor(Dockerfile): optimize dependency sync with caching and enhance browser reset functionality - Updated Dockerfile to use cache for faster dependency synchronization. 
- Added reset_browser_for_testing function to improve test isolation in browser management. - Adjusted job tool error handling to streamline exception management. --- Dockerfile | 5 +++-- docker-compose.yml | 10 ++++++++++ linkedin_mcp_server/drivers/__init__.py | 2 ++ linkedin_mcp_server/drivers/browser.py | 7 +++++++ linkedin_mcp_server/tools/job.py | 6 +++--- 5 files changed, 25 insertions(+), 5 deletions(-) create mode 100644 docker-compose.yml diff --git a/Dockerfile b/Dockerfile index 6c6e7012..40f7a157 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,8 +13,9 @@ COPY --chown=pwuser:pwuser . /app # Switch to non-root user USER pwuser -# Sync dependencies and install project -RUN uv sync --frozen +# Sync dependencies and install project (with cache for faster rebuilds) +RUN --mount=type=cache,target=/home/pwuser/.cache/uv,uid=1000,gid=1000 \ + uv sync --frozen # Set entrypoint and default arguments ENTRYPOINT ["uv", "run", "-m", "linkedin_mcp_server"] diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..10b2507c --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,10 @@ +version: '3.8' +services: + linkedin-mcp: + image: stickerdaniel/linkedin-mcp-server:latest + volumes: + - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp + environment: + - LOG_LEVEL=WARNING + stdin_open: true + tty: true diff --git a/linkedin_mcp_server/drivers/__init__.py b/linkedin_mcp_server/drivers/__init__.py index 69efafc0..288f89f9 100644 --- a/linkedin_mcp_server/drivers/__init__.py +++ b/linkedin_mcp_server/drivers/__init__.py @@ -19,6 +19,7 @@ close_browser, ensure_authenticated, get_or_create_browser, + reset_browser_for_testing, session_exists, set_headless, validate_session, @@ -30,6 +31,7 @@ "close_browser", "ensure_authenticated", "get_or_create_browser", + "reset_browser_for_testing", "session_exists", "set_headless", "validate_session", diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 
9653b65b..a8a5da59 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -153,3 +153,10 @@ async def check_rate_limit() -> None: """ browser = await get_or_create_browser() await detect_rate_limit(browser.page) + + +def reset_browser_for_testing() -> None: + """Reset global browser state for test isolation.""" + global _browser, _headless + _browser = None + _headless = True diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 1dee895f..f9143a2e 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -17,7 +17,7 @@ ensure_authenticated, get_or_create_browser, ) -from linkedin_mcp_server.error_handler import handle_tool_error, handle_tool_error_list +from linkedin_mcp_server.error_handler import handle_tool_error logger = logging.getLogger(__name__) @@ -81,7 +81,7 @@ async def search_jobs( ctx: Context, location: Optional[str] = None, limit: int = 25, - ) -> List[str] | List[Dict[str, Any]]: + ) -> List[str] | Dict[str, Any]: """ Search for jobs on LinkedIn. @@ -114,4 +114,4 @@ async def search_jobs( return job_urls except Exception as e: - return handle_tool_error_list(e, "search_jobs") + return handle_tool_error(e, "search_jobs") From 8839f5d9278eff481630d38562fd3cae9d1e7ac3 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 11 Jan 2026 23:35:00 +0100 Subject: [PATCH 264/565] refactor(docker-compose, README, setup.py, job.py): enhance session management and documentation - Removed version specification from docker-compose.yml for flexibility. - Updated README to clarify session timeout details and added a warning about sensitive data in session files. - Improved comments in setup.py regarding manual login timeout. - Refactored job.py to return a dictionary with job URLs and count instead of a list for better data structure. 
--- README.md | 7 +++++-- docker-compose.yml | 1 - linkedin_mcp_server/drivers/browser.py | 3 +-- linkedin_mcp_server/setup.py | 3 ++- linkedin_mcp_server/tools/job.py | 10 +++++----- 5 files changed, 13 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index ae3f20b4..17103c55 100644 --- a/README.md +++ b/README.md @@ -99,7 +99,7 @@ Create a session file locally, then mount it into Docker. uvx linkedin-mcp-server --get-session ``` -This opens a browser window where you log in manually. The session is saved to `~/.linkedin-mcp/session.json`. +This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The session is saved to `~/.linkedin-mcp/session.json`. **Step 2: Configure Claude Desktop with Docker** @@ -121,6 +121,9 @@ This opens a browser window where you log in manually. The session is saved to ` > [!NOTE] > Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-mcp-server --get-session` again locally, or use a fresh `li_at` cookie. +> [!WARNING] +> The session file at `~/.linkedin-mcp/session.json` contains sensitive authentication data. Keep it secure and do not share it. + > [!NOTE] > **Why can't I run `--get-session` in Docker?** Docker containers don't have a display server, so Playwright can't show a browser window. You must create the session on your host machine first, then mount it into Docker. @@ -232,7 +235,7 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server \ linkedin-mcp-server --get-session ``` -This opens a browser for you to log in manually. The session is saved to `~/.linkedin-mcp/session.json`. +This opens a browser for you to log in manually (5 minute timeout for 2FA, captcha, etc.). The session is saved to `~/.linkedin-mcp/session.json`. 
**Step 2: Run the server** diff --git a/docker-compose.yml b/docker-compose.yml index 10b2507c..5acc1821 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,4 +1,3 @@ -version: '3.8' services: linkedin-mcp: image: stickerdaniel/linkedin-mcp-server:latest diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index a8a5da59..c6ed41d8 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -7,6 +7,7 @@ """ import logging +import os from pathlib import Path from typing import Optional, cast @@ -23,8 +24,6 @@ def _get_linkedin_cookie() -> Optional[str]: """Get LinkedIn cookie from environment variable.""" - import os - return os.environ.get("LINKEDIN_COOKIE") diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index ae6a2aad..a0bc3de5 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -44,7 +44,8 @@ async def interactive_login_and_save(session_path: Optional[Path] = None) -> boo # Navigate to LinkedIn login await browser.page.goto("https://www.linkedin.com/login") - # Wait for manual login completion (5 minute timeout) + # Wait for manual login completion + # 5 minute timeout (300000ms) allows time for 2FA, captcha, security challenges await wait_for_manual_login(browser.page, timeout=300000) # Save session for future use diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index f9143a2e..9f6cef31 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -6,7 +6,7 @@ """ import logging -from typing import Any, Dict, List, Optional +from typing import Any, Dict, Optional from fastmcp import Context, FastMCP from linkedin_scraper import JobScraper, JobSearchScraper @@ -81,7 +81,7 @@ async def search_jobs( ctx: Context, location: Optional[str] = None, limit: int = 25, - ) -> List[str] | Dict[str, Any]: + ) -> Dict[str, Any]: """ Search for jobs on LinkedIn. 
@@ -92,8 +92,8 @@ async def search_jobs( limit: Maximum number of job URLs to return (default: 25) Returns: - List of job posting URLs. Use get_job_details to get full details - for specific jobs. + Dict with job_urls list and count. Use get_job_details to get + full details for specific jobs. """ try: # Validate session before scraping @@ -111,7 +111,7 @@ async def search_jobs( limit=limit, ) - return job_urls + return {"job_urls": job_urls, "count": len(job_urls)} except Exception as e: return handle_tool_error(e, "search_jobs") From 845a07a47ebbb7799f71a37b77be6c9c8b9ce322 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 11 Jan 2026 23:39:52 +0100 Subject: [PATCH 265/565] style: use PEP 604 union syntax for type hints Replace Optional[X] with X | None throughout the codebase. This is the modern Python convention for Python 3.10+. --- linkedin_mcp_server/authentication.py | 6 +++--- linkedin_mcp_server/config/__init__.py | 3 +-- linkedin_mcp_server/config/schema.py | 8 ++++---- linkedin_mcp_server/drivers/browser.py | 12 ++++++------ linkedin_mcp_server/setup.py | 5 ++--- linkedin_mcp_server/tools/job.py | 4 ++-- 6 files changed, 18 insertions(+), 20 deletions(-) diff --git a/linkedin_mcp_server/authentication.py b/linkedin_mcp_server/authentication.py index 92fd3d02..9873c9bc 100644 --- a/linkedin_mcp_server/authentication.py +++ b/linkedin_mcp_server/authentication.py @@ -8,7 +8,7 @@ import logging import os from pathlib import Path -from typing import Literal, Optional +from typing import Literal from linkedin_mcp_server.drivers.browser import ( DEFAULT_SESSION_PATH, @@ -21,7 +21,7 @@ AuthSource = Literal["session", "cookie"] -def get_linkedin_cookie() -> Optional[str]: +def get_linkedin_cookie() -> str | None: """Get LinkedIn cookie from environment variable.""" return os.environ.get("LINKEDIN_COOKIE") @@ -62,7 +62,7 @@ def get_authentication_source() -> AuthSource: ) -def clear_session(session_path: Optional[Path] = None) -> bool: +def 
clear_session(session_path: Path | None = None) -> bool: """ Clear stored session file. diff --git a/linkedin_mcp_server/config/__init__.py b/linkedin_mcp_server/config/__init__.py index e69da0d2..771e2acb 100644 --- a/linkedin_mcp_server/config/__init__.py +++ b/linkedin_mcp_server/config/__init__.py @@ -6,7 +6,6 @@ """ import logging -from typing import Optional from .loaders import load_config from .schema import AppConfig, BrowserConfig, ServerConfig @@ -14,7 +13,7 @@ logger = logging.getLogger(__name__) # Singleton pattern for configuration -_config: Optional[AppConfig] = None +_config: AppConfig | None = None def get_config() -> AppConfig: diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index ad3d3439..aaff72e8 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -6,7 +6,7 @@ """ from dataclasses import dataclass, field -from typing import Literal, Optional +from typing import Literal class ConfigurationError(Exception): @@ -21,7 +21,7 @@ class BrowserConfig: headless: bool = True slow_mo: int = 0 # Milliseconds between browser actions (debugging) - user_agent: Optional[str] = None # Custom browser user agent + user_agent: str | None = None # Custom browser user agent viewport_width: int = 1280 viewport_height: int = 720 @@ -34,7 +34,7 @@ class ServerConfig: transport_explicitly_set: bool = False log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR"] = "WARNING" get_session: bool = False - session_output_path: Optional[str] = None + session_output_path: str | None = None session_info: bool = False # Check session validity and exit clear_session: bool = False # HTTP transport configuration @@ -42,7 +42,7 @@ class ServerConfig: port: int = 8000 path: str = "/mcp" # Cookie authentication - linkedin_cookie: Optional[str] = None + linkedin_cookie: str | None = None @dataclass diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 
c6ed41d8..be340739 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -9,7 +9,7 @@ import logging import os from pathlib import Path -from typing import Optional, cast +from typing import cast from linkedin_scraper import ( AuthenticationError, @@ -22,7 +22,7 @@ logger = logging.getLogger(__name__) -def _get_linkedin_cookie() -> Optional[str]: +def _get_linkedin_cookie() -> str | None: """Get LinkedIn cookie from environment variable.""" return os.environ.get("LINKEDIN_COOKIE") @@ -31,13 +31,13 @@ def _get_linkedin_cookie() -> Optional[str]: DEFAULT_SESSION_PATH = Path.home() / ".linkedin-mcp" / "session.json" # Global browser instance (singleton) -_browser: Optional[BrowserManager] = None +_browser: BrowserManager | None = None _headless: bool = True async def get_or_create_browser( - headless: Optional[bool] = None, - session_path: Optional[Path] = None, + headless: bool | None = None, + session_path: Path | None = None, ) -> BrowserManager: """ Get existing browser or create and initialize a new one. 
@@ -104,7 +104,7 @@ async def close_browser() -> None: logger.info("Browser closed") -def session_exists(session_path: Optional[Path] = None) -> bool: +def session_exists(session_path: Path | None = None) -> bool: """Check if a session file exists.""" if session_path is None: session_path = DEFAULT_SESSION_PATH diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index a0bc3de5..d1fbb2ae 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -8,7 +8,6 @@ import asyncio import logging from pathlib import Path -from typing import Optional from linkedin_scraper import BrowserManager, wait_for_manual_login @@ -17,7 +16,7 @@ logger = logging.getLogger(__name__) -async def interactive_login_and_save(session_path: Optional[Path] = None) -> bool: +async def interactive_login_and_save(session_path: Path | None = None) -> bool: """ Open browser for manual LinkedIn login and save session. @@ -56,7 +55,7 @@ async def interactive_login_and_save(session_path: Optional[Path] = None) -> boo return True -def run_session_creation(output_path: Optional[str] = None) -> bool: +def run_session_creation(output_path: str | None = None) -> bool: """ Create session via interactive login and save to file. 
diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 9f6cef31..37de8326 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -6,7 +6,7 @@ """ import logging -from typing import Any, Dict, Optional +from typing import Any, Dict from fastmcp import Context, FastMCP from linkedin_scraper import JobScraper, JobSearchScraper @@ -79,7 +79,7 @@ async def get_job_details(job_id: str, ctx: Context) -> Dict[str, Any]: async def search_jobs( keywords: str, ctx: Context, - location: Optional[str] = None, + location: str | None = None, limit: int = 25, ) -> Dict[str, Any]: """ From 6c319a36a9c77a9ae96e21c964849012f592c9e5 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 11 Jan 2026 23:58:45 +0100 Subject: [PATCH 266/565] refactor(authentication, callbacks, browser, error_handler, cli_main, utils): streamline authentication and error handling - Removed unused functions and consolidated cookie retrieval into a new utility function. - Updated error handling to provide clearer feedback on authentication failures. - Enhanced CLI exit behavior to ensure browser cleanup. - Simplified progress callback implementations by removing redundant classes. - Improved documentation and comments for better clarity on usage. 
--- linkedin_mcp_server/authentication.py | 12 ++++-------- linkedin_mcp_server/callbacks.py | 26 -------------------------- linkedin_mcp_server/cli_main.py | 6 +++++- linkedin_mcp_server/drivers/browser.py | 21 ++++++++------------- linkedin_mcp_server/error_handler.py | 18 +----------------- linkedin_mcp_server/utils/__init__.py | 7 +++++++ 6 files changed, 25 insertions(+), 65 deletions(-) diff --git a/linkedin_mcp_server/authentication.py b/linkedin_mcp_server/authentication.py index 9873c9bc..898e07a9 100644 --- a/linkedin_mcp_server/authentication.py +++ b/linkedin_mcp_server/authentication.py @@ -6,7 +6,6 @@ """ import logging -import os from pathlib import Path from typing import Literal @@ -15,17 +14,13 @@ session_exists, ) from linkedin_mcp_server.exceptions import CredentialsNotFoundError +from linkedin_mcp_server.utils import get_linkedin_cookie logger = logging.getLogger(__name__) AuthSource = Literal["session", "cookie"] -def get_linkedin_cookie() -> str | None: - """Get LinkedIn cookie from environment variable.""" - return os.environ.get("LINKEDIN_COOKIE") - - def get_authentication_source() -> AuthSource: """ Check available authentication methods in priority order. @@ -57,8 +52,9 @@ def get_authentication_source() -> AuthSource: " 2. Set LINKEDIN_COOKIE environment variable with your li_at cookie\n" " 3. 
Run with --no-headless to login interactively\n\n" "For Docker users:\n" - " docker run -it -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp \\\n" - " stickerdaniel/linkedin-mcp-server:latest --get-session" + " Create session on host first: uvx linkedin-mcp-server --get-session\n" + " Then mount into Docker: -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp\n" + " Or set LINKEDIN_COOKIE environment variable: -e LINKEDIN_COOKIE=your_li_at" ) diff --git a/linkedin_mcp_server/callbacks.py b/linkedin_mcp_server/callbacks.py index b6b63414..dcde2b1d 100644 --- a/linkedin_mcp_server/callbacks.py +++ b/linkedin_mcp_server/callbacks.py @@ -14,26 +14,6 @@ logger = logging.getLogger(__name__) -class MCPProgressCallback(ProgressCallback): - """Callback that logs progress for MCP tools (server-side only).""" - - async def on_start(self, scraper_type: str, url: str) -> None: - """Log when scraping starts.""" - logger.info(f"Starting {scraper_type} scrape: {url}") - - async def on_progress(self, message: str, percent: int) -> None: - """Log progress updates.""" - logger.debug(f"Progress ({percent}%): {message}") - - async def on_complete(self, scraper_type: str, result: Any) -> None: - """Log when scraping completes.""" - logger.info(f"Completed {scraper_type} scrape") - - async def on_error(self, error: Exception) -> None: - """Log errors during scraping.""" - logger.error(f"Scrape error: {error}") - - class MCPContextProgressCallback(ProgressCallback): """Callback that reports progress to MCP clients via FastMCP Context.""" @@ -57,9 +37,3 @@ async def on_complete(self, scraper_type: str, result: Any) -> None: async def on_error(self, error: Exception) -> None: """Log errors (errors are handled by tool error handling).""" logger.error(f"Scrape error: {error}") - - -class SilentCallback(ProgressCallback): - """Callback that produces no output - useful for background operations.""" - - pass diff --git a/linkedin_mcp_server/cli_main.py b/linkedin_mcp_server/cli_main.py index 
4b34c3c0..17d17fb0 100644 --- a/linkedin_mcp_server/cli_main.py +++ b/linkedin_mcp_server/cli_main.py @@ -318,8 +318,12 @@ def main() -> None: def exit_gracefully(exit_code: int = 0) -> None: - """Exit the application gracefully.""" + """Exit the application gracefully with browser cleanup.""" print("๐Ÿ‘‹ Shutting down LinkedIn MCP server...") + try: + asyncio.run(close_browser()) + except Exception: + pass # Best effort cleanup sys.exit(exit_code) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index be340739..65f15318 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -7,7 +7,6 @@ """ import logging -import os from pathlib import Path from typing import cast @@ -17,14 +16,11 @@ is_logged_in, login_with_cookie, ) -from linkedin_scraper.core import detect_rate_limit, warm_up_browser - -logger = logging.getLogger(__name__) +from linkedin_scraper.core import detect_rate_limit +from linkedin_mcp_server.utils import get_linkedin_cookie -def _get_linkedin_cookie() -> str | None: - """Get LinkedIn cookie from environment variable.""" - return os.environ.get("LINKEDIN_COOKIE") +logger = logging.getLogger(__name__) # Default session file location @@ -77,7 +73,7 @@ async def get_or_create_browser( logger.warning(f"Failed to load session: {e}") # Priority 2: Use cookie from environment - if cookie := _get_linkedin_cookie(): + if cookie := get_linkedin_cookie(): try: await login_with_cookie(_browser.page, cookie) logger.info("Authenticated using LINKEDIN_COOKIE") @@ -85,11 +81,10 @@ async def get_or_create_browser( except Exception as e: logger.warning(f"Cookie authentication failed: {e}") - # No auth available - warm up for manual login - logger.info("No authentication found, warming up browser...") - await warm_up_browser(_browser.page) - - return _browser + # No auth available - fail fast with clear error + raise AuthenticationError( + "No authentication found. 
Run with --get-session to create a session." + ) async def close_browser() -> None: diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index 679c3aab..bc51c395 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -6,7 +6,7 @@ """ import logging -from typing import Any, Dict, List +from typing import Any, Dict from linkedin_scraper.core.exceptions import ( AuthenticationError, @@ -42,22 +42,6 @@ def handle_tool_error(exception: Exception, context: str = "") -> Dict[str, Any] return convert_exception_to_response(exception, context) -def handle_tool_error_list( - exception: Exception, context: str = "" -) -> List[Dict[str, Any]]: - """ - Handle errors from tool functions that return lists. - - Args: - exception: The exception that occurred - context: Context about which tool failed - - Returns: - List containing structured error response dictionary - """ - return [convert_exception_to_response(exception, context)] - - def convert_exception_to_response( exception: Exception, context: str = "" ) -> Dict[str, Any]: diff --git a/linkedin_mcp_server/utils/__init__.py b/linkedin_mcp_server/utils/__init__.py index 2785acad..6ec0b995 100644 --- a/linkedin_mcp_server/utils/__init__.py +++ b/linkedin_mcp_server/utils/__init__.py @@ -1 +1,8 @@ """Utility functions for LinkedIn MCP Server.""" + +import os + + +def get_linkedin_cookie() -> str | None: + """Get LinkedIn cookie from environment variable.""" + return os.environ.get("LINKEDIN_COOKIE") From 5ecca3387d37b84529114d48724b8ac527258b2d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 00:29:02 +0100 Subject: [PATCH 267/565] refactor(callbacks): remove logging and error handling from MCPContextProgressCallback - Eliminated the logger and the on_error method from MCPContextProgressCallback to streamline the callback implementation. 
- Simplified the code by removing unused error logging, focusing on progress reporting functionality. --- linkedin_mcp_server/callbacks.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/linkedin_mcp_server/callbacks.py b/linkedin_mcp_server/callbacks.py index dcde2b1d..d87664ee 100644 --- a/linkedin_mcp_server/callbacks.py +++ b/linkedin_mcp_server/callbacks.py @@ -5,14 +5,11 @@ and report progress to MCP clients via FastMCP Context. """ -import logging from typing import Any from fastmcp import Context from linkedin_scraper.callbacks import ProgressCallback -logger = logging.getLogger(__name__) - class MCPContextProgressCallback(ProgressCallback): """Callback that reports progress to MCP clients via FastMCP Context.""" @@ -33,7 +30,3 @@ async def on_progress(self, message: str, percent: int) -> None: async def on_complete(self, scraper_type: str, result: Any) -> None: """Report completion to MCP client.""" await self.ctx.report_progress(progress=100, total=100, message="Complete") - - async def on_error(self, error: Exception) -> None: - """Log errors (errors are handled by tool error handling).""" - logger.error(f"Scrape error: {error}") From e86cb7c8606260f5393deb3db1c877417d9e33cc Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 00:38:41 +0100 Subject: [PATCH 268/565] fix(vscode): update task configurations for MCP server - Modified task labels and details for clarity, removing unnecessary options like '--no-lazy-init'. - Enhanced descriptions to better reflect the functionality of the tasks related to running the MCP server and following logs. 
--- .vscode/tasks.json | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 92139fcf..f26d32a9 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -41,8 +41,8 @@ "problemMatcher": [] }, { - "label": "uv run -m linkedin_mcp_server --log-level DEBUG --no-headless --no-lazy-init", - "detail": "Run server in debug mode with visible window and login immediately", + "label": "uv run -m linkedin_mcp_server --log-level DEBUG --no-headless", + "detail": "Run server in debug mode with visible browser window", "type": "shell", "command": "uv", "args": [ @@ -51,8 +51,7 @@ "linkedin_mcp_server", "--log-level", "DEBUG", - "--no-headless", - "--no-lazy-init" + "--no-headless" ], "group": { "kind": "build", @@ -66,16 +65,15 @@ "problemMatcher": [] }, { - "label": "uv run -m linkedin_mcp_server --no-headless --no-lazy-init", - "detail": "Run server with visible window and login immediately", + "label": "uv run -m linkedin_mcp_server --no-headless", + "detail": "Run server with visible browser window", "type": "shell", "command": "uv", "args": [ "run", "-m", "linkedin_mcp_server", - "--no-headless", - "--no-lazy-init" + "--no-headless" ], "group": { "kind": "build" @@ -88,7 +86,7 @@ "problemMatcher": [] }, { - "label": "uv run -m linkedin_mcp_server --no-headless --no-lazy-init --transport streamable-http", + "label": "uv run -m linkedin_mcp_server --no-headless --transport streamable-http", "detail": "Start HTTP MCP server on localhost:8000/mcp", "type": "shell", "command": "uv", @@ -97,7 +95,6 @@ "-m", "linkedin_mcp_server", "--no-headless", - "--no-lazy-init", "--transport", "streamable-http" ], @@ -114,8 +111,8 @@ "problemMatcher": [] }, { - "label": "uv run tail -n 20 -F ~/Library/Logs/Claude/mcp*.log", - "detail": "Follow Claude Desktop logs", + "label": "tail -n 20 -F ~/Library/Logs/Claude/mcp*.log", + "detail": "Follow Claude Desktop MCP logs", "type": "shell", "command": "tail", 
"args": [ From 9c6898d7791ff64bbcd299150ccfd2ccba8bc60b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 00:47:05 +0100 Subject: [PATCH 269/565] fix(error_handler, browser): improve authentication error messages - Updated the resolution message in error_handler to provide clearer instructions for re-authentication, including the use of the LINKEDIN_COOKIE environment variable. - Simplified the AuthenticationError message in browser.py to focus on session validity without additional instructions. --- linkedin_mcp_server/drivers/browser.py | 4 +--- linkedin_mcp_server/error_handler.py | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 65f15318..9ef2cb15 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -131,9 +131,7 @@ async def ensure_authenticated() -> None: AuthenticationError: If session is expired or invalid """ if not await validate_session(): - raise AuthenticationError( - "Session expired or invalid. Run with --get-session to re-authenticate." - ) + raise AuthenticationError("Session expired or invalid.") async def check_rate_limit() -> None: diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index bc51c395..31965e74 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -80,7 +80,7 @@ def convert_exception_to_response( return { "error": "authentication_failed", "message": str(exception), - "resolution": "Check your LinkedIn session. 
You may need to re-authenticate or complete a security challenge.", + "resolution": "Run with --get-session to re-authenticate (opens visible browser, not available in Docker), or set LINKEDIN_COOKIE environment variable.", } elif isinstance(exception, RateLimitError): From b45c828db04e283ae0016b2a9292b6dda898116d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 01:46:45 +0100 Subject: [PATCH 270/565] feat(env): add .env.example file and integrate python-dotenv for environment variable management - Introduced a new .env.example file to guide users in setting up environment variables for LinkedIn MCP server. - Added python-dotenv as a dependency to facilitate loading environment variables from the .env file. - Updated README to reflect changes in session management and authentication instructions. - Enhanced task configurations in VSCode for easier session management. - default playwright timeout is 5sec --- .env.example | 19 ++++++++++++++++ .vscode/tasks.json | 22 +++++++++++++++++++ README.md | 30 +++++++++++--------------- linkedin_mcp_server/config/loaders.py | 5 +++++ linkedin_mcp_server/drivers/browser.py | 11 +++++++++- pyproject.toml | 1 + uv.lock | 2 ++ 7 files changed, 71 insertions(+), 19 deletions(-) create mode 100644 .env.example diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..5f5ba673 --- /dev/null +++ b/.env.example @@ -0,0 +1,19 @@ +# LinkedIn MCP Server Environment Variables +# Copy this file to .env and fill in your values + +# LinkedIn session cookie (optional) +# Either set this, or run with --get-session to log in with email/password. 
+# Extract from browser: DevTools > Application > Cookies > linkedin.com > li_at +LINKEDIN_COOKIE= + +# Browser mode (default: true) +# true = headless, false = visible window +HEADLESS=true + +# Logging level (default: WARNING) +# Options: DEBUG, INFO, WARNING, ERROR +LOG_LEVEL=WARNING + +# Transport mode (leave empty for interactive prompt, defaults to stdio in non-interactive) +# Options: stdio, streamable-http +TRANSPORT= diff --git a/.vscode/tasks.json b/.vscode/tasks.json index f26d32a9..5fc74bcd 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -110,6 +110,28 @@ }, "problemMatcher": [] }, + { + "label": "uv run -m linkedin_mcp_server --get-session", + "detail": "Login to LinkedIn and save session (opens visible browser)", + "type": "shell", + "command": "uv", + "args": [ + "run", + "-m", + "linkedin_mcp_server", + "--get-session" + ], + "group": { + "kind": "build", + "isDefault": false + }, + "presentation": { + "reveal": "always", + "panel": "new", + "focus": true + }, + "problemMatcher": [] + }, { "label": "tail -n 20 -F ~/Library/Logs/Claude/mcp*.log", "detail": "Follow Claude Desktop MCP logs", diff --git a/README.md b/README.md index 17103c55..598636d7 100644 --- a/README.md +++ b/README.md @@ -43,20 +43,13 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c **Tool Status:** -| Tool | Status | Notes | -|------|--------|-------| -| `get_person_profile` | โœ… Works | | -| `get_company_profile` | โœ… Works | | -| `search_jobs` | โœ… Works | Returns job URLs | -| `get_job_details` | โœ… Works | | -| `close_session` | โœ… Works | | - -> [!NOTE] -> The most reliable way to authenticate is to clone this repository and run the server manually in `--no-headless` mode where you can see the process visually and solve login challenges in the automated browser. -> -> The server saves your session to `~/.linkedin-mcp/session.json` after successful login. Sessions may expire over time, requiring re-authentication. 
-> -> Please try this approach first before opening an authentication related issue. +| Tool | Status | +|------|--------| +| `get_person_profile` | Working | +| `get_company_profile` | Working | +| `search_jobs` | Broken (upstream) | +| `get_job_details` | Working | +| `close_session` | Working | ## ๐Ÿณ Docker Setup (Recommended - Universal) @@ -85,6 +78,7 @@ Get your LinkedIn `li_at` cookie and pass it to Docker: ``` **To get your `li_at` cookie:** + 1. Open LinkedIn in your browser and log in 2. Open DevTools (F12) โ†’ Application โ†’ Cookies โ†’ linkedin.com 3. Copy the `li_at` cookie value @@ -96,7 +90,7 @@ Create a session file locally, then mount it into Docker. **Step 1: Create session using uvx (one-time setup)** ```bash -uvx linkedin-mcp-server --get-session +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session ``` This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The session is saved to `~/.linkedin-mcp/session.json`. @@ -119,7 +113,7 @@ This opens a browser window where you log in manually (5 minute timeout for 2FA, ``` > [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-mcp-server --get-session` again locally, or use a fresh `li_at` cookie. +> Sessions may expire over time. If you encounter authentication issues, run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` again locally, or use a fresh `li_at` cookie. > [!WARNING] > The session file at `~/.linkedin-mcp/session.json` contains sensitive authentication data. Keep it secure and do not share it. @@ -199,7 +193,7 @@ docker run -it --rm \ 3. Create a session using `--get-session` (see Docker instructions above) > [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `--get-session` again. 
For debugging login issues, use the [local setup](#-local-setup-develop--contribute) with `--no-headless` mode. +> Sessions may expire over time. If you encounter authentication issues, run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` again. For debugging login issues, use the [local setup](#-local-setup-develop--contribute) with `--no-headless` mode. ### DXT Extension Setup Help @@ -244,7 +238,7 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp ``` > [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `--get-session` again. For debugging login issues, use `--no-headless` to see the browser window. +> Sessions may expire over time. If you encounter authentication issues, run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` again. For debugging login issues, use `--no-headless` to see the browser window. ### uvx Setup Help diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index 96882fb7..99adbed5 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -9,8 +9,13 @@ import os import sys +from dotenv import load_dotenv + from .schema import AppConfig +# Load .env file if present +load_dotenv() + logger = logging.getLogger(__name__) # Boolean value mappings for environment variable parsing diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 9ef2cb15..d96ec892 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -68,7 +68,15 @@ async def get_or_create_browser( try: await _browser.load_session(str(session_path)) logger.info(f"Loaded session from {session_path}") - return _browser + # Validate session is actually logged in + if await is_logged_in(_browser.page): + _browser.page.set_default_timeout( + 5000 + ) # 5s 
timeout for element operations + return _browser + logger.warning( + "Session loaded but expired, trying to create session from cookie" + ) except Exception as e: logger.warning(f"Failed to load session: {e}") @@ -77,6 +85,7 @@ async def get_or_create_browser( try: await login_with_cookie(_browser.page, cookie) logger.info("Authenticated using LINKEDIN_COOKIE") + _browser.page.set_default_timeout(5000) # 5s timeout for element operations return _browser except Exception as e: logger.warning(f"Cookie authentication failed: {e}") diff --git a/pyproject.toml b/pyproject.toml index 5689988d..e2c1b0fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,6 +10,7 @@ dependencies = [ "linkedin-scraper>=3.0.0", "playwright>=1.40.0", "pyperclip>=1.9.0", + "python-dotenv>=1.1.1", ] [project.scripts] diff --git a/uv.lock b/uv.lock index d5b8567d..825ed09d 100644 --- a/uv.lock +++ b/uv.lock @@ -835,6 +835,7 @@ dependencies = [ { name = "linkedin-scraper" }, { name = "playwright" }, { name = "pyperclip" }, + { name = "python-dotenv" }, ] [package.dev-dependencies] @@ -855,6 +856,7 @@ requires-dist = [ { name = "linkedin-scraper", specifier = ">=3.0.0" }, { name = "playwright", specifier = ">=1.40.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, + { name = "python-dotenv", specifier = ">=1.1.1" }, ] [package.metadata.requires-dev] From b97411aecfaf1b55fbbcf3d70d02a2d607b644dc Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 01:54:52 +0100 Subject: [PATCH 271/565] chore: update version to 2.0.0 and enhance README formatting - Bumped project version from 1.4.1 to 2.0.0 for major v2 update that uses playwright instead of selenium, tool progress notifications and better session management --- README.md | 3 +++ pyproject.toml | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 598636d7..96cc662d 100644 --- a/README.md +++ b/README.md @@ -51,6 +51,9 @@ Suggest improvements for my CV to target this job posting 
https://www.linkedin.c | `get_job_details` | Working | | `close_session` | Working | +
+
+ ## ๐Ÿณ Docker Setup (Recommended - Universal) **Prerequisites:** Make sure you have [Docker](https://www.docker.com/get-started/) installed and running. diff --git a/pyproject.toml b/pyproject.toml index e2c1b0fe..f78495bd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "1.4.1" +version = "2.0.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" From 79b678b54ac1fe45c8bfad7ddeef87b5d572c82d Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 12 Jan 2026 00:55:27 +0000 Subject: [PATCH 272/565] chore(dxt): update manifest.json version to v2.0.0 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index 9a448816..6dafe1a7 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "1.4.1", + "version": "2.0.0", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "~/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:1.4.1" + "stickerdaniel/linkedin-mcp-server:2.0.0" ] } }, From e162530c6ed06534df3b21e37c90c4bb100d3e65 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 02:00:33 +0100 Subject: [PATCH 273/565] fix(docker): create uv cache dir before cache mount The Playwright base image's pwuser doesn't have the cache directory pre-created, causing permission errors during multi-platform builds. --- Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Dockerfile b/Dockerfile index 40f7a157..d75fbc7f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,6 +13,9 @@ COPY --chown=pwuser:pwuser . 
/app # Switch to non-root user USER pwuser +# Create uv cache directory with correct ownership (required for cache mount) +RUN mkdir -p /home/pwuser/.cache/uv + # Sync dependencies and install project (with cache for faster rebuilds) RUN --mount=type=cache,target=/home/pwuser/.cache/uv,uid=1000,gid=1000 \ uv sync --frozen From 63ee8f4641fb82c31efb6084e9f98eaec281d853 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 02:04:17 +0100 Subject: [PATCH 274/565] chore: bump version to 2.0.1 in pyproject.toml to trigger auto release after fix --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index f78495bd..e540004a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "2.0.0" +version = "2.0.1" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" From e0696ef521415c9c4969fe3e43215ee4dd9e6028 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 12 Jan 2026 01:04:45 +0000 Subject: [PATCH 275/565] chore(dxt): update manifest.json version to v2.0.1 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index 6dafe1a7..0b2e9fff 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.0.0", + "version": "2.0.1", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "~/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.0.0" + "stickerdaniel/linkedin-mcp-server:2.0.1" ] } }, From 993c61897335a3944b23c363fb8d8c250a0ebed7 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 02:06:38 +0100 Subject: [PATCH 276/565] fix(docker): remove cache mount causing CI permission errors --- Dockerfile | 8 ++------ pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index d75fbc7f..6c6e7012 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,12 +13,8 @@ COPY --chown=pwuser:pwuser . 
/app # Switch to non-root user USER pwuser -# Create uv cache directory with correct ownership (required for cache mount) -RUN mkdir -p /home/pwuser/.cache/uv - -# Sync dependencies and install project (with cache for faster rebuilds) -RUN --mount=type=cache,target=/home/pwuser/.cache/uv,uid=1000,gid=1000 \ - uv sync --frozen +# Sync dependencies and install project +RUN uv sync --frozen # Set entrypoint and default arguments ENTRYPOINT ["uv", "run", "-m", "linkedin_mcp_server"] diff --git a/pyproject.toml b/pyproject.toml index e540004a..aab688f2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "2.0.1" +version = "2.0.2" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" From d16322c9eaa99433eed39911487a1b0e4290ebcc Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 12 Jan 2026 01:07:46 +0000 Subject: [PATCH 277/565] chore(dxt): update manifest.json version to v2.0.2 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index 0b2e9fff..98637684 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.0.1", + "version": "2.0.2", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "~/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.0.1" + "stickerdaniel/linkedin-mcp-server:2.0.2" ] } }, From b398e00c0081d13e73bd6b6840ca0535363ed2e0 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 09:18:25 +0100 Subject: [PATCH 278/565] docs(installation): uvx is the first and recommended installation method --- README.md | 228 +++++++++++++++++++++++++++--------------------------- 1 file changed, 114 insertions(+), 114 deletions(-) diff --git a/README.md b/README.md index 96cc662d..9da824a6 100644 --- a/README.md +++ b/README.md @@ -54,7 +54,120 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c

-## ๐Ÿณ Docker Setup (Recommended - Universal) +## ๐Ÿš€ uvx Setup (Recommended - Universal) + +**Prerequisites:** Make sure you have [uv](https://docs.astral.sh/uv/) installed. + +### Installation + +**Step 1: Create a session (first time only)** + +```bash +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server \ + linkedin-mcp-server --get-session +``` + +This opens a browser for you to log in manually (5 minute timeout for 2FA, captcha, etc.). The session is saved to `~/.linkedin-mcp/session.json`. + +**Step 2: Run the server** + +```bash +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server +``` + +> [!NOTE] +> Sessions may expire over time. If you encounter authentication issues, run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` again. For debugging login issues, use `--no-headless` to see the browser window. + +### uvx Setup Help + +
+๐Ÿ”ง Configuration + +**Client Configuration:** + +```json +{ + "mcpServers": { + "linkedin": { + "command": "uvx", + "args": [ + "--from", + "git+https://github.com/stickerdaniel/linkedin-mcp-server", + "linkedin-mcp-server" + ] + } + } +} +``` + +**Transport Modes:** + +- **Default (stdio)**: Standard communication for local MCP servers +- **Streamable HTTP**: For web-based MCP server + +**CLI Options:** + +- `--no-headless` - Show browser window (useful for login and debugging) +- `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) +- `--transport {stdio,streamable-http}` - Set transport mode +- `--host HOST` - HTTP server host (default: 127.0.0.1) +- `--port PORT` - HTTP server port (default: 8000) +- `--path PATH` - HTTP server path (default: /mcp) +- `--get-session [PATH]` - Login interactively and save session (default: ~/.linkedin-mcp/session.json) +- `--clear-session` - Clear stored LinkedIn session file + +**Basic Usage Examples:** + +```bash +# Create a session interactively +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session + +# Run with debug logging +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --log-level DEBUG +``` + +**HTTP Mode Example (for web-based MCP clients):** + +```bash +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server \ + --transport streamable-http --host 127.0.0.1 --port 8080 --path /mcp +``` + +**Test with mcp inspector:** + +1. Install and run mcp inspector ```bunx @modelcontextprotocol/inspector``` +2. Click pre-filled token url to open the inspector in your browser +3. Select `Streamable HTTP` as `Transport Type` +4. Set `URL` to `http://localhost:8080/mcp` +5. Connect +6. Test tools + +
+ +
+โ— Troubleshooting + +**Installation issues:** + +- Ensure you have uv installed: `curl -LsSf https://astral.sh/uv/install.sh | sh` +- Check uv version: `uv --version` (should be 0.4.0 or higher) + +**Session issues:** + +- Session is stored at `~/.linkedin-mcp/session.json` +- Make sure you have only one active LinkedIn session at a time + +**Login issues:** + +- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` +- You might get a captcha challenge if you logged in frequently + +
+ +
+
+ +## ๐Ÿณ Docker Setup **Prerequisites:** Make sure you have [Docker](https://www.docker.com/get-started/) installed and running. @@ -219,119 +332,6 @@ docker run -it --rm \

-## ๐Ÿš€ uvx Setup (Quick Install - Universal) - -**Prerequisites:** Make sure you have [uv](https://docs.astral.sh/uv/) installed. - -### Installation - -**Step 1: Create a session (first time only)** - -```bash -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server \ - linkedin-mcp-server --get-session -``` - -This opens a browser for you to log in manually (5 minute timeout for 2FA, captcha, etc.). The session is saved to `~/.linkedin-mcp/session.json`. - -**Step 2: Run the server** - -```bash -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server -``` - -> [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` again. For debugging login issues, use `--no-headless` to see the browser window. - -### uvx Setup Help - -
-๐Ÿ”ง Configuration - -**Client Configuration:** - -```json -{ - "mcpServers": { - "linkedin": { - "command": "uvx", - "args": [ - "--from", - "git+https://github.com/stickerdaniel/linkedin-mcp-server", - "linkedin-mcp-server" - ] - } - } -} -``` - -**Transport Modes:** - -- **Default (stdio)**: Standard communication for local MCP servers -- **Streamable HTTP**: For web-based MCP server - -**CLI Options:** - -- `--no-headless` - Show browser window (useful for login and debugging) -- `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) -- `--transport {stdio,streamable-http}` - Set transport mode -- `--host HOST` - HTTP server host (default: 127.0.0.1) -- `--port PORT` - HTTP server port (default: 8000) -- `--path PATH` - HTTP server path (default: /mcp) -- `--get-session [PATH]` - Login interactively and save session (default: ~/.linkedin-mcp/session.json) -- `--clear-session` - Clear stored LinkedIn session file - -**Basic Usage Examples:** - -```bash -# Create a session interactively -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session - -# Run with debug logging -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --log-level DEBUG -``` - -**HTTP Mode Example (for web-based MCP clients):** - -```bash -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server \ - --transport streamable-http --host 127.0.0.1 --port 8080 --path /mcp -``` - -**Test with mcp inspector:** - -1. Install and run mcp inspector ```bunx @modelcontextprotocol/inspector``` -2. Click pre-filled token url to open the inspector in your browser -3. Select `Streamable HTTP` as `Transport Type` -4. Set `URL` to `http://localhost:8080/mcp` -5. Connect -6. Test tools - -
- -
-โ— Troubleshooting - -**Installation issues:** - -- Ensure you have uv installed: `curl -LsSf https://astral.sh/uv/install.sh | sh` -- Check uv version: `uv --version` (should be 0.4.0 or higher) - -**Session issues:** - -- Session is stored at `~/.linkedin-mcp/session.json` -- Make sure you have only one active LinkedIn session at a time - -**Login issues:** - -- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` -- You might get a captcha challenge if you logged in frequently - -
- -
-
- ## ๐Ÿ Local Setup (Develop & Contribute) **Prerequisites:** [Git](https://git-scm.com/downloads) and [uv](https://docs.astral.sh/uv/) installed From c98cbeda6908495388dc799bfa99f7b6bf550f57 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 09:21:35 +0100 Subject: [PATCH 279/565] docs(README): shorter features sectino --- README.md | 26 ++++++++------------------ 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/README.md b/README.md index 9da824a6..4332bafb 100644 --- a/README.md +++ b/README.md @@ -32,24 +32,14 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c ``` ## Features & Tool Status -> -> [!TIP] -> -> - **Profile Scraping** (`get_person_profile`): Get detailed information from a LinkedIn profile including work history, education, skills, and connections -> - **Company Analysis** (`get_company_profile`): Extract comprehensive company information from a LinkedIn company profile name -> - **Job Search** (`search_jobs`): Search for jobs with keywords and location filters -> - **Job Details** (`get_job_details`): Get detailed information about a specific job posting -> - **Session Management** (`close_session`): Properly close browser session and clean up resources - -**Tool Status:** - -| Tool | Status | -|------|--------| -| `get_person_profile` | Working | -| `get_company_profile` | Working | -| `search_jobs` | Broken (upstream) | -| `get_job_details` | Working | -| `close_session` | Working | + +| Tool | Description | Status | +|------|-------------|--------| +| `get_person_profile` | Get detailed profile info including work history, education, skills | Working | +| `get_company_profile` | Extract company information from a LinkedIn company name | Working | +| `search_jobs` | Search for jobs with keywords and location filters | Broken (upstream) | +| `get_job_details` | Get detailed information about a specific job posting | Working | +| `close_session` | Close browser session and clean up 
resources | Working |

From 0fd12aefcb80236cd3961c3b46697f23512b4580 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Mon, 12 Jan 2026 10:15:44 +0100 Subject: [PATCH 280/565] docs(README): Add Playwright installation to README prerequisites Updated prerequisites to include Playwright installation. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 4332bafb..4e9bf188 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,7 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c ## ๐Ÿš€ uvx Setup (Recommended - Universal) -**Prerequisites:** Make sure you have [uv](https://docs.astral.sh/uv/) installed. +**Prerequisites:** Make sure you have [uv](https://docs.astral.sh/uv/) and Playwright `uvx playwright install chromium` installed. ### Installation From edadcf108f0a229da7a4ca82a44aa94a1b7cbbdb Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 10:32:36 +0100 Subject: [PATCH 281/565] fix(browser): validate session requires navigating to LinkedIn feed first --- linkedin_mcp_server/drivers/browser.py | 3 ++- uv.lock | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index d96ec892..de7ff4ad 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -68,7 +68,8 @@ async def get_or_create_browser( try: await _browser.load_session(str(session_path)) logger.info(f"Loaded session from {session_path}") - # Validate session is actually logged in + # Navigate to LinkedIn to validate session + await _browser.page.goto("https://www.linkedin.com/feed/") if await is_logged_in(_browser.page): _browser.page.set_default_timeout( 5000 diff --git a/uv.lock b/uv.lock index 825ed09d..db6935f9 100644 --- a/uv.lock +++ b/uv.lock @@ -827,7 +827,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "1.4.1" 
+version = "2.0.2" source = { editable = "." } dependencies = [ { name = "fastmcp" }, From 5696de018afed5b7b3e07e4e62be5fb707703142 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 10:38:32 +0100 Subject: [PATCH 282/565] docs(README): streamline command examples and remove star section --- README.md | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 4e9bf188..ab31800c 100644 --- a/README.md +++ b/README.md @@ -53,8 +53,7 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c **Step 1: Create a session (first time only)** ```bash -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server \ - linkedin-mcp-server --get-session +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session ``` This opens a browser for you to log in manually (5 minute timeout for 2FA, captcha, etc.). The session is saved to `~/.linkedin-mcp/session.json`. @@ -119,8 +118,7 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp **HTTP Mode Example (for web-based MCP clients):** ```bash -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server \ - --transport streamable-http --host 127.0.0.1 --port 8080 --path /mcp +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --transport streamable-http --host 127.0.0.1 --port 8080 --path /mcp ``` **Test with mcp inspector:** @@ -422,14 +420,6 @@ Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@ โš ๏ธ Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. -## Star History - - - - - Star History Chart - - ## License This project is licensed under the Apache 2.0 license. 
From a210849efcc57021b3a4f01556491baff159a811 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 10:53:38 +0100 Subject: [PATCH 283/565] feat(config): add configurable browser timeout Add --timeout CLI argument and DEFAULT_TIMEOUT environment variable to configure Playwright page operation timeout. Default remains 5000ms. Resolves: #81 --- README.md | 40 ++++++++++++++++++++++++-- linkedin_mcp_server/config/loaders.py | 26 +++++++++++++++++ linkedin_mcp_server/config/schema.py | 1 + linkedin_mcp_server/drivers/browser.py | 9 +++--- 4 files changed, 70 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index ab31800c..40d465e9 100644 --- a/README.md +++ b/README.md @@ -65,7 +65,7 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp ``` > [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` again. For debugging login issues, use `--no-headless` to see the browser window. +> Sessions may expire over time. If you encounter authentication issues, run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` again. 
### uvx Setup Help @@ -104,6 +104,7 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp - `--path PATH` - HTTP server path (default: /mcp) - `--get-session [PATH]` - Login interactively and save session (default: ~/.linkedin-mcp/session.json) - `--clear-session` - Clear stored LinkedIn session file +- `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) **Basic Usage Examples:** @@ -118,7 +119,8 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp **HTTP Mode Example (for web-based MCP clients):** ```bash -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --transport streamable-http --host 127.0.0.1 --port 8080 --path /mcp +uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server \ + --transport streamable-http --host 127.0.0.1 --port 8080 --path /mcp ``` **Test with mcp inspector:** @@ -150,6 +152,12 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp - LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` - You might get a captcha challenge if you logged in frequently +**Timeout issues:** + +- If pages fail to load or elements aren't found, try increasing the timeout: `--timeout 10000` +- Users on slow connections may need higher values (e.g., 15000-30000ms) +- Can also set via environment variable: `DEFAULT_TIMEOUT=10000` +
@@ -245,6 +253,7 @@ This opens a browser window where you log in manually (5 minute timeout for 2FA, - `--path PATH` - HTTP server path (default: /mcp) - `--get-session [PATH]` - Login interactively and save session (default: ~/.linkedin-mcp/session.json) - `--clear-session` - Clear stored LinkedIn session file +- `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) **HTTP Mode Example (for web-based MCP clients):** @@ -281,6 +290,12 @@ docker run -it --rm \ - LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` - You might get a captcha challenge if you logged in a lot of times in a short period of time, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode where you can debug the login process (solve captcha manually) +**Timeout issues:** + +- If pages fail to load or elements aren't found, try increasing the timeout: `--timeout 10000` +- Users on slow connections may need higher values (e.g., 15000-30000ms) +- Can also set via environment variable: `DEFAULT_TIMEOUT=10000` +
@@ -315,6 +330,12 @@ docker run -it --rm \ - LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` - You might get a captcha challenge if you logged in frequently, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode +**Timeout issues:** + +- If pages fail to load or elements aren't found, try increasing the timeout: `--timeout 10000` +- Users on slow connections may need higher values (e.g., 15000-30000ms) +- Can also set via environment variable: `DEFAULT_TIMEOUT=10000` +
@@ -364,6 +385,7 @@ uv run -m linkedin_mcp_server --no-headless - `--path PATH` - HTTP server path (default: /mcp) - `--get-session [PATH]` - Login interactively and save session (default: ~/.linkedin-mcp/session.json) - `--clear-session` - Clear stored LinkedIn session file +- `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) - `--help` - Show help **HTTP Mode Example (for web-based MCP clients):** @@ -407,6 +429,12 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por - Reinstall Playwright: `uv run playwright install chromium` - Reinstall dependencies: `uv sync --reinstall` +**Timeout issues:** + +- If pages fail to load or elements aren't found, try increasing the timeout: `--timeout 10000` +- Users on slow connections may need higher values (e.g., 15000-30000ms) +- Can also set via environment variable: `DEFAULT_TIMEOUT=10000` + Feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) or [PR](https://github.com/stickerdaniel/linkedin-mcp-server/pulls)! @@ -420,6 +448,14 @@ Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@ ⚠️ Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. +## Star History + + + + + Star History Chart + + ## License This project is licensed under the Apache 2.0 license.
diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index 99adbed5..4b428378 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -30,6 +30,7 @@ class EnvironmentKeys: LOG_LEVEL = "LOG_LEVEL" TRANSPORT = "TRANSPORT" LINKEDIN_COOKIE = "LINKEDIN_COOKIE" + DEFAULT_TIMEOUT = "DEFAULT_TIMEOUT" def is_interactive_environment() -> bool: @@ -72,6 +73,17 @@ def load_from_env(config: AppConfig) -> AppConfig: if cookie := os.environ.get(EnvironmentKeys.LINKEDIN_COOKIE): config.server.linkedin_cookie = cookie + # Default timeout for page operations + if timeout_env := os.environ.get(EnvironmentKeys.DEFAULT_TIMEOUT): + try: + timeout_ms = int(timeout_env) + if timeout_ms > 0: + config.browser.default_timeout = timeout_ms + else: + logger.warning(f"Invalid timeout: {timeout_env}, must be positive") + except ValueError: + logger.warning(f"Invalid timeout value: {timeout_env}, using default") + return config @@ -145,6 +157,14 @@ def load_from_args(config: AppConfig) -> AppConfig: help="Browser viewport size (default: 1280x720)", ) + parser.add_argument( + "--timeout", + type=int, + default=None, + metavar="MS", + help="Browser timeout for page operations in milliseconds (default: 5000)", + ) + # Session management parser.add_argument( "--get-session", @@ -204,6 +224,12 @@ def load_from_args(config: AppConfig) -> AppConfig: except ValueError: logger.warning(f"Invalid viewport format: {args.viewport}, using default") + if args.timeout is not None: + if args.timeout > 0: + config.browser.default_timeout = args.timeout + else: + logger.warning(f"Invalid timeout: {args.timeout}, must be positive") + # Session management if args.get_session is not None: config.server.get_session = True diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index aaff72e8..3a427675 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -24,6 +24,7 
@@ class BrowserConfig: user_agent: str | None = None # Custom browser user agent viewport_width: int = 1280 viewport_height: int = 720 + default_timeout: int = 5000 # Milliseconds for page operations @dataclass diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index de7ff4ad..bb834025 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -18,6 +18,7 @@ ) from linkedin_scraper.core import detect_rate_limit +from linkedin_mcp_server.config import get_config from linkedin_mcp_server.utils import get_linkedin_cookie logger = logging.getLogger(__name__) @@ -71,9 +72,8 @@ async def get_or_create_browser( # Navigate to LinkedIn to validate session await _browser.page.goto("https://www.linkedin.com/feed/") if await is_logged_in(_browser.page): - _browser.page.set_default_timeout( - 5000 - ) # 5s timeout for element operations + config = get_config() + _browser.page.set_default_timeout(config.browser.default_timeout) return _browser logger.warning( "Session loaded but expired, trying to create session from cookie" @@ -86,7 +86,8 @@ async def get_or_create_browser( try: await login_with_cookie(_browser.page, cookie) logger.info("Authenticated using LINKEDIN_COOKIE") - _browser.page.set_default_timeout(5000) # 5s timeout for element operations + config = get_config() + _browser.page.set_default_timeout(config.browser.default_timeout) return _browser except Exception as e: logger.warning(f"Cookie authentication failed: {e}") From ffebbe39cff436864e1cc7d67d1a0f6af1d4c84d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 11:02:59 +0100 Subject: [PATCH 284/565] feat(config): enhance timeout validation and refactor browser settings application - Introduced a new `positive_int` function for validating positive integer inputs. - Updated timeout validation in `load_from_env` and `load_from_args` to raise `ConfigurationError` for invalid values. 
- Refactored browser timeout application into a separate `_apply_browser_settings` function for better code organization. --- linkedin_mcp_server/config/loaders.py | 30 ++++++++++++++++---------- linkedin_mcp_server/drivers/browser.py | 12 +++++++---- 2 files changed, 27 insertions(+), 15 deletions(-) diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index 4b428378..6ae6f44f 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -11,7 +11,7 @@ from dotenv import load_dotenv -from .schema import AppConfig +from .schema import AppConfig, ConfigurationError # Load .env file if present load_dotenv() @@ -23,6 +23,14 @@ FALSY_VALUES = ("0", "false", "False", "no", "No") +def positive_int(value: str) -> int: + """Argparse type for positive integers.""" + ivalue = int(value) + if ivalue <= 0: + raise argparse.ArgumentTypeError(f"must be positive, got {value}") + return ivalue + + class EnvironmentKeys: """Environment variable names used by the application.""" @@ -77,12 +85,15 @@ def load_from_env(config: AppConfig) -> AppConfig: if timeout_env := os.environ.get(EnvironmentKeys.DEFAULT_TIMEOUT): try: timeout_ms = int(timeout_env) - if timeout_ms > 0: - config.browser.default_timeout = timeout_ms - else: - logger.warning(f"Invalid timeout: {timeout_env}, must be positive") + if timeout_ms <= 0: + raise ConfigurationError( + f"Invalid DEFAULT_TIMEOUT: {timeout_env}. Must be a positive integer." + ) + config.browser.default_timeout = timeout_ms except ValueError: - logger.warning(f"Invalid timeout value: {timeout_env}, using default") + raise ConfigurationError( + f"Invalid DEFAULT_TIMEOUT: '{timeout_env}'. Must be an integer." 
+ ) return config @@ -159,7 +170,7 @@ def load_from_args(config: AppConfig) -> AppConfig: parser.add_argument( "--timeout", - type=int, + type=positive_int, default=None, metavar="MS", help="Browser timeout for page operations in milliseconds (default: 5000)", @@ -225,10 +236,7 @@ def load_from_args(config: AppConfig) -> AppConfig: logger.warning(f"Invalid viewport format: {args.viewport}, using default") if args.timeout is not None: - if args.timeout > 0: - config.browser.default_timeout = args.timeout - else: - logger.warning(f"Invalid timeout: {args.timeout}, must be positive") + config.browser.default_timeout = args.timeout # Session management if args.get_session is not None: diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index bb834025..3eda44a6 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -32,6 +32,12 @@ _headless: bool = True +def _apply_browser_settings(browser: BrowserManager) -> None: + """Apply configuration settings to browser instance.""" + config = get_config() + browser.page.set_default_timeout(config.browser.default_timeout) + + async def get_or_create_browser( headless: bool | None = None, session_path: Path | None = None, @@ -72,8 +78,7 @@ async def get_or_create_browser( # Navigate to LinkedIn to validate session await _browser.page.goto("https://www.linkedin.com/feed/") if await is_logged_in(_browser.page): - config = get_config() - _browser.page.set_default_timeout(config.browser.default_timeout) + _apply_browser_settings(_browser) return _browser logger.warning( "Session loaded but expired, trying to create session from cookie" @@ -86,8 +91,7 @@ async def get_or_create_browser( try: await login_with_cookie(_browser.page, cookie) logger.info("Authenticated using LINKEDIN_COOKIE") - config = get_config() - _browser.page.set_default_timeout(config.browser.default_timeout) + _apply_browser_settings(_browser) return _browser except Exception as e: 
logger.warning(f"Cookie authentication failed: {e}") From 1b81ea9889a6e98e530afd27a7bb35e36994d6d9 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 11:05:07 +0100 Subject: [PATCH 285/565] docs(README): remove star history section from README --- README.md | 8 -------- 1 file changed, 8 deletions(-) diff --git a/README.md b/README.md index 40d465e9..e486c8f0 100644 --- a/README.md +++ b/README.md @@ -448,14 +448,6 @@ Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@ ⚠️ Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. -## Star History - - - - - Star History Chart - - ## License This project is licensed under the Apache 2.0 license. From 3c47f024ae304529ba6f80587c9132a55da41b26 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 11:15:16 +0100 Subject: [PATCH 286/565] chore: bump version to 2.1.0 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index aab688f2..8647d8a4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "2.0.2" +version = "2.1.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index db6935f9..e6f074db 100644 --- a/uv.lock +++ b/uv.lock @@ -827,7 +827,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "2.0.2" +version = "2.1.0" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From 0336687ba18c9aafac71e57b2ed8e998b3eb8108 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 12 Jan 2026 10:15:48 +0000 Subject: [PATCH 287/565] chore(dxt): update manifest.json version to v2.1.0 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index 98637684..b06cb60d 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.0.2", + "version": "2.1.0", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "~/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.0.2" + "stickerdaniel/linkedin-mcp-server:2.1.0" ] } }, From ed32fa08a446c6e747edca3368c525a265580db5 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 11:37:50 +0100 Subject: 
[PATCH 288/565] fix(manifest): remove stale get_recommended_jobs tool reference --- .github/ISSUE_TEMPLATE/bug_report.md | 1 - manifest.json | 4 ---- 2 files changed, 5 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 394e9546..0ba70198 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -19,7 +19,6 @@ assignees: '' - [ ] get_company_profile - [ ] get_job_details - [ ] search_jobs - - [ ] get_recommended_jobs - [ ] close_session ## MCP Client Configuration diff --git a/manifest.json b/manifest.json index b06cb60d..42f4945d 100644 --- a/manifest.json +++ b/manifest.json @@ -47,10 +47,6 @@ "name": "search_jobs", "description": "Search for jobs with filters like keywords and location" }, - { - "name": "get_recommended_jobs", - "description": "Get personalized job recommendations based on your profile" - }, { "name": "close_session", "description": "Properly close browser session and clean up resources" From da9cd11717e5c081ddd3094f5aa22ad308684828 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 11:49:24 +0100 Subject: [PATCH 289/565] fix(browser): pass config options to BrowserManager Pass slow_mo, user_agent, and viewport from BrowserConfig to BrowserManager when creating the browser instance. 
Resolves: #85 --- linkedin_mcp_server/drivers/browser.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 3eda44a6..eec6906b 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -66,8 +66,18 @@ async def get_or_create_browser( if _browser is not None: return cast(BrowserManager, _browser) + config = get_config() + viewport = { + "width": config.browser.viewport_width, + "height": config.browser.viewport_height, + } logger.info(f"Creating new browser (headless={_headless})") - _browser = BrowserManager(headless=_headless) + _browser = BrowserManager( + headless=_headless, + slow_mo=config.browser.slow_mo, + user_agent=config.browser.user_agent, + viewport=viewport, + ) await _browser.start() # Priority 1: Load session file if available From 0bb3148dc8343330f650fc7051ff6f55c29fdc7b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 11:54:09 +0100 Subject: [PATCH 290/565] fix(dxt): use ${HOME} instead of tilde in Docker volume mount Replace ~ with ${HOME} in manifest.json Docker volume mount path. Tilde expansion only works in shell context, not when Docker is invoked directly by Claude Desktop or DXT extension. 
Resolves: #87 --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 42f4945d..1ff69a76 100644 --- a/manifest.json +++ b/manifest.json @@ -24,7 +24,7 @@ "command": "docker", "args": [ "run", "--rm", "-i", - "-v", "~/.linkedin-mcp:/home/pwuser/.linkedin-mcp", + "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", "stickerdaniel/linkedin-mcp-server:2.1.0" ] } }, From d442832d8fdedfc50f5aec8689fc19e708f1a19b Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Mon, 12 Jan 2026 11:57:15 +0100 Subject: [PATCH 291/565] Update browser.py --- linkedin_mcp_server/drivers/browser.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index eec6906b..ce43ddaa 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -71,7 +71,13 @@ async def get_or_create_browser( "width": config.browser.viewport_width, "height": config.browser.viewport_height, } - logger.info(f"Creating new browser (headless={_headless})") + logger.info( + "Creating new browser (headless=%s, slow_mo=%sms, viewport=%sx%s)", + _headless, + config.browser.slow_mo, + viewport["width"], + viewport["height"], + ) _browser = BrowserManager( headless=_headless, slow_mo=config.browser.slow_mo, From 70fb1c79ae2e800f67fa7982b1949ff011996216 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 12:01:51 +0100 Subject: [PATCH 292/565] docs(readme): reorder badges to match section order MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Reorder Installation Methods badges to match paragraph order: uvx → Docker → DXT → Development Fix anchor links for Docker and uvx badges.
--- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index e486c8f0..9a9518c5 100644 --- a/README.md +++ b/README.md @@ -10,9 +10,9 @@ Through this LinkedIn MCP server, AI assistants like Claude can connect to your ## Installation Methods -[![Docker](https://img.shields.io/badge/Docker-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup-recommended---universal) +[![uvx](https://img.shields.io/badge/uvx-Quick_Install-de5fe9?style=for-the-badge&logo=data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNDEiIGhlaWdodD0iNDEiIHZpZXdCb3g9IjAgMCA0MSA0MSIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZD0iTS01LjI4NjE5ZS0wNiAwLjE2ODYyOUwwLjA4NDMwOTggMjAuMTY4NUwwLjE1MTc2MiAzNi4xNjgzQzAuMTYxMDc1IDM4LjM3NzQgMS45NTk0NyA0MC4xNjA3IDQuMTY4NTkgNDAuMTUxNEwyMC4xNjg0IDQwLjA4NEwzMC4xNjg0IDQwLjA0MThMMzEuMTg1MiA0MC4wMzc1QzMzLjM4NzcgNDAuMDI4MiAzNS4xNjgzIDM4LjIwMjYgMzUuMTY4MyAzNlYzNkwzNy4wMDAzIDM2TDM3LjAwMDMgMzkuOTk5Mkw0MC4xNjgzIDM5Ljk5OTZMMzkuOTk5NiAtOS45NDY1M2UtMDdMMjEuNTk5OCAwLjA3NzU2ODlMMjEuNjc3NCAxNi4wMTg1TDIxLjY3NzQgMjUuOTk5OEwyMC4wNzc0IDI1Ljk5OThMMTguMzk5OCAyNS45OTk4TDE4LjQ3NzQgMTYuMDMyTDE4LjM5OTggMC4wOTEwNTkzTC01LjI4NjE5ZS0wNiAwLjE2ODYyOVoiIGZpbGw9IiNERTVGRTkiLz4KPC9zdmc+Cg==)](#-uvx-setup-recommended---universal) +[![Docker](https://img.shields.io/badge/Docker-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup) [![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_DXT-d97757?style=for-the-badge&logo=anthropic)](#-claude-desktop-dxt-extension) 
-[![uvx](https://img.shields.io/badge/uvx-Quick_Install-de5fe9?style=for-the-badge&logo=data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNDEiIGhlaWdodD0iNDEiIHZpZXdCb3g9IjAgMCA0MSA0MSIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZD0iTS01LjI4NjE5ZS0wNiAwLjE2ODYyOUwwLjA4NDMwOTggMjAuMTY4NUwwLjE1MTc2MiAzNi4xNjgzQzAuMTYxMDc1IDM4LjM3NzQgMS45NTk0NyA0MC4xNjA3IDQuMTY4NTkgNDAuMTUxNEwyMC4xNjg0IDQwLjA4NEwzMC4xNjg0IDQwLjA0MThMMzEuMTg1MiA0MC4wMzc1QzMzLjM4NzcgNDAuMDI4MiAzNS4xNjgzIDM4LjIwMjYgMzUuMTY4MyAzNlYzNkwzNy4wMDAzIDM2TDM3LjAwMDMgMzkuOTk5Mkw0MC4xNjgzIDM5Ljk5OTZMMzkuOTk5NiAtOS45NDY1M2UtMDdMMjEuNTk5OCAwLjA3NzU2ODlMMjEuNjc3NCAxNi4wMTg1TDIxLjY3NzQgMjUuOTk5OEwyMC4wNzc0IDI1Ljk5OThMMTguMzk5OCAyNS45OTk4TDE4LjQ3NzQgMTYuMDMyTDE4LjM5OTggMC4wOTEwNTkzTC01LjI4NjE5ZS0wNiAwLjE2ODYyOVoiIGZpbGw9IiNERTVGRTkiLz4KPC9zdmc+Cg==)](#-uvx-setup-quick-install---universal) [![Development](https://img.shields.io/badge/Development-Local-ffdc53?style=for-the-badge&logo=python&logoColor=ffdc53)](#-local-setup-develop--contribute) From e42fb58342fcf4e02c40dfd5da80ac084233a409 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 12:08:59 +0100 Subject: [PATCH 293/565] chore: bump version to 2.1.1 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8647d8a4..c7d80ab4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "2.1.0" +version = "2.1.1" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index e6f074db..fa4c2a35 100644 --- a/uv.lock +++ b/uv.lock @@ -827,7 +827,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "2.1.0" +version = "2.1.1" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From f63837525a10cdec2e547676b897f551fa1c6573 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 12 Jan 2026 11:10:09 +0000 Subject: [PATCH 294/565] chore(dxt): update manifest.json version to v2.1.1 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index 1ff69a76..5cc8601e 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.1.0", + "version": "2.1.1", "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.1.0" + "stickerdaniel/linkedin-mcp-server:2.1.1" ] } }, From 54f3cbf0ce23f1926d00270b2ad795c1ddce004a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 12:54:36 +0100 
Subject: [PATCH 295/565] fix(dxt): update manifest descriptions and remove invalid compatibility keys --- manifest.json | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/manifest.json b/manifest.json index 5cc8601e..df569fe9 100644 --- a/manifest.json +++ b/manifest.json @@ -3,8 +3,8 @@ "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", "version": "2.1.1", - "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", - "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", + "description": "Connect Claude to LinkedIn for profiles, companies, and job details", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\nCreate a session using one of these methods:\n1. **Cookie**: Pass your `li_at` cookie via `LINKEDIN_COOKIE` environment variable - session will be created and stored automatically\n2. **Browser login**: Use the [uvx setup](https://github.com/stickerdaniel/linkedin-mcp-server#-uvx-setup-recommended---universal) to log in interactively - session will be stored and used by this extension\n\n## Requirements\n- Docker installed and running\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -55,8 +55,6 @@ "user_config": {}, "compatibility": { "claude_desktop": ">=0.10.0", - "dxt_version": ">=0.1", - "docker_version": ">=20.0.0", "platforms": ["darwin", "linux", "win32"] } } From ee39269065b72f0d47d8af36e0d0db55e0c203c0 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 14:31:33 +0100 Subject: [PATCH 296/565] ci(docker): auto-update Docker Hub description on release --- .github/workflows/release.yml | 8 +++++ docs/docker-hub.md | 57 +++++++++++++++++++++++++++++++++++ 2 files changed, 65 insertions(+) create mode 100644 docs/docker-hub.md diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 7ca7d3e0..a09fa568 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -125,6 +125,14 @@ jobs: cache-from: type=gha cache-to: type=gha,mode=max + - name: Update Docker Hub description + uses: peter-evans/dockerhub-description@v5 + with: + username: ${{ 
secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + repository: stickerdaniel/linkedin-mcp-server + readme-filepath: docs/docker-hub.md + - name: Optimize uv cache for CI run: uv cache prune --ci diff --git a/docs/docker-hub.md b/docs/docker-hub.md new file mode 100644 index 00000000..64c2e166 --- /dev/null +++ b/docs/docker-hub.md @@ -0,0 +1,57 @@ +# LinkedIn MCP Server + +A Model Context Protocol (MCP) server that connects AI assistants to LinkedIn. Access profiles, companies, and job postings through a Docker container. + +## Features +- **Profile Access**: Get detailed LinkedIn profile information +- **Company Profiles**: Extract comprehensive company data +- **Job Details**: Retrieve job posting information +- **Job Search**: Search for jobs with keywords and location filters + +## Quick Start + +### Option 1: Cookie Authentication (Simplest) + +Pass your LinkedIn `li_at` cookie - session will be created and stored automatically. + +> **Note:** If you encounter authentication challenges, use Option 2 instead. + +```json +{ + "mcpServers": { + "linkedin": { + "command": "docker", + "args": ["run", "-i", "--rm", "-e", "LINKEDIN_COOKIE", "stickerdaniel/linkedin-mcp-server"], + "env": { + "LINKEDIN_COOKIE": "your_li_at_cookie_value" + } + } + } +} +``` + +### Option 2: Browser Login via uvx + +Create a session using the [uvx setup](https://github.com/stickerdaniel/linkedin-mcp-server#-uvx-setup-recommended---universal), then mount it: + +```json +{ + "mcpServers": { + "linkedin": { + "command": "docker", + "args": [ + "run", "--rm", "-i", + "-v", "~/.linkedin-mcp:/home/pwuser/.linkedin-mcp", + "stickerdaniel/linkedin-mcp-server:latest" + ] + } + } +} +``` + +> **Note:** Docker containers don't have a display server. If you encounter authentication issues with cookie auth, use the [uvx setup](https://github.com/stickerdaniel/linkedin-mcp-server#-uvx-setup-recommended---universal) to create a session on your host machine. 
+ +## Repository +- **Source**: https://github.com/stickerdaniel/linkedin-mcp-server +- **Documentation**: Full setup and usage guide in README +- **License**: Apache 2.0 From 40b8ef44a8712bd736a4ab27f284a860fd1272db Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 15:17:34 +0100 Subject: [PATCH 297/565] docs(readme): update documentation for consistency and accuracy --- README.md | 54 +++++++++++++++++++++++++++------------------- docs/docker-hub.md | 7 +++--- 2 files changed, 36 insertions(+), 25 deletions(-) diff --git a/README.md b/README.md index 9a9518c5..5df76385 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ License

-Through this LinkedIn MCP server, AI assistants like Claude can connect to your LinkedIn. Give access to profiles and companies, search for jobs, or get job details. All from a Docker container on your machine. +Through this LinkedIn MCP server, AI assistants like Claude can connect to your LinkedIn. Access profiles and companies, search for jobs, or get job details. ## Installation Methods @@ -41,6 +41,9 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c | `get_job_details` | Get detailed information about a specific job posting | Working | | `close_session` | Close browser session and clean up resources | Working | +> [!WARNING] +> The session file at `~/.linkedin-mcp/session.json` contains sensitive authentication data. Keep it secure and do not share it. +

@@ -96,13 +99,13 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp **CLI Options:** -- `--no-headless` - Show browser window (useful for login and debugging) +- `--get-session [PATH]` - Open browser to log in and save session (default: ~/.linkedin-mcp/session.json) +- `--no-headless` - Show browser window (useful for debugging scraping issues) - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) - `--transport {stdio,streamable-http}` - Set transport mode - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) -- `--get-session [PATH]` - Login interactively and save session (default: ~/.linkedin-mcp/session.json) - `--clear-session` - Clear stored LinkedIn session file - `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) @@ -150,7 +153,7 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp **Login issues:** - LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` -- You might get a captcha challenge if you logged in frequently +- You might get a captcha challenge if you logged in frequently. Run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` which opens a browser where you can solve it manually. **Timeout issues:** @@ -227,11 +230,10 @@ This opens a browser window where you log in manually (5 minute timeout for 2FA, > [!NOTE] > Sessions may expire over time. If you encounter authentication issues, run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` again locally, or use a fresh `li_at` cookie. -> [!WARNING] -> The session file at `~/.linkedin-mcp/session.json` contains sensitive authentication data. Keep it secure and do not share it. 
- > [!NOTE] -> **Why can't I run `--get-session` in Docker?** Docker containers don't have a display server, so Playwright can't show a browser window. You must create the session on your host machine first, then mount it into Docker. +> **Why can't I run `--get-session` in Docker?** Docker containers don't have a display server. You have two options: +> 1. Create a session on your host using the [uvx setup](#-uvx-setup-recommended---universal) and mount it into Docker +> 2. Pass your `li_at` cookie via `LINKEDIN_COOKIE` (if you encounter auth challenges, use option 1 instead) ### Docker Setup Help @@ -245,16 +247,17 @@ This opens a browser window where you log in manually (5 minute timeout for 2FA, **CLI Options:** -- `--no-headless` - Show browser window (useful for login and debugging) - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) - `--transport {stdio,streamable-http}` - Set transport mode - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) -- `--get-session [PATH]` - Login interactively and save session (default: ~/.linkedin-mcp/session.json) - `--clear-session` - Clear stored LinkedIn session file - `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) +> [!NOTE] +> `--get-session` and `--no-headless` are not available in Docker (no display server). Use the [uvx setup](#-uvx-setup-recommended---universal) to create sessions. 
+ **HTTP Mode Example (for web-based MCP clients):** ```bash @@ -288,7 +291,7 @@ docker run -it --rm \ - Make sure you have only one active LinkedIn session at a time - LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` -- You might get a captcha challenge if you logged in a lot of times in a short period of time, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode where you can debug the login process (solve captcha manually) +- You might get a captcha challenge if you logged in frequently. Run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` which opens a browser where you can solve captchas manually. See the [uvx setup](#-uvx-setup-recommended---universal) for prerequisites. **Timeout issues:** @@ -309,10 +312,10 @@ docker run -it --rm \ 1. Download the [DXT extension](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest) 2. Double-click to install into Claude Desktop -3. Create a session using `--get-session` (see Docker instructions above) +3. Create a session: `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` > [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` again. For debugging login issues, use the [local setup](#-local-setup-develop--contribute) with `--no-headless` mode. +> Sessions may expire over time. If you encounter authentication issues, run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` again. 
### DXT Extension Setup Help @@ -328,7 +331,7 @@ docker run -it --rm \ - Make sure you have only one active LinkedIn session at a time - LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` -- You might get a captcha challenge if you logged in frequently, then try again later or follow the [local setup instructions](#-local-setup-develop--contribute) to run the server manually in --no-headless mode +- You might get a captcha challenge if you logged in frequently. Run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` which opens a browser where you can solve captchas manually. See the [uvx setup](#-uvx-setup-recommended---universal) for prerequisites. **Timeout issues:** @@ -365,9 +368,11 @@ uv run playwright install chromium # 5. Install pre-commit hooks uv run pre-commit install -# 6. Start the server (first run opens browser for manual login) -# Login in the browser window - session will be saved to ~/.linkedin-mcp/session.json -uv run -m linkedin_mcp_server --no-headless +# 6. Create a session (first time only) +uv run -m linkedin_mcp_server --get-session + +# 7. 
Start the server +uv run -m linkedin_mcp_server ``` ### Local Setup Help @@ -377,13 +382,13 @@ uv run -m linkedin_mcp_server --no-headless **CLI Options:** -- `--no-headless` - Show browser window (useful for login and debugging) +- `--get-session [PATH]` - Open browser to log in and save session (default: ~/.linkedin-mcp/session.json) +- `--no-headless` - Show browser window (useful for debugging scraping issues) - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) - `--transport {stdio,streamable-http}` - Set transport mode - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) -- `--get-session [PATH]` - Login interactively and save session (default: ~/.linkedin-mcp/session.json) - `--clear-session` - Clear stored LinkedIn session file - `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) - `--help` - Show help @@ -412,11 +417,16 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por
โ— Troubleshooting -**Login/Scraping issues:** +**Login issues:** -- Use `--no-headless` to see browser actions (captcha challenge, LinkedIn mobile app 2fa, ...) -- Add `--log-level DEBUG` to see more detailed logging - Make sure you have only one active LinkedIn session at a time +- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` +- You might get a captcha challenge if you logged in frequently. The `--get-session` command opens a browser where you can solve it manually. + +**Scraping issues:** + +- Use `--no-headless` to see browser actions and debug scraping problems +- Add `--log-level DEBUG` to see more detailed logging **Session issues:** diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 64c2e166..36a36f8b 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -3,6 +3,7 @@ A Model Context Protocol (MCP) server that connects AI assistants to LinkedIn. Access profiles, companies, and job postings through a Docker container. ## Features + - **Profile Access**: Get detailed LinkedIn profile information - **Company Profiles**: Extract comprehensive company data - **Job Details**: Retrieve job posting information @@ -49,9 +50,9 @@ Create a session using the [uvx setup](https://github.com/stickerdaniel/linkedin } ``` -> **Note:** Docker containers don't have a display server. If you encounter authentication issues with cookie auth, use the [uvx setup](https://github.com/stickerdaniel/linkedin-mcp-server#-uvx-setup-recommended---universal) to create a session on your host machine. +> **Note:** Docker containers don't have a display server, so you can't use the `--get-session` command in Docker. 
## Repository -- **Source**: https://github.com/stickerdaniel/linkedin-mcp-server -- **Documentation**: Full setup and usage guide in README + +- **Source**: - **License**: Apache 2.0 From 4350e3436a18f688b2376911fd7c2f06c25805f9 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 12 Jan 2026 15:17:56 +0100 Subject: [PATCH 298/565] chore: bump version to 2.1.2 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c7d80ab4..c60fabeb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "2.1.1" +version = "2.1.2" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index fa4c2a35..dd9d90b7 100644 --- a/uv.lock +++ b/uv.lock @@ -827,7 +827,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "2.1.1" +version = "2.1.2" source = { editable = "." } dependencies = [ { name = "fastmcp" }, From e0d96218fafdaf0a15a21a1ca5f09b0d617f6a16 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 12 Jan 2026 14:28:45 +0000 Subject: [PATCH 299/565] chore(dxt): update manifest.json version to v2.1.2 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index df569fe9..2be73299 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.1.1", + "version": "2.1.2", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\nCreate a session using one of these methods:\n1. **Cookie**: Pass your `li_at` cookie via `LINKEDIN_COOKIE` environment variable - session will be created and stored automatically\n2. **Browser login**: Use the [uvx setup](https://github.com/stickerdaniel/linkedin-mcp-server#-uvx-setup-recommended---universal) to log in interactively - session will be stored and used by this extension\n\n## Requirements\n- Docker installed and running\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.1.1" + "stickerdaniel/linkedin-mcp-server:2.1.2" ] } }, From 063fa72fb02393f1551022efc00d5be91a1e1223 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Mon, 12 Jan 2026 22:32:38 +0100 Subject: [PATCH 300/565] docs(README): mention Docker should be running, not just installed --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5df76385..0cc25c0e 100644 --- a/README.md +++ b/README.md @@ -306,7 +306,7 @@ docker run -it --rm \ ## ๐Ÿ“ฆ Claude Desktop (DXT Extension) -**Prerequisites:** [Claude Desktop](https://claude.ai/download) and [Docker](https://www.docker.com/get-started/) installed +**Prerequisites:** [Claude Desktop](https://claude.ai/download) and [Docker](https://www.docker.com/get-started/) installed & running **One-click 
installation** for Claude Desktop users: From 9ba87bee7f6b87fbb4076fea65b2c9f41ad84737 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 15 Jan 2026 21:50:04 +0000 Subject: [PATCH 301/565] chore(deps): pin dependencies --- .github/workflows/ci.yml | 6 +++--- .github/workflows/claude.yml | 4 ++-- .github/workflows/release.yml | 20 ++++++++++---------- Dockerfile | 4 ++-- docker-compose.yml | 2 +- 5 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b7322853..3a87a177 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,10 +12,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 - name: Set up uv - uses: astral-sh/setup-uv@v7 + uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7 with: enable-cache: true @@ -25,7 +25,7 @@ jobs: uv sync --group dev - name: Run pre-commit hooks - uses: pre-commit/action@v3.0.1 + uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1 - name: Optimize uv cache for CI run: uv cache prune --ci diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index 50f44994..5f9842e1 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -26,13 +26,13 @@ jobs: actions: read # Required for Claude to read CI results on PRs steps: - name: Checkout repository - uses: actions/checkout@v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: fetch-depth: 1 - name: Run Claude Code id: claude - uses: anthropics/claude-code-action@v1 + uses: anthropics/claude-code-action@1b8ee3b94104046d71fde52ec3557651ad8c0d71 # v1 with: claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a09fa568..8c444191 100644 --- 
a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -13,12 +13,12 @@ jobs: should-release: ${{ steps.check.outputs.should-release }} new-version: ${{ steps.check.outputs.new-version }} steps: - - uses: actions/checkout@v6 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: fetch-depth: 2 # Need to compare with previous commit - name: Set up uv - uses: astral-sh/setup-uv@v7 + uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7 with: enable-cache: true @@ -57,17 +57,17 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: fetch-depth: 0 - name: Set up uv - uses: astral-sh/setup-uv@v7 + uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7 with: enable-cache: true - name: Set up Bun - uses: oven-sh/setup-bun@v2 + uses: oven-sh/setup-bun@b7a1c7ccf290d58743029c4f6903da283811b979 # v2 - name: Update manifest.json version and Docker image run: | @@ -105,16 +105,16 @@ jobs: fi - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3 - name: Log in to Docker Hub - uses: docker/login-action@v3 + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - name: Build and push Docker images - uses: docker/build-push-action@v6 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6 with: context: . 
push: true @@ -126,7 +126,7 @@ jobs: cache-to: type=gha,mode=max - name: Update Docker Hub description - uses: peter-evans/dockerhub-description@v5 + uses: peter-evans/dockerhub-description@1b9a80c056b620d92cedb9d9b5a223409c68ddfa # v5 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} @@ -149,7 +149,7 @@ jobs: - name: Create GitHub Release env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - uses: softprops/action-gh-release@v2 + uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2 with: tag_name: v${{ env.VERSION }} files: | diff --git a/Dockerfile b/Dockerfile index 6c6e7012..503557d5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ -FROM mcr.microsoft.com/playwright/python:v1.57.0-noble +FROM mcr.microsoft.com/playwright/python:v1.57.0-noble@sha256:3de745b23fc4b33fccbcb3f592ee52dd5c80ce79f19f839c825ce23364e403c1 # Install uv -COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ +COPY --from=ghcr.io/astral-sh/uv:latest@sha256:9a23023be68b2ed09750ae636228e903a54a05ea56ed03a934d00fe9fbeded4b /uv /uvx /bin/ # Set working directory and fix ownership WORKDIR /app diff --git a/docker-compose.yml b/docker-compose.yml index 5acc1821..200d5ff4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:latest + image: stickerdaniel/linkedin-mcp-server:latest@sha256:5340e9f5826b375a08172918f83aee74ef1e2232715147e5beb372ca014f7187 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: From c8819ce8f5cc95201374387dd365bdff9d2fd91d Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 16 Jan 2026 05:20:23 +0000 Subject: [PATCH 302/565] chore(deps): update oven-sh/setup-bun digest to db6bcf6 --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 
8c444191..09559f56 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -67,7 +67,7 @@ jobs: enable-cache: true - name: Set up Bun - uses: oven-sh/setup-bun@b7a1c7ccf290d58743029c4f6903da283811b979 # v2 + uses: oven-sh/setup-bun@db6bcf6eb8d88a8aa03265b887ec7bd84d64cd68 # v2 - name: Update manifest.json version and Docker image run: | From c25ff4b6dba0d417c96225d20cb2f7d54c56c409 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Fri, 16 Jan 2026 06:22:03 +0100 Subject: [PATCH 303/565] docs(README): add incognito cookie retrieval instructions for LinkedIn Updated instructions to use an incognito tab for obtaining the 'li_at' cookie. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0cc25c0e..d8fe3c89 100644 --- a/README.md +++ b/README.md @@ -194,7 +194,7 @@ Get your LinkedIn `li_at` cookie and pass it to Docker: **To get your `li_at` cookie:** -1. Open LinkedIn in your browser and log in +1. Open LinkedIn in your browser in an **incognito** tab and log in 2. Open DevTools (F12) โ†’ Application โ†’ Cookies โ†’ linkedin.com 3. 
Copy the `li_at` cookie value From 5475c26edb7f59cad6ab0373d562cf1f3cf29e45 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 16 Jan 2026 17:35:30 +0000 Subject: [PATCH 304/565] chore(deps): update oven-sh/setup-bun digest to 3d26778 --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 09559f56..dda24394 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -67,7 +67,7 @@ jobs: enable-cache: true - name: Set up Bun - uses: oven-sh/setup-bun@db6bcf6eb8d88a8aa03265b887ec7bd84d64cd68 # v2 + uses: oven-sh/setup-bun@3d267786b128fe76c2f16a390aa2448b815359f3 # v2 - name: Update manifest.json version and Docker image run: | From b5fa7ebdee6a2921a71129856f8a6578b60959b1 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sat, 17 Jan 2026 01:43:42 +0000 Subject: [PATCH 305/565] chore(deps): update anthropics/claude-code-action digest to a017b83 --- .github/workflows/claude.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index 5f9842e1..f35ee4dc 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -32,7 +32,7 @@ jobs: - name: Run Claude Code id: claude - uses: anthropics/claude-code-action@1b8ee3b94104046d71fde52ec3557651ad8c0d71 # v1 + uses: anthropics/claude-code-action@a017b830c03e23789b11fb69ed571ea61c12e45c # v1 with: claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} From 77e159f53f4d3d94b68419efcea177dbe4d76fbc Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 18 Jan 2026 15:45:59 +0100 Subject: [PATCH 306/565] refactor(config): centralize validation in schema classes Move semantic validation (ranges, positive values) from loaders to schema classes. 
Add BrowserConfig.validate() for viewport, timeout, and slow_mo validation. Call validate() at end of load_config(). - Add new env vars: TIMEOUT, USER_AGENT, HOST, PORT, HTTP_PATH, SLOW_MO, VIEWPORT - Add --linkedin-cookie CLI argument - Fix --viewport default to None (was overwriting env vars) - Change viewport CLI error from warning to ConfigurationError --- linkedin_mcp_server/config/loaders.py | 84 +++++++++++++++++++++++---- linkedin_mcp_server/config/schema.py | 20 ++++++- 2 files changed, 90 insertions(+), 14 deletions(-) diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index 6ae6f44f..f7cbdd6b 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -38,7 +38,13 @@ class EnvironmentKeys: LOG_LEVEL = "LOG_LEVEL" TRANSPORT = "TRANSPORT" LINKEDIN_COOKIE = "LINKEDIN_COOKIE" - DEFAULT_TIMEOUT = "DEFAULT_TIMEOUT" + TIMEOUT = "TIMEOUT" + USER_AGENT = "USER_AGENT" + HOST = "HOST" + PORT = "PORT" + HTTP_PATH = "HTTP_PATH" + SLOW_MO = "SLOW_MO" + VIEWPORT = "VIEWPORT" def is_interactive_environment() -> bool: @@ -76,23 +82,61 @@ def load_from_env(config: AppConfig) -> AppConfig: config.server.transport = "stdio" elif transport_env == "streamable-http": config.server.transport = "streamable-http" + else: + raise ConfigurationError( + f"Invalid TRANSPORT: '{transport_env}'. Must be 'stdio' or 'streamable-http'." + ) # LinkedIn cookie for headless auth if cookie := os.environ.get(EnvironmentKeys.LINKEDIN_COOKIE): config.server.linkedin_cookie = cookie - # Default timeout for page operations - if timeout_env := os.environ.get(EnvironmentKeys.DEFAULT_TIMEOUT): + # Timeout for page operations (semantic validation in BrowserConfig.__post_init__) + if timeout_env := os.environ.get(EnvironmentKeys.TIMEOUT): + try: + config.browser.default_timeout = int(timeout_env) + except ValueError: + raise ConfigurationError( + f"Invalid TIMEOUT: '{timeout_env}'. Must be an integer." 
+ ) + + # Custom user agent + if user_agent_env := os.environ.get(EnvironmentKeys.USER_AGENT): + config.browser.user_agent = user_agent_env + + # HTTP server host + if host_env := os.environ.get(EnvironmentKeys.HOST): + config.server.host = host_env + + # HTTP server port (range validation in AppConfig.__post_init__) + if port_env := os.environ.get(EnvironmentKeys.PORT): + try: + config.server.port = int(port_env) + except ValueError: + raise ConfigurationError(f"Invalid PORT: '{port_env}'. Must be an integer.") + + # HTTP server path + if path_env := os.environ.get(EnvironmentKeys.HTTP_PATH): + config.server.path = path_env + + # Slow motion delay for debugging (semantic validation in BrowserConfig.__post_init__) + if slow_mo_env := os.environ.get(EnvironmentKeys.SLOW_MO): + try: + config.browser.slow_mo = int(slow_mo_env) + except ValueError: + raise ConfigurationError( + f"Invalid SLOW_MO: '{slow_mo_env}'. Must be an integer." + ) + + # Browser viewport (dimension validation in BrowserConfig.__post_init__) + if viewport_env := os.environ.get(EnvironmentKeys.VIEWPORT): try: - timeout_ms = int(timeout_env) - if timeout_ms <= 0: - raise ConfigurationError( - f"Invalid DEFAULT_TIMEOUT: {timeout_env}. Must be a positive integer." - ) - config.browser.default_timeout = timeout_ms + width, height = viewport_env.lower().split("x") + config.browser.viewport_width = int(width) + config.browser.viewport_height = int(height) except ValueError: raise ConfigurationError( - f"Invalid DEFAULT_TIMEOUT: '{timeout_env}'. Must be an integer." + f"Invalid VIEWPORT: '{viewport_env}'. Must be in format WxH (e.g., 1280x720)." 
) return config @@ -163,7 +207,7 @@ def load_from_args(config: AppConfig) -> AppConfig: parser.add_argument( "--viewport", type=str, - default="1280x720", + default=None, metavar="WxH", help="Browser viewport size (default: 1280x720)", ) @@ -198,6 +242,13 @@ def load_from_args(config: AppConfig) -> AppConfig: help="Clear stored LinkedIn session file", ) + parser.add_argument( + "--linkedin-cookie", + type=str, + default=None, + help="LinkedIn session cookie (li_at) for authentication", + ) + args = parser.parse_args() # Update configuration with parsed arguments @@ -227,13 +278,16 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.user_agent: config.browser.user_agent = args.user_agent + # Viewport (dimension validation in BrowserConfig.__post_init__) if args.viewport: try: width, height = args.viewport.lower().split("x") config.browser.viewport_width = int(width) config.browser.viewport_height = int(height) except ValueError: - logger.warning(f"Invalid viewport format: {args.viewport}, using default") + raise ConfigurationError( + f"Invalid --viewport: '{args.viewport}'. Must be in format WxH (e.g., 1280x720)." 
+ ) if args.timeout is not None: config.browser.default_timeout = args.timeout @@ -249,6 +303,9 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.clear_session: config.server.clear_session = True + if args.linkedin_cookie: + config.server.linkedin_cookie = args.linkedin_cookie + return config @@ -277,4 +334,7 @@ def load_config() -> AppConfig: # Override with command line arguments (highest priority) config = load_from_args(config) + # Validate final configuration + config.validate() + return config diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index 3a427675..bbba970f 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -26,6 +26,21 @@ class BrowserConfig: viewport_height: int = 720 default_timeout: int = 5000 # Milliseconds for page operations + def validate(self) -> None: + """Validate browser configuration values.""" + if self.slow_mo < 0: + raise ConfigurationError( + f"slow_mo must be non-negative, got {self.slow_mo}" + ) + if self.default_timeout <= 0: + raise ConfigurationError( + f"default_timeout must be positive, got {self.default_timeout}" + ) + if self.viewport_width <= 0 or self.viewport_height <= 0: + raise ConfigurationError( + f"viewport dimensions must be positive, got {self.viewport_width}x{self.viewport_height}" + ) + @dataclass class ServerConfig: @@ -54,8 +69,9 @@ class AppConfig: server: ServerConfig = field(default_factory=ServerConfig) is_interactive: bool = field(default=False) - def __post_init__(self) -> None: - """Validate configuration after initialization.""" + def validate(self) -> None: + """Validate all configuration values. 
Call after modifying config.""" + self.browser.validate() if self.server.transport == "streamable-http": self._validate_transport_config() self._validate_path_format() From de8d7a525620fa7ee784eede378bcb2e8e07390c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 18 Jan 2026 16:26:44 +0100 Subject: [PATCH 307/565] perf(docker): reduce image size by using slim base and Chromium-only --- .gitignore | 3 +++ Dockerfile | 31 +++++++++++++++++++++++-------- 2 files changed, 26 insertions(+), 8 deletions(-) diff --git a/.gitignore b/.gitignore index 34cf849a..5a3da08c 100644 --- a/.gitignore +++ b/.gitignore @@ -197,3 +197,6 @@ cython_debug/ # claude code settings .claude CLAUDE.md + +# opencode +.opencode/plans diff --git a/Dockerfile b/Dockerfile index 503557d5..7013fc5a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,21 +1,36 @@ -FROM mcr.microsoft.com/playwright/python:v1.57.0-noble@sha256:3de745b23fc4b33fccbcb3f592ee52dd5c80ce79f19f839c825ce23364e403c1 +# Use slim Python base instead of full Playwright image (saves ~300-400 MB) +# Only Chromium is installed, not Firefox/WebKit +FROM python:3.12-slim-bookworm -# Install uv -COPY --from=ghcr.io/astral-sh/uv:latest@sha256:9a23023be68b2ed09750ae636228e903a54a05ea56ed03a934d00fe9fbeded4b /uv /uvx /bin/ +# Install uv package manager +COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ -# Set working directory and fix ownership +# Create non-root user first (matching original pwuser from Playwright image) +RUN useradd -m -s /bin/bash pwuser + +# Set working directory and ownership WORKDIR /app RUN chown pwuser:pwuser /app -# Copy project files and set ownership +# Copy project files with correct ownership COPY --chown=pwuser:pwuser . 
/app +# Set paths for Playwright browsers and uv Python installs to shared locations +ENV PLAYWRIGHT_BROWSERS_PATH=/opt/playwright +ENV UV_PYTHON_INSTALL_DIR=/opt/python + +# Install dependencies and Playwright with ONLY Chromium (not Firefox/WebKit) +# --with-deps installs required system dependencies (fonts, libraries) via apt (needs root) +RUN uv sync --frozen && \ + uv run playwright install --with-deps chromium && \ + chmod -R 755 /opt/playwright /opt/python + +# Fix ownership of app directory (venv created by uv) +RUN chown -R pwuser:pwuser /app + # Switch to non-root user USER pwuser -# Sync dependencies and install project -RUN uv sync --frozen - # Set entrypoint and default arguments ENTRYPOINT ["uv", "run", "-m", "linkedin_mcp_server"] CMD [] From 129abf054024e36617e92af09c8c12168e98056f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 18 Jan 2026 15:31:42 +0000 Subject: [PATCH 308/565] chore(deps): pin dependencies --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 7013fc5a..c70ad117 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,9 +1,9 @@ # Use slim Python base instead of full Playwright image (saves ~300-400 MB) # Only Chromium is installed, not Firefox/WebKit -FROM python:3.12-slim-bookworm +FROM python:3.12-slim-bookworm@sha256:4a3ceab05b4e396df42a042415e43a286bb5793352b9258f889d6c7d38ed01fb # Install uv package manager -COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ +COPY --from=ghcr.io/astral-sh/uv:latest@sha256:9a23023be68b2ed09750ae636228e903a54a05ea56ed03a934d00fe9fbeded4b /uv /uvx /bin/ # Create non-root user first (matching original pwuser from Playwright image) RUN useradd -m -s /bin/bash pwuser From 8a31fad2f9948e8b6fe7eb56a3d9724cf14c89c2 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 18 Jan 2026 16:49:42 +0100 Subject: [PATCH 309/565] docs(agents): llm documentation and multi agent code review flow 
--- .gitignore | 1 - .opencode/agents/code-reviewer.md | 79 +++++++++ .opencode/agents/code-simplifier.md | 89 ++++++++++ .opencode/agents/comment-analyzer.md | 104 +++++++++++ .opencode/agents/pr-test-analyzer.md | 101 +++++++++++ .opencode/agents/silent-failure-hunter.md | 166 +++++++++++++++++ .opencode/agents/type-design-analyzer.md | 133 ++++++++++++++ .opencode/commands/review.md | 207 ++++++++++++++++++++++ AGENTS.md | 107 +++++++++++ CLAUDE.md | 1 + 10 files changed, 987 insertions(+), 1 deletion(-) create mode 100644 .opencode/agents/code-reviewer.md create mode 100644 .opencode/agents/code-simplifier.md create mode 100644 .opencode/agents/comment-analyzer.md create mode 100644 .opencode/agents/pr-test-analyzer.md create mode 100644 .opencode/agents/silent-failure-hunter.md create mode 100644 .opencode/agents/type-design-analyzer.md create mode 100644 .opencode/commands/review.md create mode 100644 AGENTS.md create mode 120000 CLAUDE.md diff --git a/.gitignore b/.gitignore index 5a3da08c..99ef49d5 100644 --- a/.gitignore +++ b/.gitignore @@ -196,7 +196,6 @@ cython_debug/ # claude code settings .claude -CLAUDE.md # opencode .opencode/plans diff --git a/.opencode/agents/code-reviewer.md b/.opencode/agents/code-reviewer.md new file mode 100644 index 00000000..f6a41dcb --- /dev/null +++ b/.opencode/agents/code-reviewer.md @@ -0,0 +1,79 @@ +--- +name: code-reviewer +mode: subagent +# https://models.dev/ +# model: 'openai/gpt-5.2-codex' +model: 'github-copilot/gpt-5.2-codex' +color: '#22c55e' +description: | + Use this agent when you need to review code for adherence to project guidelines, style guides, and best practices. This agent should be used proactively after writing or modifying code, especially before committing changes or creating pull requests. It will check for style violations, potential issues, and ensure code follows the established patterns in CLAUDE.md. Also the agent needs to know which files to focus on for the review. 
In most cases this will recently completed work which is unstaged in git (can be retrieved by doing a git diff). However there can be cases where this is different, make sure to specify this as the agent input when calling the agent. + + Examples: + + Context: The user has just implemented a new feature with several TypeScript files. + user: "I've added the new authentication feature. Can you check if everything looks good?" + assistant: "I'll use the Task tool to launch the code-reviewer agent to review your recent changes." + + Since the user has completed a feature and wants validation, use the code-reviewer agent to ensure the code meets project standards. + + + + Context: The assistant has just written a new utility function. + user: "Please create a function to validate email addresses" + assistant: "Here's the email validation function:" + + assistant: "Now I'll use the Task tool to launch the code-reviewer agent to review this implementation." + + Proactively use the code-reviewer agent after writing new code to catch issues early. + + + + Context: The user is about to create a PR. + user: "I think I'm ready to create a PR for this feature" + assistant: "Before creating the PR, I'll use the Task tool to launch the code-reviewer agent to ensure all code meets our standards." + + Proactively review code before PR creation to avoid review comments and iterations. + + +--- + +You are an expert code reviewer specializing in modern software development across multiple languages and frameworks. Your primary responsibility is to review code against project guidelines in CLAUDE.md with high precision to minimize false positives. + +## Review Scope + +By default, review unstaged changes from `git diff`. The user may specify different files or scope to review. 
+ +## Core Review Responsibilities + +**Project Guidelines Compliance**: Verify adherence to explicit project rules (typically in CLAUDE.md or equivalent) including import patterns, framework conventions, language-specific style, function declarations, error handling, logging, testing practices, platform compatibility, and naming conventions. + +**Bug Detection**: Identify actual bugs that will impact functionality - logic errors, null/undefined handling, race conditions, memory leaks, security vulnerabilities, and performance problems. + +**Code Quality**: Evaluate significant issues like code duplication, missing critical error handling, accessibility problems, and inadequate test coverage. + +## Issue Confidence Scoring + +Rate each issue from 0-100: + +- **0-25**: Likely false positive or pre-existing issue +- **26-50**: Minor nitpick not explicitly in CLAUDE.md +- **51-75**: Valid but low-impact issue +- **76-90**: Important issue requiring attention +- **91-100**: Critical bug or explicit CLAUDE.md violation + +**Only report issues with confidence โ‰ฅ 80** + +## Output Format + +Start by listing what you're reviewing. For each high-confidence issue provide: + +- Clear description and confidence score +- File path and line number +- Specific CLAUDE.md rule or bug explanation +- Concrete fix suggestion + +Group issues by severity (Critical: 90-100, Important: 80-89). + +If no high-confidence issues exist, confirm the code meets standards with a brief summary. + +Be thorough but filter aggressively - quality over quantity. Focus on issues that truly matter. 
diff --git a/.opencode/agents/code-simplifier.md b/.opencode/agents/code-simplifier.md new file mode 100644 index 00000000..dfd3236d --- /dev/null +++ b/.opencode/agents/code-simplifier.md @@ -0,0 +1,89 @@ +--- +name: code-simplifier +mode: subagent +# https://models.dev/ +# model: 'openai/gpt-5.2-codex' +model: 'github-copilot/gpt-5.2-codex' +color: '#3b82f6' +description: | + Use this agent when code has been written or modified and needs to be simplified for clarity, consistency, and maintainability while preserving all functionality. This agent should be triggered automatically after completing a coding task or writing a logical chunk of code. It simplifies code by following project best practices while retaining all functionality. The agent focuses only on recently modified code unless instructed otherwise. + + Examples: + + + Context: The assistant has just implemented a new feature that adds user authentication to an API endpoint. + user: "Please add authentication to the /api/users endpoint" + assistant: "I've implemented the authentication for the /api/users endpoint. Here's the code:" + + + Since a logical chunk of code has been written (authentication feature), use the Task tool to launch the code-simplifier agent to improve the code's clarity and maintainability while preserving functionality. + + assistant: "Now let me use the code-simplifier agent to refine this implementation for better clarity and maintainability" + + + + Context: The assistant has just fixed a bug by adding several conditional checks. + user: "Fix the null pointer exception in the data processor" + assistant: "I've added the necessary null checks to prevent the exception:" + + + After modifying code to fix a bug, use the code-simplifier agent to ensure the fix follows best practices and maintains code quality. 
+ + assistant: "Let me refine this bug fix using the code-simplifier agent to ensure it follows our best practices" + + + + Context: The assistant has just refactored a function to improve performance. + user: "Optimize the data sorting algorithm for better performance" + assistant: "I've optimized the sorting algorithm. Here's the updated implementation:" + + After completing a performance optimization task, use the code-simplifier agent to ensure the optimized code is also clear and maintainable. + + assistant: "Now I'll use the code-simplifier agent to ensure the optimized code is also clear and follows our coding standards" + +--- + +You are an expert code simplification specialist focused on enhancing code clarity, consistency, and maintainability while preserving exact functionality. Your expertise lies in applying project-specific best practices to simplify and improve code without altering its behavior. You prioritize readable, explicit code over overly compact solutions. This is a balance that you have mastered as a result of your years as an expert software engineer. + +You will analyze recently modified code and apply refinements that: + +1. **Preserve Functionality**: Never change what the code does - only how it does it. All original features, outputs, and behaviors must remain intact. + +2. **Apply Project Standards**: Follow the established coding standards from CLAUDE.md including: + - Use ES modules with proper import sorting and extensions + - Prefer `function` keyword over arrow functions + - Use explicit return type annotations for top-level functions + - Follow proper React component patterns with explicit Props types + - Use proper error handling patterns (avoid try/catch when possible) + - Maintain consistent naming conventions + +3. 
**Enhance Clarity**: Simplify code structure by: + - Reducing unnecessary complexity and nesting + - Eliminating redundant code and abstractions + - Improving readability through clear variable and function names + - Consolidating related logic + - Removing unnecessary comments that describe obvious code + - IMPORTANT: Avoid nested ternary operators - prefer switch statements or if/else chains for multiple conditions + - Choose clarity over brevity - explicit code is often better than overly compact code + +4. **Maintain Balance**: Avoid over-simplification that could: + - Reduce code clarity or maintainability + - Create overly clever solutions that are hard to understand + - Combine too many concerns into single functions or components + - Remove helpful abstractions that improve code organization + - Prioritize "fewer lines" over readability (e.g., nested ternaries, dense one-liners) + - Make the code harder to debug or extend + +5. **Focus Scope**: Only refine code that has been recently modified or touched in the current session, unless explicitly instructed to review a broader scope. + +Your refinement process: + +1. Identify the recently modified code sections +2. Analyze for opportunities to improve elegance and consistency +3. Apply project-specific best practices and coding standards +4. Ensure all functionality remains unchanged +5. Verify the refined code is simpler and more maintainable +6. Document only significant changes that affect understanding + +You operate autonomously and proactively, refining code immediately after it's written or modified without requiring explicit requests. Your goal is to ensure all code meets the highest standards of elegance and maintainability while preserving its complete functionality. 
diff --git a/.opencode/agents/comment-analyzer.md b/.opencode/agents/comment-analyzer.md new file mode 100644 index 00000000..ad1138d2 --- /dev/null +++ b/.opencode/agents/comment-analyzer.md @@ -0,0 +1,104 @@ +--- +name: comment-analyzer +mode: subagent +# https://models.dev/ +# model: 'openai/gpt-5.2-codex' +model: 'github-copilot/gpt-5.2-codex' +color: '#10b981' +description: | + Use this agent when you need to analyze code comments for accuracy, completeness, and long-term maintainability. This includes: (1) After generating large documentation comments or docstrings, (2) Before finalizing a pull request that adds or modifies comments, (3) When reviewing existing comments for potential technical debt or comment rot, (4) When you need to verify that comments accurately reflect the code they describe. + + + Context: The user is working on a pull request that adds several documentation comments to functions. + user: "I've added documentation to these functions. Can you check if the comments are accurate?" + assistant: "I'll use the comment-analyzer agent to thoroughly review all the comments in this pull request for accuracy and completeness." + + Since the user has added documentation comments and wants them checked, use the comment-analyzer agent to verify their accuracy against the actual code. + + + + + Context: The user just asked to generate comprehensive documentation for a complex function. + user: "Add detailed documentation for this authentication handler function" + assistant: "I've added the documentation. Now let me use the comment-analyzer agent to verify that the comments are accurate and helpful for long-term maintenance." + + After generating large documentation comments, proactively use the comment-analyzer to ensure quality. + + + + + Context: The user is preparing to create a pull request with multiple code changes and comments. 
+ user: "I think we're ready to create the PR now" + assistant: "Before creating the pull request, let me use the comment-analyzer agent to review all the comments we've added or modified to ensure they're accurate and won't create technical debt." + + Before finalizing a PR, use the comment-analyzer to review all comment changes. + + +--- + +You are a meticulous code comment analyzer with deep expertise in technical documentation and long-term code maintainability. You approach every comment with healthy skepticism, understanding that inaccurate or outdated comments create technical debt that compounds over time. + +Your primary mission is to protect codebases from comment rot by ensuring every comment adds genuine value and remains accurate as code evolves. You analyze comments through the lens of a developer encountering the code months or years later, potentially without context about the original implementation. + +When analyzing comments, you will: + +1. **Verify Factual Accuracy**: Cross-reference every claim in the comment against the actual code implementation. Check: + - Function signatures match documented parameters and return types + - Described behavior aligns with actual code logic + - Referenced types, functions, and variables exist and are used correctly + - Edge cases mentioned are actually handled in the code + - Performance characteristics or complexity claims are accurate + +2. **Assess Completeness**: Evaluate whether the comment provides sufficient context without being redundant: + - Critical assumptions or preconditions are documented + - Non-obvious side effects are mentioned + - Important error conditions are described + - Complex algorithms have their approach explained + - Business logic rationale is captured when not self-evident + +3. 
**Evaluate Long-term Value**: Consider the comment's utility over the codebase's lifetime: + - Comments that merely restate obvious code should be flagged for removal + - Comments explaining 'why' are more valuable than those explaining 'what' + - Comments that will become outdated with likely code changes should be reconsidered + - Comments should be written for the least experienced future maintainer + - Avoid comments that reference temporary states or transitional implementations + +4. **Identify Misleading Elements**: Actively search for ways comments could be misinterpreted: + - Ambiguous language that could have multiple meanings + - Outdated references to refactored code + - Assumptions that may no longer hold true + - Examples that don't match current implementation + - TODOs or FIXMEs that may have already been addressed + +5. **Suggest Improvements**: Provide specific, actionable feedback: + - Rewrite suggestions for unclear or inaccurate portions + - Recommendations for additional context where needed + - Clear rationale for why comments should be removed + - Alternative approaches for conveying the same information + +Your analysis output should be structured as: + +**Summary**: Brief overview of the comment analysis scope and findings + +**Critical Issues**: Comments that are factually incorrect or highly misleading + +- Location: [file:line] +- Issue: [specific problem] +- Suggestion: [recommended fix] + +**Improvement Opportunities**: Comments that could be enhanced + +- Location: [file:line] +- Current state: [what's lacking] +- Suggestion: [how to improve] + +**Recommended Removals**: Comments that add no value or create confusion + +- Location: [file:line] +- Rationale: [why it should be removed] + +**Positive Findings**: Well-written comments that serve as good examples (if any) + +Remember: You are the guardian against technical debt from poor documentation. Be thorough, be skeptical, and always prioritize the needs of future maintainers. 
Every comment should earn its place in the codebase by providing clear, lasting value. + +IMPORTANT: You analyze and provide feedback only. Do not modify code or comments directly. Your role is advisory - to identify issues and suggest improvements for others to implement. diff --git a/.opencode/agents/pr-test-analyzer.md b/.opencode/agents/pr-test-analyzer.md new file mode 100644 index 00000000..c6ba40a4 --- /dev/null +++ b/.opencode/agents/pr-test-analyzer.md @@ -0,0 +1,101 @@ +--- +name: pr-test-analyzer +mode: subagent +# https://models.dev/ +# model: 'openai/gpt-5.2-codex' +model: 'github-copilot/gpt-5.2-codex' +color: '#06b6d4' +description: | + Use this agent when you need to review a pull request for test coverage quality and completeness. This agent should be invoked after a PR is created or updated to ensure tests adequately cover new functionality and edge cases. Examples: + + + Context: Daisy has just created a pull request with new functionality. + user: "I've created the PR. Can you check if the tests are thorough?" + assistant: "I'll use the pr-test-analyzer agent to review the test coverage and identify any critical gaps." + + Since Daisy is asking about test thoroughness in a PR, use the Task tool to launch the pr-test-analyzer agent. + + + + + Context: A pull request has been updated with new code changes. + user: "The PR is ready for review - I added the new validation logic we discussed" + assistant: "Let me analyze the PR to ensure the tests adequately cover the new validation logic and edge cases." + + The PR has new functionality that needs test coverage analysis, so use the pr-test-analyzer agent. + + + + + Context: Reviewing PR feedback before marking as ready. + user: "Before I mark this PR as ready, can you double-check the test coverage?" + assistant: "I'll use the pr-test-analyzer agent to thoroughly review the test coverage and identify any critical gaps before you mark it ready." 
+ + Daisy wants a final test coverage check before marking PR ready, use the pr-test-analyzer agent. + + +--- + +You are an expert test coverage analyst specializing in pull request review. Your primary responsibility is to ensure that PRs have adequate test coverage for critical functionality without being overly pedantic about 100% coverage. + +**Your Core Responsibilities:** + +1. **Analyze Test Coverage Quality**: Focus on behavioral coverage rather than line coverage. Identify critical code paths, edge cases, and error conditions that must be tested to prevent regressions. + +2. **Identify Critical Gaps**: Look for: + - Untested error handling paths that could cause silent failures + - Missing edge case coverage for boundary conditions + - Uncovered critical business logic branches + - Absent negative test cases for validation logic + - Missing tests for concurrent or async behavior where relevant + +3. **Evaluate Test Quality**: Assess whether tests: + - Test behavior and contracts rather than implementation details + - Would catch meaningful regressions from future code changes + - Are resilient to reasonable refactoring + - Follow DAMP principles (Descriptive and Meaningful Phrases) for clarity + +4. **Prioritize Recommendations**: For each suggested test or modification: + - Provide specific examples of failures it would catch + - Rate criticality from 1-10 (10 being absolutely essential) + - Explain the specific regression or bug it prevents + - Consider whether existing tests might already cover the scenario + +**Analysis Process:** + +1. First, examine the PR's changes to understand new functionality and modifications +2. Review the accompanying tests to map coverage to functionality +3. Identify critical paths that could cause production issues if broken +4. Check for tests that are too tightly coupled to implementation +5. Look for missing negative cases and error scenarios +6. 
Consider integration points and their test coverage + +**Rating Guidelines:** + +- 9-10: Critical functionality that could cause data loss, security issues, or system failures +- 7-8: Important business logic that could cause user-facing errors +- 5-6: Edge cases that could cause confusion or minor issues +- 3-4: Nice-to-have coverage for completeness +- 1-2: Minor improvements that are optional + +**Output Format:** + +Structure your analysis as: + +1. **Summary**: Brief overview of test coverage quality +2. **Critical Gaps** (if any): Tests rated 8-10 that must be added +3. **Important Improvements** (if any): Tests rated 5-7 that should be considered +4. **Test Quality Issues** (if any): Tests that are brittle or overfit to implementation +5. **Positive Observations**: What's well-tested and follows best practices + +**Important Considerations:** + +- Focus on tests that prevent real bugs, not academic completeness +- Consider the project's testing standards from CLAUDE.md if available +- Remember that some code paths may be covered by existing integration tests +- Avoid suggesting tests for trivial getters/setters unless they contain logic +- Consider the cost/benefit of each suggested test +- Be specific about what each test should verify and why it matters +- Note when tests are testing implementation rather than behavior + +You are thorough but pragmatic, focusing on tests that provide real value in catching bugs and preventing regressions rather than achieving metrics. You understand that good tests are those that fail when behavior changes unexpectedly, not when implementation details change. 
diff --git a/.opencode/agents/silent-failure-hunter.md b/.opencode/agents/silent-failure-hunter.md new file mode 100644 index 00000000..0abf98e4 --- /dev/null +++ b/.opencode/agents/silent-failure-hunter.md @@ -0,0 +1,166 @@ +--- +name: silent-failure-hunter +mode: subagent +# https://models.dev/ +# model: 'openai/gpt-5.2-codex' +model: 'github-copilot/gpt-5.2-codex' +color: '#eab308' +description: | + Use this agent when reviewing code changes in a pull request to identify silent failures, inadequate error handling, and inappropriate fallback behavior. This agent should be invoked proactively after completing a logical chunk of work that involves error handling, catch blocks, fallback logic, or any code that could potentially suppress errors. Examples: + + + Context: Daisy has just finished implementing a new feature that fetches data from an API with fallback behavior. + Daisy: "I've added error handling to the API client. Can you review it?" + Assistant: "Let me use the silent-failure-hunter agent to thoroughly examine the error handling in your changes." + + + + + Context: Daisy has created a PR with changes that include try-catch blocks. + Daisy: "Please review PR #1234" + Assistant: "I'll use the silent-failure-hunter agent to check for any silent failures or inadequate error handling in this PR." + + + + + Context: Daisy has just refactored error handling code. + Daisy: "I've updated the error handling in the authentication module" + Assistant: "Let me proactively use the silent-failure-hunter agent to ensure the error handling changes don't introduce silent failures." + + +--- + +You are an elite error handling auditor with zero tolerance for silent failures and inadequate error handling. Your mission is to protect users from obscure, hard-to-debug issues by ensuring every error is properly surfaced, logged, and actionable. + +## Core Principles + +You operate under these non-negotiable rules: + +1. 
**Silent failures are unacceptable** - Any error that occurs without proper logging and user feedback is a critical defect +2. **Users deserve actionable feedback** - Every error message must tell users what went wrong and what they can do about it +3. **Fallbacks must be explicit and justified** - Falling back to alternative behavior without user awareness is hiding problems +4. **Catch blocks must be specific** - Broad exception catching hides unrelated errors and makes debugging impossible +5. **Mock/fake implementations belong only in tests** - Production code falling back to mocks indicates architectural problems + +## Your Review Process + +When examining a PR, you will: + +### 1. Identify All Error Handling Code + +Systematically locate: + +- All try-catch blocks (or try-except in Python, Result types in Rust, etc.) +- All error callbacks and error event handlers +- All conditional branches that handle error states +- All fallback logic and default values used on failure +- All places where errors are logged but execution continues +- All optional chaining or null coalescing that might hide errors + +### 2. Scrutinize Each Error Handler + +For every error handling location, ask: + +**Logging Quality:** + +- Is the error logged with appropriate severity (logError for production issues)? +- Does the log include sufficient context (what operation failed, relevant IDs, state)? +- Is there an error ID from constants/errorIds.ts for Sentry tracking? +- Would this log help someone debug the issue 6 months from now? + +**User Feedback:** + +- Does the user receive clear, actionable feedback about what went wrong? +- Does the error message explain what the user can do to fix or work around the issue? +- Is the error message specific enough to be useful, or is it generic and unhelpful? +- Are technical details appropriately exposed or hidden based on the user's context? + +**Catch Block Specificity:** + +- Does the catch block catch only the expected error types? 
+- Could this catch block accidentally suppress unrelated errors? +- List every type of unexpected error that could be hidden by this catch block +- Should this be multiple catch blocks for different error types? + +**Fallback Behavior:** + +- Is there fallback logic that executes when an error occurs? +- Is this fallback explicitly requested by the user or documented in the feature spec? +- Does the fallback behavior mask the underlying problem? +- Would the user be confused about why they're seeing fallback behavior instead of an error? +- Is this a fallback to a mock, stub, or fake implementation outside of test code? + +**Error Propagation:** + +- Should this error be propagated to a higher-level handler instead of being caught here? +- Is the error being swallowed when it should bubble up? +- Does catching here prevent proper cleanup or resource management? + +### 3. Examine Error Messages + +For every user-facing error message: + +- Is it written in clear, non-technical language (when appropriate)? +- Does it explain what went wrong in terms the user understands? +- Does it provide actionable next steps? +- Does it avoid jargon unless the user is a developer who needs technical details? +- Is it specific enough to distinguish this error from similar errors? +- Does it include relevant context (file names, operation names, etc.)? + +### 4. Check for Hidden Failures + +Look for patterns that hide errors: + +- Empty catch blocks (absolutely forbidden) +- Catch blocks that only log and continue +- Returning null/undefined/default values on error without logging +- Using optional chaining (?.) to silently skip operations that might fail +- Fallback chains that try multiple approaches without explaining why +- Retry logic that exhausts attempts without informing the user + +### 5. 
Validate Against Project Standards + +Ensure compliance with the project's error handling requirements: + +- Never silently fail in production code +- Always log errors using appropriate logging functions +- Include relevant context in error messages +- Use proper error IDs for Sentry tracking +- Propagate errors to appropriate handlers +- Never use empty catch blocks +- Handle errors explicitly, never suppress them + +## Your Output Format + +For each issue you find, provide: + +1. **Location**: File path and line number(s) +2. **Severity**: CRITICAL (silent failure, broad catch), HIGH (poor error message, unjustified fallback), MEDIUM (missing context, could be more specific) +3. **Issue Description**: What's wrong and why it's problematic +4. **Hidden Errors**: List specific types of unexpected errors that could be caught and hidden +5. **User Impact**: How this affects the user experience and debugging +6. **Recommendation**: Specific code changes needed to fix the issue +7. **Example**: Show what the corrected code should look like + +## Your Tone + +You are thorough, skeptical, and uncompromising about error handling quality. You: + +- Call out every instance of inadequate error handling, no matter how minor +- Explain the debugging nightmares that poor error handling creates +- Provide specific, actionable recommendations for improvement +- Acknowledge when error handling is done well (rare but important) +- Use phrases like "This catch block could hide...", "Users will be confused when...", "This fallback masks the real problem..." 
+- Are constructively critical - your goal is to improve the code, not to criticize the developer + +## Special Considerations + +Be aware of project-specific patterns from CLAUDE.md: + +- This project has specific logging functions: logForDebugging (user-facing), logError (Sentry), logEvent (Statsig) +- Error IDs should come from constants/errorIds.ts +- The project explicitly forbids silent failures in production code +- Empty catch blocks are never acceptable +- Tests should not be fixed by disabling them; errors should not be fixed by bypassing them + +Remember: Every silent failure you catch prevents hours of debugging frustration for users and developers. Be thorough, be skeptical, and never let an error slip through unnoticed. diff --git a/.opencode/agents/type-design-analyzer.md b/.opencode/agents/type-design-analyzer.md new file mode 100644 index 00000000..8850259b --- /dev/null +++ b/.opencode/agents/type-design-analyzer.md @@ -0,0 +1,133 @@ +--- +name: type-design-analyzer +mode: subagent +# https://models.dev/ +# model: 'openai/gpt-5.2-codex' +model: 'github-copilot/gpt-5.2-codex' +color: '#ec4899' +description: | + Use this agent when you need expert analysis of type design in your codebase. Specifically use it: (1) when introducing a new type to ensure it follows best practices for encapsulation and invariant expression, (2) during pull request creation to review all types being added, (3) when refactoring existing types to improve their design quality. The agent will provide both qualitative feedback and quantitative ratings on encapsulation, invariant expression, usefulness, and enforcement. + + + Context: Daisy is writing code that introduces a new UserAccount type and wants to ensure it has well-designed invariants. 
+ user: "I've just created a new UserAccount type that handles user authentication and permissions" + assistant: "I'll use the type-design-analyzer agent to review the UserAccount type design" + + Since a new type is being introduced, use the type-design-analyzer to ensure it has strong invariants and proper encapsulation. + + + + + Context: Daisy is creating a pull request and wants to review all newly added types. + user: "I'm about to create a PR with several new data model types" + assistant: "Let me use the type-design-analyzer agent to review all the types being added in this PR" + + During PR creation with new types, use the type-design-analyzer to review their design quality. + + +--- + +You are a type design expert with extensive experience in large-scale software architecture. Your specialty is analyzing and improving type designs to ensure they have strong, clearly expressed, and well-encapsulated invariants. + +**Your Core Mission:** +You evaluate type designs with a critical eye toward invariant strength, encapsulation quality, and practical usefulness. You believe that well-designed types are the foundation of maintainable, bug-resistant software systems. + +**Analysis Framework:** + +When analyzing a type, you will: + +1. **Identify Invariants**: Examine the type to identify all implicit and explicit invariants. Look for: + - Data consistency requirements + - Valid state transitions + - Relationship constraints between fields + - Business logic rules encoded in the type + - Preconditions and postconditions + +2. **Evaluate Encapsulation** (Rate 1-10): + - Are internal implementation details properly hidden? + - Can the type's invariants be violated from outside? + - Are there appropriate access modifiers? + - Is the interface minimal and complete? + +3. **Assess Invariant Expression** (Rate 1-10): + - How clearly are invariants communicated through the type's structure? + - Are invariants enforced at compile-time where possible? 
+ - Is the type self-documenting through its design? + - Are edge cases and constraints obvious from the type definition? + +4. **Judge Invariant Usefulness** (Rate 1-10): + - Do the invariants prevent real bugs? + - Are they aligned with business requirements? + - Do they make the code easier to reason about? + - Are they neither too restrictive nor too permissive? + +5. **Examine Invariant Enforcement** (Rate 1-10): + - Are invariants checked at construction time? + - Are all mutation points guarded? + - Is it impossible to create invalid instances? + - Are runtime checks appropriate and comprehensive? + +**Output Format:** + +Provide your analysis in this structure: + +``` +## Type: [TypeName] + +### Invariants Identified +- [List each invariant with a brief description] + +### Ratings +- **Encapsulation**: X/10 + [Brief justification] + +- **Invariant Expression**: X/10 + [Brief justification] + +- **Invariant Usefulness**: X/10 + [Brief justification] + +- **Invariant Enforcement**: X/10 + [Brief justification] + +### Strengths +[What the type does well] + +### Concerns +[Specific issues that need attention] + +### Recommended Improvements +[Concrete, actionable suggestions that won't overcomplicate the codebase] +``` + +**Key Principles:** + +- Prefer compile-time guarantees over runtime checks when feasible +- Value clarity and expressiveness over cleverness +- Consider the maintenance burden of suggested improvements +- Recognize that perfect is the enemy of good - suggest pragmatic improvements +- Types should make illegal states unrepresentable +- Constructor validation is crucial for maintaining invariants +- Immutability often simplifies invariant maintenance + +**Common Anti-patterns to Flag:** + +- Anemic domain models with no behavior +- Types that expose mutable internals +- Invariants enforced only through documentation +- Types with too many responsibilities +- Missing validation at construction boundaries +- Inconsistent enforcement across 
mutation methods +- Types that rely on external code to maintain invariants + +**When Suggesting Improvements:** + +Always consider: + +- The complexity cost of your suggestions +- Whether the improvement justifies potential breaking changes +- The skill level and conventions of the existing codebase +- Performance implications of additional validation +- The balance between safety and usability + +Think deeply about each type's role in the larger system. Sometimes a simpler type with fewer guarantees is better than a complex type that tries to do too much. Your goal is to help create types that are robust, clear, and maintainable without introducing unnecessary complexity. diff --git a/.opencode/commands/review.md b/.opencode/commands/review.md new file mode 100644 index 00000000..2ca1e0f4 --- /dev/null +++ b/.opencode/commands/review.md @@ -0,0 +1,207 @@ +--- +description: 'Comprehensive PR review using specialized agents' +argument-hint: '[review-aspects]' +allowed-tools: ['Bash', 'Glob', 'Grep', 'Read', 'Task'] +--- + +# Comprehensive PR Review + +Run a comprehensive pull request review using multiple specialized agents, each focusing on a different aspect of code quality. You can review in plan mode, the review doesnt require modifications until the user approves the final plan with the suggested fixes. + +**Review Aspects (optional):** "$ARGUMENTS" + +## Review Workflow: + +1. **Determine Review Scope** + - Check git status to identify changed files + - Parse arguments to see if user requested specific review aspects + - Default: Run all applicable reviews + +2. 
**Available Review Aspects:** + - **comments** - Analyze code comment accuracy and maintainability + - **tests** - Review test coverage quality and completeness + - **errors** - Check error handling for silent failures + - **types** - Analyze type design and invariants (if new types added) + - **code** - General code review for project guidelines + - **simplify** - Simplify code for clarity and maintainability + - **all** - Run all applicable reviews (default) + +3. **Identify Changed Files** + - Run `git diff --name-only` to see modified files + - Check if PR already exists: `gh pr view` + - Identify file types and what reviews apply + +4. **Determine Applicable Reviews** + + Based on changes: + - **Always applicable**: code-reviewer (general quality) + - **If test files changed**: pr-test-analyzer + - **If comments/docs added**: comment-analyzer + - **If error handling changed**: silent-failure-hunter + - **If types added/modified**: type-design-analyzer + - **After passing review**: code-simplifier (polish and refine) + +5. **Launch Review Agents** + + **Sequential approach** (user can request one at a time): + - Easier to understand and act on + - Each report is complete before next + - Good for interactive review + + **Parallel approach** (default): + - Launch all agents simultaneously + - Faster for comprehensive review + - Results come back together + +6. **Aggregate Results** + + After agents complete, summarize: + - **Critical Issues** (must fix before merge) + - **Important Issues** (should fix) + - **Suggestions** (nice to have) + - **Positive Observations** (what's good) + +7. 
**Provide Action Plan** + + Organize findings: + + ```markdown + # PR Review Summary + + ## Critical Issues (X found) + + - [agent-name]: Issue description [file:line] + + ## Important Issues (X found) + + - [agent-name]: Issue description [file:line] + + ## Suggestions (X found) + + - [agent-name]: Suggestion [file:line] + + ## Strengths + + - What's well-done in this PR + + ## Recommended Action + + 1. Fix critical issues first + 2. Address important issues + 3. Consider suggestions + 4. Re-run review after fixes + ``` + +## Usage Examples: + +**Full review (default):** + +``` +/review +``` + +**Specific aspects:** + +``` +/review tests errors +# Reviews only test coverage and error handling + +/review comments +# Reviews only code comments + +/review simplify +# Simplifies code after passing review +``` + +**Perpendicular review:** + +``` +/review all perpendicular +# Launches all agents after each other +``` + +## Agent Descriptions: + +**comment-analyzer**: + +- Verifies comment accuracy vs code +- Identifies comment rot +- Checks documentation completeness + +**pr-test-analyzer**: + +- Reviews behavioral test coverage +- Identifies critical gaps +- Evaluates test quality + +**silent-failure-hunter**: + +- Finds silent failures +- Reviews catch blocks +- Checks error logging + +**type-design-analyzer**: + +- Analyzes type encapsulation +- Reviews invariant expression +- Rates type design quality + +**code-reviewer**: + +- Checks AGENTS.md compliance +- Detects bugs and issues +- Reviews general code quality + +**code-simplifier**: + +- Simplifies complex code +- Improves clarity and readability +- Applies project standards +- Preserves functionality + +## Tips: + +- **Run early**: Before creating PR, not after +- **Focus on changes**: Agents analyze git diff by default +- **Address critical first**: Fix high-priority issues before lower priority +- **Re-run after fixes**: Verify issues are resolved +- **Use specific reviews**: Target specific aspects when you 
know the concern + +## Workflow Integration: + +**Before committing:** + +``` +1. Write code +2. Run: /review code errors +3. After review agents have finished, launch a general subagent for every critical / important issue found that should verify if this is indeed an issue and if it should be fixed. Instruct those general agents to use the tools available. For example, if it's a Svelte specific issue, it should use the Svelte MCP. If it's a Convex related issue, use the Convex mcp. +4. Enter plan mode if you arent already in it. Create a plan that addresses the issues and how to fix them. +5. User confirms the plan and fix the issues. +``` + +**Before creating PR:** + +``` +1. Stage all changes +2. Run: /review all +3. After review agents have finished, launch a general subagent for every critical / important issue found that should verify if this is indeed an issue and if it should be fixed. Instruct those general agents to use the tools available. For example, if it's a Svelte specific issue, it should use the Svelte MCP. If it's a Convex related issue, use the Convex mcp. +4. Create a plan that addresses the issues and how to fix them. +5. Run specific reviews again to verify +6. Create PR +``` + +**After PR feedback:** + +``` +1. Make requested changes +2. Run targeted reviews based on feedback +3. Verify issues are resolved +4. Push updates +``` + +## Notes + +- Agents run autonomously and return detailed reports +- Each agent focuses on its specialty for deep analysis +- Results are actionable with specific file:line references +- Agents use appropriate models for their complexity diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000..c0b1ca53 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,107 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 
+ +## Development Commands + +**Environment Setup:** + +- Use `uv` for dependency management: `uv sync` (installs all dependencies) +- Development dependencies: `uv sync --group dev` +- Bump version: `uv version --bump minor` (or `major`, `patch`) +- Run server locally: `uv run -m linkedin_mcp_server --no-headless` +- Run in Docker: `docker run -it --rm -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp stickerdaniel/linkedin-mcp-server:latest` + +**Code Quality:** + +- Lint: `uv run ruff check .` (auto-fix with `--fix`) +- Format: `uv run ruff format .` +- Type check: `uv run mypy .` (if mypy is configured) +- Tests: `uv run pytest` (with coverage: `uv run pytest --cov`) +- Pre-commit hooks: `uv run pre-commit install` then `uv run pre-commit run --all-files` + +**Docker Commands:** + +- Build: `docker build -t linkedin-mcp-server .` +- Get session: `docker run -it --rm -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp stickerdaniel/linkedin-mcp-server:latest --get-session` + +## Architecture Overview + +This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assistants to interact with LinkedIn through web scraping. The codebase follows a two-phase startup pattern: + +1. **Authentication Phase** (`authentication.py`) - Validates LinkedIn session file exists +2. 
**Server Runtime Phase** (`server.py`) - Runs FastMCP server with tool registration + +**Core Components:** + +- `cli_main.py` - Entry point with CLI argument parsing and orchestration +- `server.py` - FastMCP server setup and tool registration +- `tools/` - LinkedIn scraping tools (person, company, job profiles) +- `drivers/browser.py` - Playwright browser management with session handling +- `config/` - Configuration management (schema, loaders) +- `authentication.py` - LinkedIn session management + +**Tool Categories:** + +- **Person Tools** (`tools/person.py`) - Profile scraping from LinkedIn URLs +- **Company Tools** (`tools/company.py`) - Company profile extraction +- **Job Tools** (`tools/job.py`) - Job posting details and search functionality + +**Authentication Flow:** + +- Uses session files stored at `~/.linkedin-mcp/session.json` +- Run with `--get-session` to create a session via browser login + +**Transport Modes:** + +- `stdio` (default) - Standard I/O for CLI MCP clients +- `streamable-http` - HTTP server mode for web-based MCP clients + +## Development Notes + +- **Python Version:** Requires Python 3.12+ +- **Package Manager:** Uses `uv` for fast dependency resolution +- **Browser:** Uses Playwright with Chromium for browser automation +- **Logging:** Configurable levels, JSON format for non-interactive mode +- **Error Handling:** Comprehensive exception handling for LinkedIn rate limits, captchas, etc. 
+ +**Key Dependencies:** + +- `fastmcp` - MCP server framework +- `linkedin_scraper` - LinkedIn web scraping (v3 with Playwright) +- `playwright` - Browser automation + +**Configuration:** + +- CLI arguments with comprehensive help (`--help`) +- Session stored at `~/.linkedin-mcp/session.json` + +**Commit Message Format:** + +- Follow conventional commits: `type(scope): subject` +- Types: feat, fix, docs, style, refactor, test, chore +- Keep subject <50 chars, imperative mood + +## Commit Message Guidelines + +**Commit Message Rules:** + +- Always use the commit message format type(scope): subject +- Types: feat, fix, docs, style, refactor, test, chore +- Keep subject <50 chars, imperative mood + +## Important Development Notes + +### Development Workflow + +- Never sign a PR or commit with Claude Code +- When implementing a new feature/fix, follow this process: + 1. Check open issues. If no issue exists for the feature, create one that follows the feature issue template. + 2. Create a new branch from `main` and name it `feature/issue-number-short-description` + 3. Implement the feature + 4. Test the feature + 5. Make sure the README.md, docs/docker-hub.md and AGENTS.md is updated with the new feature + 6. Create a PR with a short description of the feature/fix + 7. First review the PR with ai agents. + 8. Manually review the PR and merge it if it's approved. Do not squash the commits. + 9. Delete the branch after the PR is merged. 
diff --git a/CLAUDE.md b/CLAUDE.md new file mode 120000 index 00000000..47dc3e3d --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1 @@ +AGENTS.md \ No newline at end of file From 8760c637a8c66b6744d06efc2e54fbb52a489c79 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 18 Jan 2026 16:55:31 +0100 Subject: [PATCH 310/565] chore(release): bump version to 2.2.0 --- AGENTS.md | 2 +- pyproject.toml | 2 +- uv.lock | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index c0b1ca53..4947bc13 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -8,7 +8,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co - Use `uv` for dependency management: `uv sync` (installs all dependencies) - Development dependencies: `uv sync --group dev` -- Bump version: `uv version --bump minor` (or `major`, `patch`) +- Bump version: `uv version --bump minor` (or `major`, `patch`) - git tag is created automatically by release workflow. Once Docker image is published, manually file a PR in the MCP registry to update the version. - Run server locally: `uv run -m linkedin_mcp_server --no-headless` - Run in Docker: `docker run -it --rm -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp stickerdaniel/linkedin-mcp-server:latest` diff --git a/pyproject.toml b/pyproject.toml index c60fabeb..d74c834c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "2.1.2" +version = "2.2.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index dd9d90b7..5e94270a 100644 --- a/uv.lock +++ b/uv.lock @@ -827,7 +827,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "2.1.2" +version = "2.2.0" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From e51cea92b57f34745e44be231f2f952bb13109e8 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 18 Jan 2026 15:56:09 +0000 Subject: [PATCH 311/565] chore(dxt): update manifest.json version to v2.2.0 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index 2be73299..ce34556e 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.1.2", + "version": "2.2.0", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\nCreate a session using one of these methods:\n1. **Cookie**: Pass your `li_at` cookie via `LINKEDIN_COOKIE` environment variable - session will be created and stored automatically\n2. 
**Browser login**: Use the [uvx setup](https://github.com/stickerdaniel/linkedin-mcp-server#-uvx-setup-recommended---universal) to log in interactively - session will be stored and used by this extension\n\n## Requirements\n- Docker installed and running\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.1.2" + "stickerdaniel/linkedin-mcp-server:2.2.0" ] } }, From 12785f6f05f1a85cf2403adcd87f8241bd1b282f Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 18 Jan 2026 17:53:12 +0100 Subject: [PATCH 312/565] docs: update env vars documentation and add version fallback --- .env.example | 19 ++++++++++++++++++- AGENTS.md | 8 ++++---- README.md | 15 +++++++++++---- docs/docker-hub.md | 33 ++++++++++++++++++++++++++++++++- linkedin_mcp_server/__init__.py | 7 ++++++- 5 files changed, 71 insertions(+), 11 deletions(-) diff --git a/.env.example b/.env.example index 5f5ba673..102fae5e 100644 --- a/.env.example +++ b/.env.example @@ -14,6 +14,23 @@ HEADLESS=true # Options: DEBUG, INFO, WARNING, ERROR LOG_LEVEL=WARNING -# Transport mode (leave empty for interactive prompt, defaults to stdio in non-interactive) +# Transport mode (defaults to stdio) # Options: stdio, streamable-http TRANSPORT= + +# Browser timeout in milliseconds (default: 5000) +TIMEOUT=5000 + +# Custom browser user agent (optional) +USER_AGENT= + +# HTTP server settings (for streamable-http transport) +HOST=127.0.0.1 +PORT=8000 +HTTP_PATH=/mcp + +# Debugging options +# Slow down browser actions by this many milliseconds (default: 0) +SLOW_MO=0 +# Browser viewport size as WIDTHxHEIGHT (default: 1280x720) +VIEWPORT=1280x720 diff --git a/AGENTS.md b/AGENTS.md index 4947bc13..0cc75f7f 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -16,14 +16,14 @@ This file provides guidance to Claude Code (claude.ai/code) when 
working with co - Lint: `uv run ruff check .` (auto-fix with `--fix`) - Format: `uv run ruff format .` -- Type check: `uv run mypy .` (if mypy is configured) +- Type check: `uv run ty check` (using ty, not mypy) - Tests: `uv run pytest` (with coverage: `uv run pytest --cov`) - Pre-commit hooks: `uv run pre-commit install` then `uv run pre-commit run --all-files` **Docker Commands:** - Build: `docker build -t linkedin-mcp-server .` -- Get session: `docker run -it --rm -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp stickerdaniel/linkedin-mcp-server:latest --get-session` +- Get session: Use uvx locally first: `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` ## Architecture Overview @@ -79,7 +79,7 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis **Commit Message Format:** - Follow conventional commits: `type(scope): subject` -- Types: feat, fix, docs, style, refactor, test, chore +- Types: feat, fix, docs, style, refactor, test, chore, perf, ci - Keep subject <50 chars, imperative mood ## Commit Message Guidelines @@ -87,7 +87,7 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis **Commit Message Rules:** - Always use the commit message format type(scope): subject -- Types: feat, fix, docs, style, refactor, test, chore +- Types: feat, fix, docs, style, refactor, test, chore, perf, ci - Keep subject <50 chars, imperative mood ## Important Development Notes diff --git a/README.md b/README.md index d8fe3c89..0197341a 100644 --- a/README.md +++ b/README.md @@ -159,7 +159,7 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp - If pages fail to load or elements aren't found, try increasing the timeout: `--timeout 10000` - Users on slow connections may need higher values (e.g., 15000-30000ms) -- Can also set via environment variable: `DEFAULT_TIMEOUT=10000` +- Can also set via environment variable: `TIMEOUT=10000`
@@ -297,7 +297,7 @@ docker run -it --rm \ - If pages fail to load or elements aren't found, try increasing the timeout: `--timeout 10000` - Users on slow connections may need higher values (e.g., 15000-30000ms) -- Can also set via environment variable: `DEFAULT_TIMEOUT=10000` +- Can also set via environment variable: `TIMEOUT=10000` @@ -337,7 +337,7 @@ docker run -it --rm \ - If pages fail to load or elements aren't found, try increasing the timeout: `--timeout 10000` - Users on slow connections may need higher values (e.g., 15000-30000ms) -- Can also set via environment variable: `DEFAULT_TIMEOUT=10000` +- Can also set via environment variable: `TIMEOUT=10000` @@ -391,8 +391,15 @@ uv run -m linkedin_mcp_server - `--path PATH` - HTTP server path (default: /mcp) - `--clear-session` - Clear stored LinkedIn session file - `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) +- `--session-info` - Check if current session is valid and exit +- `--linkedin-cookie COOKIE` - LinkedIn session cookie (li_at) for authentication +- `--slow-mo MS` - Delay between browser actions in milliseconds (default: 0, useful for debugging) +- `--user-agent STRING` - Custom browser user agent +- `--viewport WxH` - Browser viewport size (default: 1280x720) - `--help` - Show help +> **Note:** Most CLI options have environment variable equivalents. See `.env.example` for details. 
+ **HTTP Mode Example (for web-based MCP clients):** ```bash @@ -443,7 +450,7 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por - If pages fail to load or elements aren't found, try increasing the timeout: `--timeout 10000` - Users on slow connections may need higher values (e.g., 15000-30000ms) -- Can also set via environment variable: `DEFAULT_TIMEOUT=10000` +- Can also set via environment variable: `TIMEOUT=10000` diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 36a36f8b..353eb6de 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -7,7 +7,7 @@ A Model Context Protocol (MCP) server that connects AI assistants to LinkedIn. A - **Profile Access**: Get detailed LinkedIn profile information - **Company Profiles**: Extract comprehensive company data - **Job Details**: Retrieve job posting information -- **Job Search**: Search for jobs with keywords and location filters +- **Job Search**: Search for jobs with keywords and location filters (currently broken upstream) ## Quick Start @@ -52,6 +52,37 @@ Create a session using the [uvx setup](https://github.com/stickerdaniel/linkedin > **Note:** Docker containers don't have a display server, so you can't use the `--get-session` command in Docker. 
+## Environment Variables + +| Variable | Default | Description | +|----------|---------|-------------| +| `LINKEDIN_COOKIE` | - | LinkedIn `li_at` session cookie (required if no session file) | +| `LOG_LEVEL` | `WARNING` | Logging level: DEBUG, INFO, WARNING, ERROR | +| `TIMEOUT` | `5000` | Browser timeout in milliseconds | +| `USER_AGENT` | - | Custom browser user agent | +| `TRANSPORT` | `stdio` | Transport mode: stdio, streamable-http | +| `HOST` | `127.0.0.1` | HTTP server host (for streamable-http transport) | +| `PORT` | `8000` | HTTP server port (for streamable-http transport) | +| `HTTP_PATH` | `/mcp` | HTTP server path (for streamable-http transport) | +| `SLOW_MO` | `0` | Delay between browser actions in ms (debugging) | +| `VIEWPORT` | `1280x720` | Browser viewport size as WIDTHxHEIGHT | + +**Example with custom timeout:** + +```json +{ + "mcpServers": { + "linkedin": { + "command": "docker", + "args": ["run", "-i", "--rm", "-e", "LINKEDIN_COOKIE", "-e", "TIMEOUT=10000", "stickerdaniel/linkedin-mcp-server"], + "env": { + "LINKEDIN_COOKIE": "your_li_at_cookie_value" + } + } + } +} +``` + ## Repository - **Source**: diff --git a/linkedin_mcp_server/__init__.py b/linkedin_mcp_server/__init__.py index 2c28679d..f0950836 100644 --- a/linkedin_mcp_server/__init__.py +++ b/linkedin_mcp_server/__init__.py @@ -22,4 +22,9 @@ - Cross-platform compatibility (macOS, Windows, Linux) """ -__version__ = "1.0.0" +from importlib.metadata import PackageNotFoundError, version + +try: + __version__ = version("linkedin-mcp-server") +except PackageNotFoundError: + __version__ = "0.0.0.dev" # Running from source without install From 31d9bc0af1b15c1e472e3faa561df770097c35fc Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 18 Jan 2026 16:54:13 +0000 Subject: [PATCH 313/565] chore(deps): update stickerdaniel/linkedin-mcp-server:latest docker digest to 0f95f94 --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 200d5ff4..500cec85 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:latest@sha256:5340e9f5826b375a08172918f83aee74ef1e2232715147e5beb372ca014f7187 + image: stickerdaniel/linkedin-mcp-server:latest@sha256:0f95f94331f56f13314589ae1853b6c74137d10d5cfd51a3f068961d903ec264 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: From 893d297ec9e11f145bbb3c0f20913d37f9c6d03d Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 18 Jan 2026 16:56:32 +0000 Subject: [PATCH 314/565] chore(deps): update python docker tag to v3.14 --- .python-version | 2 +- Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.python-version b/.python-version index 24ee5b1b..6324d401 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.13 +3.14 diff --git a/Dockerfile b/Dockerfile index c70ad117..184c07eb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ # Use slim Python base instead of full Playwright image (saves ~300-400 MB) # Only Chromium is installed, not Firefox/WebKit -FROM python:3.12-slim-bookworm@sha256:4a3ceab05b4e396df42a042415e43a286bb5793352b9258f889d6c7d38ed01fb +FROM python:3.14-slim-bookworm@sha256:adb6bdfbcc7c744c3b1a05976136555e2d82b7df01ac3efe71737d7f95ef0f2d # Install uv package manager COPY --from=ghcr.io/astral-sh/uv:latest@sha256:9a23023be68b2ed09750ae636228e903a54a05ea56ed03a934d00fe9fbeded4b /uv /uvx /bin/ From 875660362affba9058efd6ba515a84a700c149e5 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 18 Jan 2026 18:02:35 +0100 Subject: [PATCH 315/565] docs: update .env.example to clarify transport mode options --- .env.example | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.env.example b/.env.example index 102fae5e..b2e5ab6b 100644 --- a/.env.example +++ b/.env.example @@ 
-14,7 +14,7 @@ HEADLESS=true # Options: DEBUG, INFO, WARNING, ERROR LOG_LEVEL=WARNING -# Transport mode (defaults to stdio) +# Transport mode (leave empty for interactive prompt, defaults to stdio in non-interactive) # Options: stdio, streamable-http TRANSPORT= From c59d63dd7806dd9586f4143480f7dfc252f3d0be Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 18 Jan 2026 18:06:52 +0100 Subject: [PATCH 316/565] chore(release): bump version to 2.2.1 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d74c834c..a8db57e7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "2.2.0" +version = "2.2.1" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 5e94270a..7aefe589 100644 --- a/uv.lock +++ b/uv.lock @@ -827,7 +827,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "2.2.0" +version = "2.2.1" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From 58376cada70c7a73b80694dca645a5bcd92bd18e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 18 Jan 2026 17:07:18 +0000 Subject: [PATCH 317/565] chore(dxt): update manifest.json version to v2.2.1 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index ce34556e..a9af13bb 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.2.0", + "version": "2.2.1", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\nCreate a session using one of these methods:\n1. **Cookie**: Pass your `li_at` cookie via `LINKEDIN_COOKIE` environment variable - session will be created and stored automatically\n2. 
**Browser login**: Use the [uvx setup](https://github.com/stickerdaniel/linkedin-mcp-server#-uvx-setup-recommended---universal) to log in interactively - session will be stored and used by this extension\n\n## Requirements\n- Docker installed and running\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.2.0" + "stickerdaniel/linkedin-mcp-server:2.2.1" ] } }, From 4a117bb89ae120aa152c50e3e48df0d908b0f7e4 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 18 Jan 2026 21:47:48 +0000 Subject: [PATCH 318/565] chore(deps): update stickerdaniel/linkedin-mcp-server:latest docker digest to 29f0efb --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 500cec85..b9c30523 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:latest@sha256:0f95f94331f56f13314589ae1853b6c74137d10d5cfd51a3f068961d903ec264 + image: stickerdaniel/linkedin-mcp-server:latest@sha256:29f0efbf77583303d6ae8128fafe8105b92d510cdfc388d5d7431664b547673c volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: From 6283ddcd3ae7b55c2462374109d97be09c86895f Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 19 Jan 2026 13:39:36 +0100 Subject: [PATCH 319/565] docs(btca): add btca for better dependency documentation access --- AGENTS.md | 18 ++++++++++ btca.config.jsonc | 85 +++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 103 insertions(+) create mode 100644 btca.config.jsonc diff --git a/AGENTS.md b/AGENTS.md index 0cc75f7f..09435622 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -105,3 +105,21 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that 
enables AI assis 7. First review the PR with ai agents. 8. Manually review the PR and merge it if it's approved. Do not squash the commits. 9. Delete the branch after the PR is merged. + +## btca + +When you need up-to-date information about technologies used in this project, use btca to query source repositories directly. + +**Available resources**: fastmcp, linkedinScraper, playwright, pytest, ruff, ty, uv, inquirer, pythonDotenv, pyperclip, preCommit + +### Usage + +```bash +btca ask -r -q "" +``` + +Use multiple `-r` flags to query multiple resources at once: + +```bash +btca ask -r fastmcp -r playwright -q "How do I set up browser context with FastMCP tools?" +``` diff --git a/btca.config.jsonc b/btca.config.jsonc new file mode 100644 index 00000000..06d78806 --- /dev/null +++ b/btca.config.jsonc @@ -0,0 +1,85 @@ +{ + "$schema": "https://btca.dev/btca.schema.json", + "resources": [ + { + "name": "fastmcp", + "type": "git", + "url": "https://github.com/jlowin/fastmcp", + "branch": "main", + "specialNotes": "FastMCP server framework. Primary MCP library used in this project." + }, + { + "name": "linkedinScraper", + "type": "git", + "url": "https://github.com/joeyism/linkedin_scraper", + "branch": "master", + "specialNotes": "LinkedIn scraping library with Playwright support." + }, + { + "name": "playwright", + "type": "git", + "url": "https://github.com/microsoft/playwright-python", + "branch": "main", + "specialNotes": "Playwright Python bindings for browser automation." + }, + { + "name": "pytest", + "type": "git", + "url": "https://github.com/pytest-dev/pytest", + "branch": "main", + "specialNotes": "Python testing framework." + }, + { + "name": "ruff", + "type": "git", + "url": "https://github.com/astral-sh/ruff", + "branch": "main", + "specialNotes": "Fast Python linter and formatter written in Rust." 
+ }, + { + "name": "ty", + "type": "git", + "url": "https://github.com/astral-sh/ty", + "branch": "main", + "specialNotes": "Fast Python type checker from Astral, written in Rust." + }, + { + "name": "uv", + "type": "git", + "url": "https://github.com/astral-sh/uv", + "branch": "main", + "specialNotes": "Fast Python package manager from Astral, written in Rust." + }, + { + "name": "inquirer", + "type": "git", + "url": "https://github.com/magmax/python-inquirer", + "branch": "master", + "specialNotes": "Python library for CLI interactive prompts." + }, + { + "name": "pythonDotenv", + "type": "git", + "url": "https://github.com/theskumar/python-dotenv", + "branch": "main", + "specialNotes": "Python library for loading .env files." + }, + { + "name": "pyperclip", + "type": "git", + "url": "https://github.com/asweigart/pyperclip", + "branch": "master", + "specialNotes": "Cross-platform Python clipboard module." + }, + { + "name": "preCommit", + "type": "git", + "url": "https://github.com/pre-commit/pre-commit", + "branch": "main", + "specialNotes": "Framework for managing pre-commit hooks." + } + ], + "model": "claude-haiku-4-5", + "provider": "anthropic", + "providerTimeoutMs": 300000 +} From 7be81ab1b774f4a6fe6ec8c0bc34680b4d89cf25 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 19 Jan 2026 13:46:18 +0100 Subject: [PATCH 320/565] chore(docs): Cleanup old submodule setup. 
--- .gitmodules | 3 --- docs/references/linkedin_scraper | 1 - pyproject.toml | 3 --- 3 files changed, 7 deletions(-) delete mode 100644 .gitmodules delete mode 160000 docs/references/linkedin_scraper diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 05892e55..00000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "docs/references/linkedin_scraper"] - path = docs/references/linkedin_scraper - url = https://github.com/joeyism/linkedin_scraper.git diff --git a/docs/references/linkedin_scraper b/docs/references/linkedin_scraper deleted file mode 160000 index 647e88ab..00000000 --- a/docs/references/linkedin_scraper +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 647e88abbd38719e48ac7a340ec905a8f6a69a06 diff --git a/pyproject.toml b/pyproject.toml index a8db57e7..f529ec23 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,6 +37,3 @@ dev = [ "ruff>=0.11.11", "ty>=0.0.1a12", ] - -[tool.ty.src] -exclude = ["docs/references/"] From 06d957590fde03146a69d8b590ee2a00fe608cb0 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 19 Jan 2026 14:33:18 +0100 Subject: [PATCH 321/565] feat(scraper): upgrade to linkedin-scraper 3.1.0 - Add get_company_posts tool for scraping company feed posts - Enable browser warm-up during session creation - Fix search_jobs (was broken upstream, now working) - Expose new person fields: contacts, interests - Update tool docstrings with enhanced field documentation --- AGENTS.md | 15 ++++++- README.md | 11 ++++-- docs/docker-hub.md | 3 +- linkedin_mcp_server/setup.py | 23 +++++++---- linkedin_mcp_server/tools/company.py | 58 ++++++++++++++++++++++++++-- linkedin_mcp_server/tools/person.py | 13 ++++++- pyproject.toml | 2 +- uv.lock | 8 ++-- 8 files changed, 110 insertions(+), 23 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 09435622..566d6c1d 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -43,10 +43,21 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis **Tool Categories:** 
-- **Person Tools** (`tools/person.py`) - Profile scraping from LinkedIn URLs -- **Company Tools** (`tools/company.py`) - Company profile extraction +- **Person Tools** (`tools/person.py`) - Profile scraping with contacts, interests, experiences, education +- **Company Tools** (`tools/company.py`) - Company profile and posts extraction - **Job Tools** (`tools/job.py`) - Job posting details and search functionality +**Available MCP Tools:** + +| Tool | Description | +|------|-------------| +| `get_person_profile` | Get profile with contacts (email/phone/social), interests, experiences, education | +| `get_company_profile` | Get company info with employees, affiliated companies, showcase pages | +| `get_company_posts` | Get recent posts from company feed with reactions/comments/images | +| `get_job_details` | Get job posting details including description and benefits | +| `search_jobs` | Search jobs by keywords and location | +| `close_session` | Close browser session and clean up resources | + **Authentication Flow:** - Uses session files stored at `~/.linkedin-mcp/session.json` diff --git a/README.md b/README.md index 0197341a..2454136f 100644 --- a/README.md +++ b/README.md @@ -31,13 +31,18 @@ Get this company profile for partnership discussions https://www.linkedin.com/co Suggest improvements for my CV to target this job posting https://www.linkedin.com/jobs/view/4252026496 ``` +``` +What has Anthropic been posting about recently? 
https://www.linkedin.com/company/anthropic/ +``` + ## Features & Tool Status | Tool | Description | Status | |------|-------------|--------| -| `get_person_profile` | Get detailed profile info including work history, education, skills | Working | -| `get_company_profile` | Extract company information from a LinkedIn company name | Working | -| `search_jobs` | Search for jobs with keywords and location filters | Broken (upstream) | +| `get_person_profile` | Get detailed profile info including work history, education, contacts, interests | Working | +| `get_company_profile` | Extract company information including employees, affiliated companies | Working | +| `get_company_posts` | Get recent posts from a company's LinkedIn feed | Working | +| `search_jobs` | Search for jobs with keywords and location filters | Working | | `get_job_details` | Get detailed information about a specific job posting | Working | | `close_session` | Close browser session and clean up resources | Working | diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 353eb6de..4de49ca9 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -7,7 +7,8 @@ A Model Context Protocol (MCP) server that connects AI assistants to LinkedIn. 
A - **Profile Access**: Get detailed LinkedIn profile information - **Company Profiles**: Extract comprehensive company data - **Job Details**: Retrieve job posting information -- **Job Search**: Search for jobs with keywords and location filters (currently broken upstream) +- **Job Search**: Search for jobs with keywords and location filters +- **Company Posts**: Get recent posts from a company's LinkedIn feed ## Quick Start diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index d1fbb2ae..b90c6fa2 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -10,13 +10,16 @@ from pathlib import Path from linkedin_scraper import BrowserManager, wait_for_manual_login +from linkedin_scraper.core import warm_up_browser from linkedin_mcp_server.drivers.browser import DEFAULT_SESSION_PATH logger = logging.getLogger(__name__) -async def interactive_login_and_save(session_path: Path | None = None) -> bool: +async def interactive_login_and_save( + session_path: Path | None = None, warm_up: bool = True +) -> bool: """ Open browser for manual LinkedIn login and save session. @@ -25,6 +28,7 @@ async def interactive_login_and_save(session_path: Path | None = None) -> bool: Args: session_path: Path to save session. Defaults to ~/.linkedin-mcp/session.json + warm_up: Visit normal sites first to appear more human-like (default: True) Returns: True if login was successful and session was saved @@ -35,11 +39,16 @@ async def interactive_login_and_save(session_path: Path | None = None) -> bool: if session_path is None: session_path = DEFAULT_SESSION_PATH - print("๐Ÿ”— Opening browser for LinkedIn login...") + print("Opening browser for LinkedIn login...") print(" Please log in manually. 
You have 5 minutes to complete authentication.") print(" (This handles 2FA, captcha, and any security challenges)") async with BrowserManager(headless=False) as browser: + # Warm up browser to appear more human-like and avoid security checkpoints + if warm_up: + print(" Warming up browser (visiting normal sites first)...") + await warm_up_browser(browser.page) + # Navigate to LinkedIn login await browser.page.goto("https://www.linkedin.com/login") @@ -51,7 +60,7 @@ async def interactive_login_and_save(session_path: Path | None = None) -> bool: session_path.parent.mkdir(parents=True, exist_ok=True) await browser.save_session(str(session_path)) - print(f"โœ… Session saved to {session_path}") + print(f"Session saved to {session_path}") return True @@ -71,14 +80,14 @@ def run_session_creation(output_path: str | None = None) -> bool: else: session_path = DEFAULT_SESSION_PATH - print("๐Ÿ”— LinkedIn MCP Server - Session Creation") + print("LinkedIn MCP Server - Session Creation") print(f" Session will be saved to: {session_path}") try: success = asyncio.run(interactive_login_and_save(session_path)) return success except Exception as e: - print(f"โŒ Session creation failed: {e}") + print(f"Session creation failed: {e}") return False @@ -89,11 +98,11 @@ def run_interactive_setup() -> bool: Returns: True if setup completed successfully """ - print("๐Ÿ”— LinkedIn MCP Server Setup") + print("LinkedIn MCP Server Setup") print(" Opening browser for manual login...") try: return asyncio.run(interactive_login_and_save()) except Exception as e: - print(f"โŒ Login failed: {e}") + print(f"Login failed: {e}") return False diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index c16910f0..7764da8e 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -9,7 +9,7 @@ from typing import Any, Dict from fastmcp import Context, FastMCP -from linkedin_scraper import CompanyScraper +from linkedin_scraper import 
CompanyPostsScraper, CompanyScraper from mcp.types import ToolAnnotations from linkedin_mcp_server.callbacks import MCPContextProgressCallback @@ -47,8 +47,13 @@ async def get_company_profile(company_name: str, ctx: Context) -> Dict[str, Any] ctx: FastMCP context for progress reporting Returns: - Structured data from the company's profile including name, about, - headquarters, industry, size, and more. + Structured data from the company's profile including: + - linkedin_url, name, about_us, website, phone + - headquarters, founded, industry, company_type, company_size + - specialties, headcount + - showcase_pages: List of showcase pages (linkedin_url, name, followers) + - affiliated_companies: List of affiliated companies + - employees: List of employees (name, designation, linkedin_url) """ try: # Validate session before scraping @@ -69,3 +74,50 @@ async def get_company_profile(company_name: str, ctx: Context) -> Dict[str, Any] except Exception as e: return handle_tool_error(e, "get_company_profile") + + @mcp.tool( + annotations=ToolAnnotations( + title="Get Company Posts", + readOnlyHint=True, + destructiveHint=False, + openWorldHint=True, + ) + ) + async def get_company_posts( + company_name: str, ctx: Context, limit: int = 10 + ) -> Dict[str, Any]: + """ + Get recent posts from a company's LinkedIn feed. 
+ + Args: + company_name: LinkedIn company name (e.g., "docker", "anthropic", "microsoft") + ctx: FastMCP context for progress reporting + limit: Maximum number of posts to return (default: 10) + + Returns: + Dict with posts list containing: + - linkedin_url, urn, text, posted_date + - reactions_count, comments_count, reposts_count + - image_urls: List of image URLs + - video_url: Video URL if present + - article_url: Article URL if present + """ + try: + # Validate session before scraping + await ensure_authenticated() + + # Construct LinkedIn URL from company name + linkedin_url = f"https://www.linkedin.com/company/{company_name}/" + + logger.info(f"Scraping company posts: {linkedin_url} (limit: {limit})") + + browser = await get_or_create_browser() + scraper = CompanyPostsScraper( + browser.page, callback=MCPContextProgressCallback(ctx) + ) + posts = await scraper.scrape(linkedin_url, limit=limit) + + return {"posts": [post.to_dict() for post in posts], "count": len(posts)} + + except Exception as e: + return handle_tool_error(e, "get_company_posts") diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 8f0a030a..4701533e 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -49,8 +49,17 @@ async def get_person_profile( ctx: FastMCP context for progress reporting Returns: - Structured data from the person's profile including name, about, - experiences, educations, and more. 
+ Structured data from the person's profile including: + - linkedin_url, name, location, about, open_to_work + - experiences: List of work history (position_title, institution_name, + linkedin_url, from_date, to_date, duration, location, description) + - educations: List of education (institution_name, degree, linkedin_url, + from_date, to_date, description) + - interests: List of interests with category (company, group, school, + newsletter, influencer) and linkedin_url + - accomplishments: List of accomplishments (category, title) + - contacts: List of contact info (type: email/phone/website/linkedin/ + twitter/birthday/address, value, label) """ try: # Validate session before scraping diff --git a/pyproject.toml b/pyproject.toml index f529ec23..54cd4950 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ requires-python = ">=3.12" dependencies = [ "fastmcp>=2.14.0", "inquirer>=3.4.0", - "linkedin-scraper>=3.0.0", + "linkedin-scraper>=3.1.0", "playwright>=1.40.0", "pyperclip>=1.9.0", "python-dotenv>=1.1.1", diff --git a/uv.lock b/uv.lock index 7aefe589..8fd04f6d 100644 --- a/uv.lock +++ b/uv.lock @@ -853,7 +853,7 @@ dev = [ requires-dist = [ { name = "fastmcp", specifier = ">=2.14.0" }, { name = "inquirer", specifier = ">=3.4.0" }, - { name = "linkedin-scraper", specifier = ">=3.0.0" }, + { name = "linkedin-scraper", specifier = ">=3.1.0" }, { name = "playwright", specifier = ">=1.40.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, { name = "python-dotenv", specifier = ">=1.1.1" }, @@ -872,7 +872,7 @@ dev = [ [[package]] name = "linkedin-scraper" -version = "3.0.1" +version = "3.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiofiles" }, @@ -882,9 +882,9 @@ dependencies = [ { name = "python-dotenv" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/31/ac/af65e5359fcdd08d0cc194674e67106ce40027d5f55142243887681e0462/linkedin_scraper-3.0.1.tar.gz", hash = 
"sha256:6e9c54fd6b78003d0be370bbfacb69b52bb023c7c07bcc9d8b508d94048ea058", size = 39638, upload-time = "2026-01-07T03:09:52.482Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/30/967d78a67bc974e65491582e23993ca078d47c7b634842af13c8422162b9/linkedin_scraper-3.1.0.tar.gz", hash = "sha256:830bd3a4c16aeb667f5a00c0eed7528c80e0b360016f4c8eecd9cebad0d8728e", size = 46636, upload-time = "2026-01-18T23:55:47.77Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/c5/7fc84e2fca5608b6c8eec36db4f14e8dd4e59a059da84deba94c49faa875/linkedin_scraper-3.0.1-py3-none-any.whl", hash = "sha256:e121f963d17e0fc1503a4fd1b7c37fb9ccdcfc587dae4ca3defc073a81aff522", size = 44724, upload-time = "2026-01-07T03:09:51.478Z" }, + { url = "https://files.pythonhosted.org/packages/3f/a7/ce6de57a4bd75bfadaa23fb8f3eaa0b86de779335c13be08f8bbf3846438/linkedin_scraper-3.1.0-py3-none-any.whl", hash = "sha256:1e3ad52cd858d25034cab5f82261bfe35451941faec6003714aff2e745939212", size = 52372, upload-time = "2026-01-18T23:55:45.745Z" }, ] [[package]] From 5cfa259ad35e66173918bcc4fb717855f6a6cce5 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 19 Jan 2026 14:50:12 +0100 Subject: [PATCH 322/565] fix small issues --- linkedin_mcp_server/tools/company.py | 14 ++++++++------ linkedin_mcp_server/tools/person.py | 2 +- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 7764da8e..e0b6a512 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -95,12 +95,14 @@ async def get_company_posts( limit: Maximum number of posts to return (default: 10) Returns: - Dict with posts list containing: - - linkedin_url, urn, text, posted_date - - reactions_count, comments_count, reposts_count - - image_urls: List of image URLs - - video_url: Video URL if present - - article_url: Article URL if present + Dict containing: + - count: Number of posts returned + - 
posts: List of post dicts with: + - linkedin_url, urn, text, posted_date + - reactions_count, comments_count, reposts_count + - image_urls: List of image URLs + - video_url: Video URL if present + - article_url: Article URL if present """ try: # Validate session before scraping diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 4701533e..247ad1b7 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -2,7 +2,7 @@ LinkedIn person profile scraping tools. Provides MCP tools for extracting comprehensive LinkedIn profile information including -experience, education, skills, and contact details. +experience, education, interests, accomplishments, and contact details. """ import logging From 8daa7154c1567d7fc5ae13ac06dba5f3cf17635b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 19 Jan 2026 15:48:26 +0100 Subject: [PATCH 323/565] chore: bump version to 2.3.0 for linkedin-mcp-server --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 54cd4950..b957b16a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "2.2.1" +version = "2.3.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 8fd04f6d..02e634a0 100644 --- a/uv.lock +++ b/uv.lock @@ -827,7 +827,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "2.2.1" +version = "2.3.0" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From 77d1773b6bcff5d77b756e17f8664f69b15ee2c4 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 19 Jan 2026 14:49:00 +0000 Subject: [PATCH 324/565] chore(dxt): update manifest.json version to v2.3.0 [skip ci] --- manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index a9af13bb..45c36647 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.2.1", + "version": "2.3.0", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\nCreate a session using one of these methods:\n1. **Cookie**: Pass your `li_at` cookie via `LINKEDIN_COOKIE` environment variable - session will be created and stored automatically\n2. 
**Browser login**: Use the [uvx setup](https://github.com/stickerdaniel/linkedin-mcp-server#-uvx-setup-recommended---universal) to log in interactively - session will be stored and used by this extension\n\n## Requirements\n- Docker installed and running\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.2.1" + "stickerdaniel/linkedin-mcp-server:2.3.0" ] } }, From 24a525efd0b2eb05a6c0f91a2ac1bededab80863 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 23 Jan 2026 14:30:47 +0100 Subject: [PATCH 325/565] fix(dxt): add pre-pull instructions to avoid docker pull timeout --- README.md | 10 ++++++++++ RELEASE_NOTES_TEMPLATE.md | 10 +++++++--- manifest.json | 2 +- 3 files changed, 18 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 2454136f..d5e00954 100644 --- a/README.md +++ b/README.md @@ -327,6 +327,16 @@ docker run -it --rm \
โ— Troubleshooting +**First-time setup timeout:** + +- Claude Desktop has a ~60 second connection timeout +- If the Docker image isn't cached, the pull may exceed this timeout +- **Fix:** Pre-pull the image before first use: + ```bash + docker pull stickerdaniel/linkedin-mcp-server:2.3.0 + ``` +- Then restart Claude Desktop + **Docker issues:** - Make sure [Docker](https://www.docker.com/get-started/) is installed diff --git a/RELEASE_NOTES_TEMPLATE.md b/RELEASE_NOTES_TEMPLATE.md index 0aa23401..9b240fb1 100644 --- a/RELEASE_NOTES_TEMPLATE.md +++ b/RELEASE_NOTES_TEMPLATE.md @@ -14,7 +14,11 @@ docker pull stickerdaniel/linkedin-mcp-server:${VERSION} ## ๐Ÿ“ฆ Update DXT Extension Installation **For Claude Desktop users:** 1. Download the `.dxt` file below -2. Double-click to install in Claude Desktop -3. Restart Claude Desktop +2. Pre-pull the Docker image to avoid timeout issues: + ```bash + docker pull stickerdaniel/linkedin-mcp-server:${VERSION} + ``` +3. Double-click the `.dxt` file to install in Claude Desktop +4. Restart Claude Desktop -This DXT extension uses the pinned version `${VERSION}`, the Docker image will be pulled automatically. +> **Note:** The pre-pull step is important because Claude Desktop has a ~60 second connection timeout. Without pre-pulling, the initial image download may exceed this limit. diff --git a/manifest.json b/manifest.json index 45c36647..5f6f651c 100644 --- a/manifest.json +++ b/manifest.json @@ -4,7 +4,7 @@ "display_name": "LinkedIn MCP Server", "version": "2.3.0", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\nCreate a session using one of these methods:\n1. **Cookie**: Pass your `li_at` cookie via `LINKEDIN_COOKIE` environment variable - session will be created and stored automatically\n2. **Browser login**: Use the [uvx setup](https://github.com/stickerdaniel/linkedin-mcp-server#-uvx-setup-recommended---universal) to log in interactively - session will be stored and used by this extension\n\n## Requirements\n- Docker installed and running\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.0\n```\n\n### 2. 
Create LinkedIn Session\nUse one of these methods:\n- **Cookie**: Pass your `li_at` cookie via `LINKEDIN_COOKIE` environment variable\n- **Browser login**: Use the [uvx setup](https://github.com/stickerdaniel/linkedin-mcp-server#-uvx-setup-recommended---universal) to log in interactively\n\n## Requirements\n- Docker installed and running\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", From 9e8074a2402a28acc28dce49ba9cc8fe746b3604 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 23 Jan 2026 22:33:41 +0000 Subject: [PATCH 326/565] chore(deps): update anthropics/claude-code-action digest to f642197 --- .github/workflows/claude.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index f35ee4dc..e8c339f2 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -32,7 +32,7 @@ jobs: - name: Run Claude Code id: claude - uses: anthropics/claude-code-action@a017b830c03e23789b11fb69ed571ea61c12e45c # v1 + uses: anthropics/claude-code-action@f64219702d7454cf29fe32a74104be6ed43dc637 # v1 with: claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} From dbd5f5ce118ada9018b0896a816eab223ce5492d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 24 Jan 2026 12:13:49 +0100 Subject: [PATCH 327/565] refactor: pin docker-compose.yml to version tag instead of digest Update release workflow to also update docker-compose.yml version. This eliminates Renovate noise for digest updates. 
--- .github/workflows/release.yml | 13 +++++++------ docker-compose.yml | 2 +- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index dda24394..870d65ab 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -69,25 +69,26 @@ jobs: - name: Set up Bun uses: oven-sh/setup-bun@3d267786b128fe76c2f16a390aa2448b815359f3 # v2 - - name: Update manifest.json version and Docker image + - name: Update manifest.json and docker-compose.yml version run: | set -e sed -i 's/"version": ".*"/"version": "'$VERSION'"/' manifest.json sed -i 's/stickerdaniel\/linkedin-mcp-server:[^"]*/stickerdaniel\/linkedin-mcp-server:'$VERSION'/' manifest.json - echo "โœ… Updated manifest.json to version $VERSION" + sed -i 's/stickerdaniel\/linkedin-mcp-server:[^ ]*/stickerdaniel\/linkedin-mcp-server:'$VERSION'/' docker-compose.yml + echo "โœ… Updated manifest.json and docker-compose.yml to version $VERSION" - - name: Commit manifest update + - name: Commit version updates run: | set -e git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" git config --local user.name "github-actions[bot]" - git add manifest.json + git add manifest.json docker-compose.yml if git diff --staged --quiet; then echo "โ„น๏ธ No changes to commit" else - git commit -m "chore(dxt): update manifest.json version to v$VERSION [skip ci]" + git commit -m "chore: update manifest.json and docker-compose.yml to v$VERSION [skip ci]" git push origin main - echo "โœ… Committed manifest.json update" + echo "โœ… Committed version updates" fi - name: Create release tag diff --git a/docker-compose.yml b/docker-compose.yml index b9c30523..8900ae64 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:latest@sha256:29f0efbf77583303d6ae8128fafe8105b92d510cdfc388d5d7431664b547673c + image: stickerdaniel/linkedin-mcp-server:2.3.0 
volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: From 4f104632da286d0bbebad284a67678ccd779c264 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 24 Jan 2026 20:22:46 +0100 Subject: [PATCH 328/565] feat: add --chrome-path flag for custom browser executable Add --chrome-path CLI flag and CHROME_PATH environment variable to allow users to specify a custom Chrome/Chromium executable path. Resolves: #118 --- .env.example | 3 +++ README.md | 18 ++++++++++++++++++ docs/docker-hub.md | 1 + linkedin_mcp_server/config/loaders.py | 16 ++++++++++++++++ linkedin_mcp_server/config/schema.py | 12 ++++++++++++ linkedin_mcp_server/drivers/browser.py | 8 ++++++++ 6 files changed, 58 insertions(+) diff --git a/.env.example b/.env.example index b2e5ab6b..0eec0421 100644 --- a/.env.example +++ b/.env.example @@ -34,3 +34,6 @@ HTTP_PATH=/mcp SLOW_MO=0 # Browser viewport size as WIDTHxHEIGHT (default: 1280x720) VIEWPORT=1280x720 +# Custom Chrome/Chromium executable path (optional) +# Use this if Chrome is installed in a non-standard location +CHROME_PATH= diff --git a/README.md b/README.md index d5e00954..dbfcaa9b 100644 --- a/README.md +++ b/README.md @@ -113,6 +113,7 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp - `--path PATH` - HTTP server path (default: /mcp) - `--clear-session` - Clear stored LinkedIn session file - `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) +- `--chrome-path PATH` - Path to Chrome/Chromium executable (for custom browser installations) **Basic Usage Examples:** @@ -166,6 +167,11 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp - Users on slow connections may need higher values (e.g., 15000-30000ms) - Can also set via environment variable: `TIMEOUT=10000` +**Custom Chrome path:** + +- If Chrome is installed in a non-standard location, use `--chrome-path /path/to/chrome` +- Can also set via environment variable: 
`CHROME_PATH=/path/to/chrome` +

@@ -259,6 +265,7 @@ This opens a browser window where you log in manually (5 minute timeout for 2FA, - `--path PATH` - HTTP server path (default: /mcp) - `--clear-session` - Clear stored LinkedIn session file - `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) +- `--chrome-path PATH` - Path to Chrome/Chromium executable (rarely needed in Docker) > [!NOTE] > `--get-session` and `--no-headless` are not available in Docker (no display server). Use the [uvx setup](#-uvx-setup-recommended---universal) to create sessions. @@ -304,6 +311,11 @@ docker run -it --rm \ - Users on slow connections may need higher values (e.g., 15000-30000ms) - Can also set via environment variable: `TIMEOUT=10000` +**Custom Chrome path:** + +- If Chrome is installed in a non-standard location, use `--chrome-path /path/to/chrome` +- Can also set via environment variable: `CHROME_PATH=/path/to/chrome` +
@@ -411,6 +423,7 @@ uv run -m linkedin_mcp_server - `--slow-mo MS` - Delay between browser actions in milliseconds (default: 0, useful for debugging) - `--user-agent STRING` - Custom browser user agent - `--viewport WxH` - Browser viewport size (default: 1280x720) +- `--chrome-path PATH` - Path to Chrome/Chromium executable (for custom browser installations) - `--help` - Show help > **Note:** Most CLI options have environment variable equivalents. See `.env.example` for details. @@ -467,6 +480,11 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por - Users on slow connections may need higher values (e.g., 15000-30000ms) - Can also set via environment variable: `TIMEOUT=10000` +**Custom Chrome path:** + +- If Chrome is installed in a non-standard location, use `--chrome-path /path/to/chrome` +- Can also set via environment variable: `CHROME_PATH=/path/to/chrome` + Feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) or [PR](https://github.com/stickerdaniel/linkedin-mcp-server/pulls)! 
diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 4de49ca9..16128902 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -67,6 +67,7 @@ Create a session using the [uvx setup](https://github.com/stickerdaniel/linkedin | `HTTP_PATH` | `/mcp` | HTTP server path (for streamable-http transport) | | `SLOW_MO` | `0` | Delay between browser actions in ms (debugging) | | `VIEWPORT` | `1280x720` | Browser viewport size as WIDTHxHEIGHT | +| `CHROME_PATH` | - | Path to Chrome/Chromium executable (rarely needed in Docker) | **Example with custom timeout:** diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index f7cbdd6b..04ad5c2f 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -45,6 +45,7 @@ class EnvironmentKeys: HTTP_PATH = "HTTP_PATH" SLOW_MO = "SLOW_MO" VIEWPORT = "VIEWPORT" + CHROME_PATH = "CHROME_PATH" def is_interactive_environment() -> bool: @@ -139,6 +140,10 @@ def load_from_env(config: AppConfig) -> AppConfig: f"Invalid VIEWPORT: '{viewport_env}'. Must be in format WxH (e.g., 1280x720)." 
) + # Custom Chrome/Chromium executable path + if chrome_path_env := os.environ.get(EnvironmentKeys.CHROME_PATH): + config.browser.chrome_path = chrome_path_env + return config @@ -220,6 +225,14 @@ def load_from_args(config: AppConfig) -> AppConfig: help="Browser timeout for page operations in milliseconds (default: 5000)", ) + parser.add_argument( + "--chrome-path", + type=str, + default=None, + metavar="PATH", + help="Path to Chrome/Chromium executable (for custom browser installations)", + ) + # Session management parser.add_argument( "--get-session", @@ -292,6 +305,9 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.timeout is not None: config.browser.default_timeout = args.timeout + if args.chrome_path: + config.browser.chrome_path = args.chrome_path + # Session management if args.get_session is not None: config.server.get_session = True diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index bbba970f..6afaf9fd 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -6,6 +6,7 @@ """ from dataclasses import dataclass, field +from pathlib import Path from typing import Literal @@ -25,6 +26,7 @@ class BrowserConfig: viewport_width: int = 1280 viewport_height: int = 720 default_timeout: int = 5000 # Milliseconds for page operations + chrome_path: str | None = None # Path to Chrome/Chromium executable def validate(self) -> None: """Validate browser configuration values.""" @@ -40,6 +42,16 @@ def validate(self) -> None: raise ConfigurationError( f"viewport dimensions must be positive, got {self.viewport_width}x{self.viewport_height}" ) + if self.chrome_path: + chrome_path = Path(self.chrome_path) + if not chrome_path.exists(): + raise ConfigurationError( + f"chrome_path '{self.chrome_path}' does not exist" + ) + if not chrome_path.is_file(): + raise ConfigurationError( + f"chrome_path '{self.chrome_path}' is not a file" + ) @dataclass diff --git 
a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index ce43ddaa..5c6835df 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -71,6 +71,13 @@ async def get_or_create_browser( "width": config.browser.viewport_width, "height": config.browser.viewport_height, } + + # Build launch options for custom browser path + launch_options: dict[str, str] = {} + if config.browser.chrome_path: + launch_options["executable_path"] = config.browser.chrome_path + logger.info("Using custom Chrome path: %s", config.browser.chrome_path) + logger.info( "Creating new browser (headless=%s, slow_mo=%sms, viewport=%sx%s)", _headless, @@ -83,6 +90,7 @@ async def get_or_create_browser( slow_mo=config.browser.slow_mo, user_agent=config.browser.user_agent, viewport=viewport, + **launch_options, ) await _browser.start() From aed1cceb2ab264a9073e445014b566cf42116dca Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 24 Jan 2026 20:22:51 +0100 Subject: [PATCH 329/565] chore: update agent model configs --- .opencode/agents/code-reviewer.md | 5 +++-- .opencode/agents/code-simplifier.md | 5 +++-- .opencode/agents/comment-analyzer.md | 5 +++-- .opencode/agents/pr-test-analyzer.md | 5 +++-- .opencode/agents/silent-failure-hunter.md | 5 +++-- .opencode/agents/type-design-analyzer.md | 5 +++-- 6 files changed, 18 insertions(+), 12 deletions(-) diff --git a/.opencode/agents/code-reviewer.md b/.opencode/agents/code-reviewer.md index f6a41dcb..d0ee6897 100644 --- a/.opencode/agents/code-reviewer.md +++ b/.opencode/agents/code-reviewer.md @@ -2,8 +2,9 @@ name: code-reviewer mode: subagent # https://models.dev/ -# model: 'openai/gpt-5.2-codex' -model: 'github-copilot/gpt-5.2-codex' +model: 'openai/gpt-5.2-codex' +variant: 'xhigh' +# model: 'github-copilot/gpt-5.2-codex' color: '#22c55e' description: | Use this agent when you need to review code for adherence to project guidelines, style guides, and best practices. 
This agent should be used proactively after writing or modifying code, especially before committing changes or creating pull requests. It will check for style violations, potential issues, and ensure code follows the established patterns in CLAUDE.md. Also the agent needs to know which files to focus on for the review. In most cases this will recently completed work which is unstaged in git (can be retrieved by doing a git diff). However there can be cases where this is different, make sure to specify this as the agent input when calling the agent. diff --git a/.opencode/agents/code-simplifier.md b/.opencode/agents/code-simplifier.md index dfd3236d..5124308d 100644 --- a/.opencode/agents/code-simplifier.md +++ b/.opencode/agents/code-simplifier.md @@ -2,8 +2,9 @@ name: code-simplifier mode: subagent # https://models.dev/ -# model: 'openai/gpt-5.2-codex' -model: 'github-copilot/gpt-5.2-codex' +model: 'openai/gpt-5.2-codex' +variant: 'xhigh' +# model: 'github-copilot/gpt-5.2-codex' color: '#3b82f6' description: | Use this agent when code has been written or modified and needs to be simplified for clarity, consistency, and maintainability while preserving all functionality. This agent should be triggered automatically after completing a coding task or writing a logical chunk of code. It simplifies code by following project best practices while retaining all functionality. The agent focuses only on recently modified code unless instructed otherwise. 
diff --git a/.opencode/agents/comment-analyzer.md b/.opencode/agents/comment-analyzer.md index ad1138d2..52f02d72 100644 --- a/.opencode/agents/comment-analyzer.md +++ b/.opencode/agents/comment-analyzer.md @@ -2,8 +2,9 @@ name: comment-analyzer mode: subagent # https://models.dev/ -# model: 'openai/gpt-5.2-codex' -model: 'github-copilot/gpt-5.2-codex' +model: 'openai/gpt-5.2-codex' +variant: 'xhigh' +# model: 'github-copilot/gpt-5.2-codex' color: '#10b981' description: | Use this agent when you need to analyze code comments for accuracy, completeness, and long-term maintainability. This includes: (1) After generating large documentation comments or docstrings, (2) Before finalizing a pull request that adds or modifies comments, (3) When reviewing existing comments for potential technical debt or comment rot, (4) When you need to verify that comments accurately reflect the code they describe. diff --git a/.opencode/agents/pr-test-analyzer.md b/.opencode/agents/pr-test-analyzer.md index c6ba40a4..3e45a90e 100644 --- a/.opencode/agents/pr-test-analyzer.md +++ b/.opencode/agents/pr-test-analyzer.md @@ -2,8 +2,9 @@ name: pr-test-analyzer mode: subagent # https://models.dev/ -# model: 'openai/gpt-5.2-codex' -model: 'github-copilot/gpt-5.2-codex' +model: 'openai/gpt-5.2-codex' +variant: 'xhigh' +# model: 'github-copilot/gpt-5.2-codex' color: '#06b6d4' description: | Use this agent when you need to review a pull request for test coverage quality and completeness. This agent should be invoked after a PR is created or updated to ensure tests adequately cover new functionality and edge cases. 
Examples: diff --git a/.opencode/agents/silent-failure-hunter.md b/.opencode/agents/silent-failure-hunter.md index 0abf98e4..3b6e467f 100644 --- a/.opencode/agents/silent-failure-hunter.md +++ b/.opencode/agents/silent-failure-hunter.md @@ -2,8 +2,9 @@ name: silent-failure-hunter mode: subagent # https://models.dev/ -# model: 'openai/gpt-5.2-codex' -model: 'github-copilot/gpt-5.2-codex' +model: 'openai/gpt-5.2-codex' +variant: 'xhigh' +# model: 'github-copilot/gpt-5.2-codex' color: '#eab308' description: | Use this agent when reviewing code changes in a pull request to identify silent failures, inadequate error handling, and inappropriate fallback behavior. This agent should be invoked proactively after completing a logical chunk of work that involves error handling, catch blocks, fallback logic, or any code that could potentially suppress errors. Examples: diff --git a/.opencode/agents/type-design-analyzer.md b/.opencode/agents/type-design-analyzer.md index 8850259b..2b9549f3 100644 --- a/.opencode/agents/type-design-analyzer.md +++ b/.opencode/agents/type-design-analyzer.md @@ -2,8 +2,9 @@ name: type-design-analyzer mode: subagent # https://models.dev/ -# model: 'openai/gpt-5.2-codex' -model: 'github-copilot/gpt-5.2-codex' +model: 'openai/gpt-5.2-codex' +variant: 'xhigh' +# model: 'github-copilot/gpt-5.2-codex' color: '#ec4899' description: | Use this agent when you need expert analysis of type design in your codebase. Specifically use it: (1) when introducing a new type to ensure it follows best practices for encapsulation and invariant expression, (2) during pull request creation to review all types being added, (3) when refactoring existing types to improve their design quality. The agent will provide both qualitative feedback and quantitative ratings on encapsulation, invariant expression, usefulness, and enforcement. 
From 20a4ec2710d8214045e9668c6251d8c5a08289f5 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 24 Jan 2026 20:22:55 +0100 Subject: [PATCH 330/565] chore: disable renovate for self-referential docker image --- renovate.json | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/renovate.json b/renovate.json index 6862bea6..8ba7f14d 100644 --- a/renovate.json +++ b/renovate.json @@ -7,6 +7,12 @@ "schedule": ["at any time"] }, "packageRules": [ + { + "matchPackageNames": ["stickerdaniel/linkedin-mcp-server"], + "matchManagers": ["docker-compose"], + "enabled": false, + "description": "Managed by release workflow, not Renovate" + }, { "matchPackageNames": ["fastmcp", "mcp"], "matchUpdateTypes": ["minor", "patch"], From c301fca2c3425709ac8b350929232cf725abb0b9 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 24 Jan 2026 20:47:17 +0100 Subject: [PATCH 331/565] chore(deps): update lockfile for Python 3.14 compatibility Upgrade packages to get Python 3.14 wheel support: - lxml 6.0.0 -> 6.0.2 (adds cp314 wheels) - pydantic-core 2.33.2 -> 2.41.5 (adds cp314 wheels) - Other dependencies upgraded for compatibility --- uv.lock | 2023 ++++++++++++++++++++++++++++++++----------------------- 1 file changed, 1194 insertions(+), 829 deletions(-) diff --git a/uv.lock b/uv.lock index 5e94270a..ce1d3c8b 100644 --- a/uv.lock +++ b/uv.lock @@ -22,7 +22,7 @@ wheels = [ [[package]] name = "aiohttp" -version = "3.12.13" +version = "3.13.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, @@ -33,42 +33,76 @@ dependencies = [ { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/42/6e/ab88e7cb2a4058bed2f7870276454f85a7c56cd6da79349eb314fc7bbcaa/aiohttp-3.12.13.tar.gz", hash = "sha256:47e2da578528264a12e4e3dd8dd72a7289e5f812758fe086473fab037a10fcce", size = 7819160, upload-time = "2025-06-14T15:15:41.354Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b4/6a/ce40e329788013cd190b1d62bbabb2b6a9673ecb6d836298635b939562ef/aiohttp-3.12.13-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0aa580cf80558557285b49452151b9c69f2fa3ad94c5c9e76e684719a8791b73", size = 700491, upload-time = "2025-06-14T15:14:00.048Z" }, - { url = "https://files.pythonhosted.org/packages/28/d9/7150d5cf9163e05081f1c5c64a0cdf3c32d2f56e2ac95db2a28fe90eca69/aiohttp-3.12.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b103a7e414b57e6939cc4dece8e282cfb22043efd0c7298044f6594cf83ab347", size = 475104, upload-time = "2025-06-14T15:14:01.691Z" }, - { url = "https://files.pythonhosted.org/packages/f8/91/d42ba4aed039ce6e449b3e2db694328756c152a79804e64e3da5bc19dffc/aiohttp-3.12.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f64e748e9e741d2eccff9597d09fb3cd962210e5b5716047cbb646dc8fe06f", size = 467948, upload-time = "2025-06-14T15:14:03.561Z" }, - { url = "https://files.pythonhosted.org/packages/99/3b/06f0a632775946981d7c4e5a865cddb6e8dfdbaed2f56f9ade7bb4a1039b/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c955989bf4c696d2ededc6b0ccb85a73623ae6e112439398935362bacfaaf6", size = 1714742, upload-time = "2025-06-14T15:14:05.558Z" }, - { url = "https://files.pythonhosted.org/packages/92/a6/2552eebad9ec5e3581a89256276009e6a974dc0793632796af144df8b740/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d640191016763fab76072c87d8854a19e8e65d7a6fcfcbf017926bdbbb30a7e5", size = 1697393, upload-time = "2025-06-14T15:14:07.194Z" }, - { url = "https://files.pythonhosted.org/packages/d8/9f/bd08fdde114b3fec7a021381b537b21920cdd2aa29ad48c5dffd8ee314f1/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dc507481266b410dede95dd9f26c8d6f5a14315372cc48a6e43eac652237d9b", size = 1752486, upload-time = "2025-06-14T15:14:08.808Z" }, - { url = 
"https://files.pythonhosted.org/packages/f7/e1/affdea8723aec5bd0959171b5490dccd9a91fcc505c8c26c9f1dca73474d/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8a94daa873465d518db073bd95d75f14302e0208a08e8c942b2f3f1c07288a75", size = 1798643, upload-time = "2025-06-14T15:14:10.767Z" }, - { url = "https://files.pythonhosted.org/packages/f3/9d/666d856cc3af3a62ae86393baa3074cc1d591a47d89dc3bf16f6eb2c8d32/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f52420cde4ce0bb9425a375d95577fe082cb5721ecb61da3049b55189e4e6", size = 1718082, upload-time = "2025-06-14T15:14:12.38Z" }, - { url = "https://files.pythonhosted.org/packages/f3/ce/3c185293843d17be063dada45efd2712bb6bf6370b37104b4eda908ffdbd/aiohttp-3.12.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f7df1f620ec40f1a7fbcb99ea17d7326ea6996715e78f71a1c9a021e31b96b8", size = 1633884, upload-time = "2025-06-14T15:14:14.415Z" }, - { url = "https://files.pythonhosted.org/packages/3a/5b/f3413f4b238113be35dfd6794e65029250d4b93caa0974ca572217745bdb/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3062d4ad53b36e17796dce1c0d6da0ad27a015c321e663657ba1cc7659cfc710", size = 1694943, upload-time = "2025-06-14T15:14:16.48Z" }, - { url = "https://files.pythonhosted.org/packages/82/c8/0e56e8bf12081faca85d14a6929ad5c1263c146149cd66caa7bc12255b6d/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:8605e22d2a86b8e51ffb5253d9045ea73683d92d47c0b1438e11a359bdb94462", size = 1716398, upload-time = "2025-06-14T15:14:18.589Z" }, - { url = "https://files.pythonhosted.org/packages/ea/f3/33192b4761f7f9b2f7f4281365d925d663629cfaea093a64b658b94fc8e1/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:54fbbe6beafc2820de71ece2198458a711e224e116efefa01b7969f3e2b3ddae", size = 1657051, upload-time = "2025-06-14T15:14:20.223Z" }, - { url = 
"https://files.pythonhosted.org/packages/5e/0b/26ddd91ca8f84c48452431cb4c5dd9523b13bc0c9766bda468e072ac9e29/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:050bd277dfc3768b606fd4eae79dd58ceda67d8b0b3c565656a89ae34525d15e", size = 1736611, upload-time = "2025-06-14T15:14:21.988Z" }, - { url = "https://files.pythonhosted.org/packages/c3/8d/e04569aae853302648e2c138a680a6a2f02e374c5b6711732b29f1e129cc/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2637a60910b58f50f22379b6797466c3aa6ae28a6ab6404e09175ce4955b4e6a", size = 1764586, upload-time = "2025-06-14T15:14:23.979Z" }, - { url = "https://files.pythonhosted.org/packages/ac/98/c193c1d1198571d988454e4ed75adc21c55af247a9fda08236602921c8c8/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e986067357550d1aaa21cfe9897fa19e680110551518a5a7cf44e6c5638cb8b5", size = 1724197, upload-time = "2025-06-14T15:14:25.692Z" }, - { url = "https://files.pythonhosted.org/packages/e7/9e/07bb8aa11eec762c6b1ff61575eeeb2657df11ab3d3abfa528d95f3e9337/aiohttp-3.12.13-cp312-cp312-win32.whl", hash = "sha256:ac941a80aeea2aaae2875c9500861a3ba356f9ff17b9cb2dbfb5cbf91baaf5bf", size = 421771, upload-time = "2025-06-14T15:14:27.364Z" }, - { url = "https://files.pythonhosted.org/packages/52/66/3ce877e56ec0813069cdc9607cd979575859c597b6fb9b4182c6d5f31886/aiohttp-3.12.13-cp312-cp312-win_amd64.whl", hash = "sha256:671f41e6146a749b6c81cb7fd07f5a8356d46febdaaaf07b0e774ff04830461e", size = 447869, upload-time = "2025-06-14T15:14:29.05Z" }, - { url = "https://files.pythonhosted.org/packages/11/0f/db19abdf2d86aa1deec3c1e0e5ea46a587b97c07a16516b6438428b3a3f8/aiohttp-3.12.13-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d4a18e61f271127465bdb0e8ff36e8f02ac4a32a80d8927aa52371e93cd87938", size = 694910, upload-time = "2025-06-14T15:14:30.604Z" }, - { url = 
"https://files.pythonhosted.org/packages/d5/81/0ab551e1b5d7f1339e2d6eb482456ccbe9025605b28eed2b1c0203aaaade/aiohttp-3.12.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:532542cb48691179455fab429cdb0d558b5e5290b033b87478f2aa6af5d20ace", size = 472566, upload-time = "2025-06-14T15:14:32.275Z" }, - { url = "https://files.pythonhosted.org/packages/34/3f/6b7d336663337672d29b1f82d1f252ec1a040fe2d548f709d3f90fa2218a/aiohttp-3.12.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d7eea18b52f23c050ae9db5d01f3d264ab08f09e7356d6f68e3f3ac2de9dfabb", size = 464856, upload-time = "2025-06-14T15:14:34.132Z" }, - { url = "https://files.pythonhosted.org/packages/26/7f/32ca0f170496aa2ab9b812630fac0c2372c531b797e1deb3deb4cea904bd/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad7c8e5c25f2a26842a7c239de3f7b6bfb92304593ef997c04ac49fb703ff4d7", size = 1703683, upload-time = "2025-06-14T15:14:36.034Z" }, - { url = "https://files.pythonhosted.org/packages/ec/53/d5513624b33a811c0abea8461e30a732294112318276ce3dbf047dbd9d8b/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6af355b483e3fe9d7336d84539fef460120c2f6e50e06c658fe2907c69262d6b", size = 1684946, upload-time = "2025-06-14T15:14:38Z" }, - { url = "https://files.pythonhosted.org/packages/37/72/4c237dd127827b0247dc138d3ebd49c2ded6114c6991bbe969058575f25f/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a95cf9f097498f35c88e3609f55bb47b28a5ef67f6888f4390b3d73e2bac6177", size = 1737017, upload-time = "2025-06-14T15:14:39.951Z" }, - { url = "https://files.pythonhosted.org/packages/0d/67/8a7eb3afa01e9d0acc26e1ef847c1a9111f8b42b82955fcd9faeb84edeb4/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8ed8c38a1c584fe99a475a8f60eefc0b682ea413a84c6ce769bb19a7ff1c5ef", size = 1786390, upload-time = "2025-06-14T15:14:42.151Z" }, - { url = 
"https://files.pythonhosted.org/packages/48/19/0377df97dd0176ad23cd8cad4fd4232cfeadcec6c1b7f036315305c98e3f/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0b9170d5d800126b5bc89d3053a2363406d6e327afb6afaeda2d19ee8bb103", size = 1708719, upload-time = "2025-06-14T15:14:44.039Z" }, - { url = "https://files.pythonhosted.org/packages/61/97/ade1982a5c642b45f3622255173e40c3eed289c169f89d00eeac29a89906/aiohttp-3.12.13-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:372feeace612ef8eb41f05ae014a92121a512bd5067db8f25101dd88a8db11da", size = 1622424, upload-time = "2025-06-14T15:14:45.945Z" }, - { url = "https://files.pythonhosted.org/packages/99/ab/00ad3eea004e1d07ccc406e44cfe2b8da5acb72f8c66aeeb11a096798868/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a946d3702f7965d81f7af7ea8fb03bb33fe53d311df48a46eeca17e9e0beed2d", size = 1675447, upload-time = "2025-06-14T15:14:47.911Z" }, - { url = "https://files.pythonhosted.org/packages/3f/fe/74e5ce8b2ccaba445fe0087abc201bfd7259431d92ae608f684fcac5d143/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a0c4725fae86555bbb1d4082129e21de7264f4ab14baf735278c974785cd2041", size = 1707110, upload-time = "2025-06-14T15:14:50.334Z" }, - { url = "https://files.pythonhosted.org/packages/ef/c4/39af17807f694f7a267bd8ab1fbacf16ad66740862192a6c8abac2bff813/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b28ea2f708234f0a5c44eb6c7d9eb63a148ce3252ba0140d050b091b6e842d1", size = 1649706, upload-time = "2025-06-14T15:14:52.378Z" }, - { url = "https://files.pythonhosted.org/packages/38/e8/f5a0a5f44f19f171d8477059aa5f28a158d7d57fe1a46c553e231f698435/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d4f5becd2a5791829f79608c6f3dc745388162376f310eb9c142c985f9441cc1", size = 1725839, upload-time = "2025-06-14T15:14:54.617Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/ac/81acc594c7f529ef4419d3866913f628cd4fa9cab17f7bf410a5c3c04c53/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:60f2ce6b944e97649051d5f5cc0f439360690b73909230e107fd45a359d3e911", size = 1759311, upload-time = "2025-06-14T15:14:56.597Z" }, - { url = "https://files.pythonhosted.org/packages/38/0d/aabe636bd25c6ab7b18825e5a97d40024da75152bec39aa6ac8b7a677630/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:69fc1909857401b67bf599c793f2183fbc4804717388b0b888f27f9929aa41f3", size = 1708202, upload-time = "2025-06-14T15:14:58.598Z" }, - { url = "https://files.pythonhosted.org/packages/1f/ab/561ef2d8a223261683fb95a6283ad0d36cb66c87503f3a7dde7afe208bb2/aiohttp-3.12.13-cp313-cp313-win32.whl", hash = "sha256:7d7e68787a2046b0e44ba5587aa723ce05d711e3a3665b6b7545328ac8e3c0dd", size = 420794, upload-time = "2025-06-14T15:15:00.939Z" }, - { url = "https://files.pythonhosted.org/packages/9d/47/b11d0089875a23bff0abd3edb5516bcd454db3fefab8604f5e4b07bd6210/aiohttp-3.12.13-cp313-cp313-win_amd64.whl", hash = "sha256:5a178390ca90419bfd41419a809688c368e63c86bd725e1186dd97f6b89c2706", size = 446735, upload-time = "2025-06-14T15:15:02.858Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732, upload-time = "2026-01-03T17:30:14.23Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293, upload-time = "2026-01-03T17:30:15.96Z" }, + { url = "https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533, upload-time = "2026-01-03T17:30:17.431Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a8/5a35dc56a06a2c90d4742cbf35294396907027f80eea696637945a106f25/aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29", size = 1737839, upload-time = "2026-01-03T17:30:19.422Z" }, + { url = "https://files.pythonhosted.org/packages/bf/62/4b9eeb331da56530bf2e198a297e5303e1c1ebdceeb00fe9b568a65c5a0c/aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3", size = 1703932, upload-time = "2026-01-03T17:30:21.756Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f6/af16887b5d419e6a367095994c0b1332d154f647e7dc2bd50e61876e8e3d/aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d", size = 1771906, upload-time = "2026-01-03T17:30:23.932Z" }, + { url = "https://files.pythonhosted.org/packages/ce/83/397c634b1bcc24292fa1e0c7822800f9f6569e32934bdeef09dae7992dfb/aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463", size = 
1871020, upload-time = "2026-01-03T17:30:26Z" }, + { url = "https://files.pythonhosted.org/packages/86/f6/a62cbbf13f0ac80a70f71b1672feba90fdb21fd7abd8dbf25c0105fb6fa3/aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc", size = 1755181, upload-time = "2026-01-03T17:30:27.554Z" }, + { url = "https://files.pythonhosted.org/packages/0a/87/20a35ad487efdd3fba93d5843efdfaa62d2f1479eaafa7453398a44faf13/aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf", size = 1561794, upload-time = "2026-01-03T17:30:29.254Z" }, + { url = "https://files.pythonhosted.org/packages/de/95/8fd69a66682012f6716e1bc09ef8a1a2a91922c5725cb904689f112309c4/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033", size = 1697900, upload-time = "2026-01-03T17:30:31.033Z" }, + { url = "https://files.pythonhosted.org/packages/e5/66/7b94b3b5ba70e955ff597672dad1691333080e37f50280178967aff68657/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f", size = 1728239, upload-time = "2026-01-03T17:30:32.703Z" }, + { url = "https://files.pythonhosted.org/packages/47/71/6f72f77f9f7d74719692ab65a2a0252584bf8d5f301e2ecb4c0da734530a/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679", size = 1740527, upload-time = "2026-01-03T17:30:34.695Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b4/75ec16cbbd5c01bdaf4a05b19e103e78d7ce1ef7c80867eb0ace42ff4488/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423", size = 1554489, upload-time = 
"2026-01-03T17:30:36.864Z" }, + { url = "https://files.pythonhosted.org/packages/52/8f/bc518c0eea29f8406dcf7ed1f96c9b48e3bc3995a96159b3fc11f9e08321/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce", size = 1767852, upload-time = "2026-01-03T17:30:39.433Z" }, + { url = "https://files.pythonhosted.org/packages/9d/f2/a07a75173124f31f11ea6f863dc44e6f09afe2bca45dd4e64979490deab1/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a", size = 1722379, upload-time = "2026-01-03T17:30:41.081Z" }, + { url = "https://files.pythonhosted.org/packages/3c/4a/1a3fee7c21350cac78e5c5cef711bac1b94feca07399f3d406972e2d8fcd/aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046", size = 428253, upload-time = "2026-01-03T17:30:42.644Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b7/76175c7cb4eb73d91ad63c34e29fc4f77c9386bba4a65b53ba8e05ee3c39/aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57", size = 455407, upload-time = "2026-01-03T17:30:44.195Z" }, + { url = "https://files.pythonhosted.org/packages/97/8a/12ca489246ca1faaf5432844adbfce7ff2cc4997733e0af120869345643a/aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c", size = 734190, upload-time = "2026-01-03T17:30:45.832Z" }, + { url = "https://files.pythonhosted.org/packages/32/08/de43984c74ed1fca5c014808963cc83cb00d7bb06af228f132d33862ca76/aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9", size = 491783, upload-time = "2026-01-03T17:30:47.466Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/f8/8dd2cf6112a5a76f81f81a5130c57ca829d101ad583ce57f889179accdda/aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3", size = 490704, upload-time = "2026-01-03T17:30:49.373Z" }, + { url = "https://files.pythonhosted.org/packages/6d/40/a46b03ca03936f832bc7eaa47cfbb1ad012ba1be4790122ee4f4f8cba074/aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf", size = 1720652, upload-time = "2026-01-03T17:30:50.974Z" }, + { url = "https://files.pythonhosted.org/packages/f7/7e/917fe18e3607af92657e4285498f500dca797ff8c918bd7d90b05abf6c2a/aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6", size = 1692014, upload-time = "2026-01-03T17:30:52.729Z" }, + { url = "https://files.pythonhosted.org/packages/71/b6/cefa4cbc00d315d68973b671cf105b21a609c12b82d52e5d0c9ae61d2a09/aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d", size = 1759777, upload-time = "2026-01-03T17:30:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/fb/e3/e06ee07b45e59e6d81498b591fc589629be1553abb2a82ce33efe2a7b068/aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261", size = 1861276, upload-time = "2026-01-03T17:30:56.512Z" }, + { url = "https://files.pythonhosted.org/packages/7c/24/75d274228acf35ceeb2850b8ce04de9dd7355ff7a0b49d607ee60c29c518/aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0", size = 1743131, upload-time = "2026-01-03T17:30:58.256Z" }, + { url = "https://files.pythonhosted.org/packages/04/98/3d21dde21889b17ca2eea54fdcff21b27b93f45b7bb94ca029c31ab59dc3/aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730", size = 1556863, upload-time = "2026-01-03T17:31:00.445Z" }, + { url = "https://files.pythonhosted.org/packages/9e/84/da0c3ab1192eaf64782b03971ab4055b475d0db07b17eff925e8c93b3aa5/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91", size = 1682793, upload-time = "2026-01-03T17:31:03.024Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0f/5802ada182f575afa02cbd0ec5180d7e13a402afb7c2c03a9aa5e5d49060/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3", size = 1716676, upload-time = "2026-01-03T17:31:04.842Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8c/714d53bd8b5a4560667f7bbbb06b20c2382f9c7847d198370ec6526af39c/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4", size = 1733217, upload-time = "2026-01-03T17:31:06.868Z" }, + { url = "https://files.pythonhosted.org/packages/7d/79/e2176f46d2e963facea939f5be2d26368ce543622be6f00a12844d3c991f/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998", size = 1552303, upload-time = "2026-01-03T17:31:08.958Z" }, + { url = "https://files.pythonhosted.org/packages/ab/6a/28ed4dea1759916090587d1fe57087b03e6c784a642b85ef48217b0277ae/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0", size = 
1763673, upload-time = "2026-01-03T17:31:10.676Z" }, + { url = "https://files.pythonhosted.org/packages/e8/35/4a3daeb8b9fab49240d21c04d50732313295e4bd813a465d840236dd0ce1/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591", size = 1721120, upload-time = "2026-01-03T17:31:12.575Z" }, + { url = "https://files.pythonhosted.org/packages/bc/9f/d643bb3c5fb99547323e635e251c609fbbc660d983144cfebec529e09264/aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf", size = 427383, upload-time = "2026-01-03T17:31:14.382Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f1/ab0395f8a79933577cdd996dd2f9aa6014af9535f65dddcf88204682fe62/aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e", size = 453899, upload-time = "2026-01-03T17:31:15.958Z" }, + { url = "https://files.pythonhosted.org/packages/99/36/5b6514a9f5d66f4e2597e40dea2e3db271e023eb7a5d22defe96ba560996/aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808", size = 737238, upload-time = "2026-01-03T17:31:17.909Z" }, + { url = "https://files.pythonhosted.org/packages/f7/49/459327f0d5bcd8c6c9ca69e60fdeebc3622861e696490d8674a6d0cb90a6/aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415", size = 492292, upload-time = "2026-01-03T17:31:19.919Z" }, + { url = "https://files.pythonhosted.org/packages/e8/0b/b97660c5fd05d3495b4eb27f2d0ef18dc1dc4eff7511a9bf371397ff0264/aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f", size = 493021, upload-time = "2026-01-03T17:31:21.636Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/d4/438efabdf74e30aeceb890c3290bbaa449780583b1270b00661126b8aae4/aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6", size = 1717263, upload-time = "2026-01-03T17:31:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/71/f2/7bddc7fd612367d1459c5bcf598a9e8f7092d6580d98de0e057eb42697ad/aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687", size = 1669107, upload-time = "2026-01-03T17:31:25.334Z" }, + { url = "https://files.pythonhosted.org/packages/00/5a/1aeaecca40e22560f97610a329e0e5efef5e0b5afdf9f857f0d93839ab2e/aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26", size = 1760196, upload-time = "2026-01-03T17:31:27.394Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f8/0ff6992bea7bd560fc510ea1c815f87eedd745fe035589c71ce05612a19a/aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a", size = 1843591, upload-time = "2026-01-03T17:31:29.238Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d1/e30e537a15f53485b61f5be525f2157da719819e8377298502aebac45536/aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1", size = 1720277, upload-time = "2026-01-03T17:31:31.053Z" }, + { url = "https://files.pythonhosted.org/packages/84/45/23f4c451d8192f553d38d838831ebbc156907ea6e05557f39563101b7717/aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25", size = 1548575, upload-time = "2026-01-03T17:31:32.87Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ed/0a42b127a43712eda7807e7892c083eadfaf8429ca8fb619662a530a3aab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603", size = 1679455, upload-time = "2026-01-03T17:31:34.76Z" }, + { url = "https://files.pythonhosted.org/packages/2e/b5/c05f0c2b4b4fe2c9d55e73b6d3ed4fd6c9dc2684b1d81cbdf77e7fad9adb/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a", size = 1687417, upload-time = "2026-01-03T17:31:36.699Z" }, + { url = "https://files.pythonhosted.org/packages/c9/6b/915bc5dad66aef602b9e459b5a973529304d4e89ca86999d9d75d80cbd0b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926", size = 1729968, upload-time = "2026-01-03T17:31:38.622Z" }, + { url = "https://files.pythonhosted.org/packages/11/3b/e84581290a9520024a08640b63d07673057aec5ca548177a82026187ba73/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba", size = 1545690, upload-time = "2026-01-03T17:31:40.57Z" }, + { url = "https://files.pythonhosted.org/packages/f5/04/0c3655a566c43fd647c81b895dfe361b9f9ad6d58c19309d45cff52d6c3b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c", size = 1746390, upload-time = "2026-01-03T17:31:42.857Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/71165b26978f719c3419381514c9690bd5980e764a09440a10bb816ea4ab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43", size = 1702188, upload-time = 
"2026-01-03T17:31:44.984Z" }, + { url = "https://files.pythonhosted.org/packages/29/a7/cbe6c9e8e136314fa1980da388a59d2f35f35395948a08b6747baebb6aa6/aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1", size = 433126, upload-time = "2026-01-03T17:31:47.463Z" }, + { url = "https://files.pythonhosted.org/packages/de/56/982704adea7d3b16614fc5936014e9af85c0e34b58f9046655817f04306e/aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984", size = 459128, upload-time = "2026-01-03T17:31:49.2Z" }, + { url = "https://files.pythonhosted.org/packages/6c/2a/3c79b638a9c3d4658d345339d22070241ea341ed4e07b5ac60fb0f418003/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c", size = 769512, upload-time = "2026-01-03T17:31:51.134Z" }, + { url = "https://files.pythonhosted.org/packages/29/b9/3e5014d46c0ab0db8707e0ac2711ed28c4da0218c358a4e7c17bae0d8722/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592", size = 506444, upload-time = "2026-01-03T17:31:52.85Z" }, + { url = "https://files.pythonhosted.org/packages/90/03/c1d4ef9a054e151cd7839cdc497f2638f00b93cbe8043983986630d7a80c/aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f", size = 510798, upload-time = "2026-01-03T17:31:54.91Z" }, + { url = "https://files.pythonhosted.org/packages/ea/76/8c1e5abbfe8e127c893fe7ead569148a4d5a799f7cf958d8c09f3eedf097/aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29", size = 1868835, upload-time = "2026-01-03T17:31:56.733Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/ac/984c5a6f74c363b01ff97adc96a3976d9c98940b8969a1881575b279ac5d/aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc", size = 1720486, upload-time = "2026-01-03T17:31:58.65Z" }, + { url = "https://files.pythonhosted.org/packages/b2/9a/b7039c5f099c4eb632138728828b33428585031a1e658d693d41d07d89d1/aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2", size = 1847951, upload-time = "2026-01-03T17:32:00.989Z" }, + { url = "https://files.pythonhosted.org/packages/3c/02/3bec2b9a1ba3c19ff89a43a19324202b8eb187ca1e928d8bdac9bbdddebd/aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587", size = 1941001, upload-time = "2026-01-03T17:32:03.122Z" }, + { url = "https://files.pythonhosted.org/packages/37/df/d879401cedeef27ac4717f6426c8c36c3091c6e9f08a9178cc87549c537f/aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8", size = 1797246, upload-time = "2026-01-03T17:32:05.255Z" }, + { url = "https://files.pythonhosted.org/packages/8d/15/be122de1f67e6953add23335c8ece6d314ab67c8bebb3f181063010795a7/aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632", size = 1627131, upload-time = "2026-01-03T17:32:07.607Z" }, + { url = "https://files.pythonhosted.org/packages/12/12/70eedcac9134cfa3219ab7af31ea56bc877395b1ac30d65b1bc4b27d0438/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64", size = 1795196, upload-time = "2026-01-03T17:32:09.59Z" }, + { url = "https://files.pythonhosted.org/packages/32/11/b30e1b1cd1f3054af86ebe60df96989c6a414dd87e27ad16950eee420bea/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0", size = 1782841, upload-time = "2026-01-03T17:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/88/0d/d98a9367b38912384a17e287850f5695c528cff0f14f791ce8ee2e4f7796/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56", size = 1795193, upload-time = "2026-01-03T17:32:13.705Z" }, + { url = "https://files.pythonhosted.org/packages/43/a5/a2dfd1f5ff5581632c7f6a30e1744deda03808974f94f6534241ef60c751/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72", size = 1621979, upload-time = "2026-01-03T17:32:15.965Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f0/12973c382ae7c1cccbc4417e129c5bf54c374dfb85af70893646e1f0e749/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df", size = 1822193, upload-time = "2026-01-03T17:32:18.219Z" }, + { url = "https://files.pythonhosted.org/packages/3c/5f/24155e30ba7f8c96918af1350eb0663e2430aad9e001c0489d89cd708ab1/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa", size = 1769801, upload-time = "2026-01-03T17:32:20.25Z" }, + { url = "https://files.pythonhosted.org/packages/eb/f8/7314031ff5c10e6ece114da79b338ec17eeff3a079e53151f7e9f43c4723/aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767", size = 466523, upload-time = 
"2026-01-03T17:32:22.215Z" }, + { url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" }, ] [[package]] @@ -104,25 +138,24 @@ wheels = [ [[package]] name = "anyio" -version = "4.9.0" +version = "4.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, - { name = "sniffio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } +sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, + { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, ] [[package]] name = "attrs" -version = "25.3.0" +version = "25.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, ] [[package]] @@ -148,16 +181,15 @@ wheels = [ [[package]] name = "blessed" -version = "1.20.0" +version = "1.27.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jinxed", marker = "sys_platform == 'win32'" }, - { name = "six" }, { name = "wcwidth" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/25/ae/92e9968ad23205389ec6bd82e2d4fca3817f1cdef34e10aa8d529ef8b1d7/blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680", size = 6655612, upload-time = "2023-02-04T02:25:45.886Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/3c/783f2a400e5dac56ad073997aa6aa47150c3b06a5ce8ad2f537f3691eaaa/blessed-1.27.0.tar.gz", hash = "sha256:e3064559388bd532ab6460d9b6c7d6dd699c4e0cf54d28ed6e2cab12feda13bb", size = 6761573, upload-time = 
"2026-01-20T04:16:56.233Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/98/584f211c3a4bb38f2871fa937ee0cc83c130de50c955d6c7e2334dbf4acb/blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058", size = 58372, upload-time = "2023-02-04T02:25:43.093Z" }, + { url = "https://files.pythonhosted.org/packages/30/d9/11d745a88e9000729fc4d9e813789a95327beda325e04ec311e9ae23a30e/blessed-1.27.0-py3-none-any.whl", hash = "sha256:1c599969acc993bb5842bf3f638b0691e335277a9d9058cd079463a346988714", size = 101305, upload-time = "2026-01-20T04:16:54.095Z" }, ] [[package]] @@ -180,91 +212,137 @@ wheels = [ [[package]] name = "cffi" -version = "1.17.1" +version = "2.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pycparser" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" }, - { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" }, - { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, - { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, - { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, - { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" }, - { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" }, - { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" }, - { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, - { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, - { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, - { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" }, - { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, - { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, 
upload-time = "2024-09-04T20:44:33.606Z" }, - { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, - { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" }, - { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" }, - { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" }, - { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" }, - { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" }, - { url = 
"https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" }, + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, ] [[package]] name = "cfgv" -version = "3.4.0" +version = "3.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/b5/721b8799b04bf9afe054a3899c6cf4e880fcf8563cc71c15610242490a0c/cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132", size = 7334, upload-time = "2025-11-19T20:55:51.612Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, + { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" }, ] [[package]] name = "charset-normalizer" -version = "3.4.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, - { url = 
"https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, - { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, - { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, - { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, - { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, - { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, - { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, - { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, - { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, - { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, - { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, 
upload-time = "2025-05-02T08:32:54.573Z" }, - { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, - { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, - { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, - { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, - { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, - { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, - { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, - { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, - { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, - { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, - { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, - { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", 
size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, - { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, - { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, + { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 
161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time 
= "2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, 
+ { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, ] [[package]] name = "click" -version = "8.1.8" +version = "8.3.1" source = { registry = "https://pypi.org/simple" } 
dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, ] [[package]] @@ -287,84 +365,137 @@ wheels = [ [[package]] name = "coverage" -version = "7.8.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/07/998afa4a0ecdf9b1981ae05415dad2d4e7716e1b1f00abbd91691ac09ac9/coverage-7.8.2.tar.gz", hash = "sha256:a886d531373a1f6ff9fad2a2ba4a045b68467b779ae729ee0b3b10ac20033b27", size = 812759, upload-time = "2025-05-23T11:39:57.856Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/2a/1da1ada2e3044fcd4a3254fb3576e160b8fe5b36d705c8a31f793423f763/coverage-7.8.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2f6fe3654468d061942591aef56686131335b7a8325684eda85dacdf311356c", size = 211876, upload-time = "2025-05-23T11:38:29.01Z" }, 
- { url = "https://files.pythonhosted.org/packages/70/e9/3d715ffd5b6b17a8be80cd14a8917a002530a99943cc1939ad5bb2aa74b9/coverage-7.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76090fab50610798cc05241bf83b603477c40ee87acd358b66196ab0ca44ffa1", size = 212130, upload-time = "2025-05-23T11:38:30.675Z" }, - { url = "https://files.pythonhosted.org/packages/a0/02/fdce62bb3c21649abfd91fbdcf041fb99be0d728ff00f3f9d54d97ed683e/coverage-7.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd0a0a5054be160777a7920b731a0570284db5142abaaf81bcbb282b8d99279", size = 246176, upload-time = "2025-05-23T11:38:32.395Z" }, - { url = "https://files.pythonhosted.org/packages/a7/52/decbbed61e03b6ffe85cd0fea360a5e04a5a98a7423f292aae62423b8557/coverage-7.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da23ce9a3d356d0affe9c7036030b5c8f14556bd970c9b224f9c8205505e3b99", size = 243068, upload-time = "2025-05-23T11:38:33.989Z" }, - { url = "https://files.pythonhosted.org/packages/38/6c/d0e9c0cce18faef79a52778219a3c6ee8e336437da8eddd4ab3dbd8fadff/coverage-7.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9392773cffeb8d7e042a7b15b82a414011e9d2b5fdbbd3f7e6a6b17d5e21b20", size = 245328, upload-time = "2025-05-23T11:38:35.568Z" }, - { url = "https://files.pythonhosted.org/packages/f0/70/f703b553a2f6b6c70568c7e398ed0789d47f953d67fbba36a327714a7bca/coverage-7.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:876cbfd0b09ce09d81585d266c07a32657beb3eaec896f39484b631555be0fe2", size = 245099, upload-time = "2025-05-23T11:38:37.627Z" }, - { url = "https://files.pythonhosted.org/packages/ec/fb/4cbb370dedae78460c3aacbdad9d249e853f3bc4ce5ff0e02b1983d03044/coverage-7.8.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3da9b771c98977a13fbc3830f6caa85cae6c9c83911d24cb2d218e9394259c57", size = 243314, upload-time = 
"2025-05-23T11:38:39.238Z" }, - { url = "https://files.pythonhosted.org/packages/39/9f/1afbb2cb9c8699b8bc38afdce00a3b4644904e6a38c7bf9005386c9305ec/coverage-7.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a990f6510b3292686713bfef26d0049cd63b9c7bb17e0864f133cbfd2e6167f", size = 244489, upload-time = "2025-05-23T11:38:40.845Z" }, - { url = "https://files.pythonhosted.org/packages/79/fa/f3e7ec7d220bff14aba7a4786ae47043770cbdceeea1803083059c878837/coverage-7.8.2-cp312-cp312-win32.whl", hash = "sha256:bf8111cddd0f2b54d34e96613e7fbdd59a673f0cf5574b61134ae75b6f5a33b8", size = 214366, upload-time = "2025-05-23T11:38:43.551Z" }, - { url = "https://files.pythonhosted.org/packages/54/aa/9cbeade19b7e8e853e7ffc261df885d66bf3a782c71cba06c17df271f9e6/coverage-7.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:86a323a275e9e44cdf228af9b71c5030861d4d2610886ab920d9945672a81223", size = 215165, upload-time = "2025-05-23T11:38:45.148Z" }, - { url = "https://files.pythonhosted.org/packages/c4/73/e2528bf1237d2448f882bbebaec5c3500ef07301816c5c63464b9da4d88a/coverage-7.8.2-cp312-cp312-win_arm64.whl", hash = "sha256:820157de3a589e992689ffcda8639fbabb313b323d26388d02e154164c57b07f", size = 213548, upload-time = "2025-05-23T11:38:46.74Z" }, - { url = "https://files.pythonhosted.org/packages/1a/93/eb6400a745ad3b265bac36e8077fdffcf0268bdbbb6c02b7220b624c9b31/coverage-7.8.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ea561010914ec1c26ab4188aef8b1567272ef6de096312716f90e5baa79ef8ca", size = 211898, upload-time = "2025-05-23T11:38:49.066Z" }, - { url = "https://files.pythonhosted.org/packages/1b/7c/bdbf113f92683024406a1cd226a199e4200a2001fc85d6a6e7e299e60253/coverage-7.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cb86337a4fcdd0e598ff2caeb513ac604d2f3da6d53df2c8e368e07ee38e277d", size = 212171, upload-time = "2025-05-23T11:38:51.207Z" }, - { url = 
"https://files.pythonhosted.org/packages/91/22/594513f9541a6b88eb0dba4d5da7d71596dadef6b17a12dc2c0e859818a9/coverage-7.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26a4636ddb666971345541b59899e969f3b301143dd86b0ddbb570bd591f1e85", size = 245564, upload-time = "2025-05-23T11:38:52.857Z" }, - { url = "https://files.pythonhosted.org/packages/1f/f4/2860fd6abeebd9f2efcfe0fd376226938f22afc80c1943f363cd3c28421f/coverage-7.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5040536cf9b13fb033f76bcb5e1e5cb3b57c4807fef37db9e0ed129c6a094257", size = 242719, upload-time = "2025-05-23T11:38:54.529Z" }, - { url = "https://files.pythonhosted.org/packages/89/60/f5f50f61b6332451520e6cdc2401700c48310c64bc2dd34027a47d6ab4ca/coverage-7.8.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc67994df9bcd7e0150a47ef41278b9e0a0ea187caba72414b71dc590b99a108", size = 244634, upload-time = "2025-05-23T11:38:57.326Z" }, - { url = "https://files.pythonhosted.org/packages/3b/70/7f4e919039ab7d944276c446b603eea84da29ebcf20984fb1fdf6e602028/coverage-7.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e6c86888fd076d9e0fe848af0a2142bf606044dc5ceee0aa9eddb56e26895a0", size = 244824, upload-time = "2025-05-23T11:38:59.421Z" }, - { url = "https://files.pythonhosted.org/packages/26/45/36297a4c0cea4de2b2c442fe32f60c3991056c59cdc3cdd5346fbb995c97/coverage-7.8.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:684ca9f58119b8e26bef860db33524ae0365601492e86ba0b71d513f525e7050", size = 242872, upload-time = "2025-05-23T11:39:01.049Z" }, - { url = "https://files.pythonhosted.org/packages/a4/71/e041f1b9420f7b786b1367fa2a375703889ef376e0d48de9f5723fb35f11/coverage-7.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8165584ddedb49204c4e18da083913bdf6a982bfb558632a79bdaadcdafd0d48", size = 244179, upload-time = 
"2025-05-23T11:39:02.709Z" }, - { url = "https://files.pythonhosted.org/packages/bd/db/3c2bf49bdc9de76acf2491fc03130c4ffc51469ce2f6889d2640eb563d77/coverage-7.8.2-cp313-cp313-win32.whl", hash = "sha256:34759ee2c65362163699cc917bdb2a54114dd06d19bab860725f94ef45a3d9b7", size = 214393, upload-time = "2025-05-23T11:39:05.457Z" }, - { url = "https://files.pythonhosted.org/packages/c6/dc/947e75d47ebbb4b02d8babb1fad4ad381410d5bc9da7cfca80b7565ef401/coverage-7.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:2f9bc608fbafaee40eb60a9a53dbfb90f53cc66d3d32c2849dc27cf5638a21e3", size = 215194, upload-time = "2025-05-23T11:39:07.171Z" }, - { url = "https://files.pythonhosted.org/packages/90/31/a980f7df8a37eaf0dc60f932507fda9656b3a03f0abf188474a0ea188d6d/coverage-7.8.2-cp313-cp313-win_arm64.whl", hash = "sha256:9fe449ee461a3b0c7105690419d0b0aba1232f4ff6d120a9e241e58a556733f7", size = 213580, upload-time = "2025-05-23T11:39:08.862Z" }, - { url = "https://files.pythonhosted.org/packages/8a/6a/25a37dd90f6c95f59355629417ebcb74e1c34e38bb1eddf6ca9b38b0fc53/coverage-7.8.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8369a7c8ef66bded2b6484053749ff220dbf83cba84f3398c84c51a6f748a008", size = 212734, upload-time = "2025-05-23T11:39:11.109Z" }, - { url = "https://files.pythonhosted.org/packages/36/8b/3a728b3118988725f40950931abb09cd7f43b3c740f4640a59f1db60e372/coverage-7.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:159b81df53a5fcbc7d45dae3adad554fdbde9829a994e15227b3f9d816d00b36", size = 212959, upload-time = "2025-05-23T11:39:12.751Z" }, - { url = "https://files.pythonhosted.org/packages/53/3c/212d94e6add3a3c3f412d664aee452045ca17a066def8b9421673e9482c4/coverage-7.8.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6fcbbd35a96192d042c691c9e0c49ef54bd7ed865846a3c9d624c30bb67ce46", size = 257024, upload-time = "2025-05-23T11:39:15.569Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/40/afc03f0883b1e51bbe804707aae62e29c4e8c8bbc365c75e3e4ddeee9ead/coverage-7.8.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05364b9cc82f138cc86128dc4e2e1251c2981a2218bfcd556fe6b0fbaa3501be", size = 252867, upload-time = "2025-05-23T11:39:17.64Z" }, - { url = "https://files.pythonhosted.org/packages/18/a2/3699190e927b9439c6ded4998941a3c1d6fa99e14cb28d8536729537e307/coverage-7.8.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46d532db4e5ff3979ce47d18e2fe8ecad283eeb7367726da0e5ef88e4fe64740", size = 255096, upload-time = "2025-05-23T11:39:19.328Z" }, - { url = "https://files.pythonhosted.org/packages/b4/06/16e3598b9466456b718eb3e789457d1a5b8bfb22e23b6e8bbc307df5daf0/coverage-7.8.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4000a31c34932e7e4fa0381a3d6deb43dc0c8f458e3e7ea6502e6238e10be625", size = 256276, upload-time = "2025-05-23T11:39:21.077Z" }, - { url = "https://files.pythonhosted.org/packages/a7/d5/4b5a120d5d0223050a53d2783c049c311eea1709fa9de12d1c358e18b707/coverage-7.8.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:43ff5033d657cd51f83015c3b7a443287250dc14e69910577c3e03bd2e06f27b", size = 254478, upload-time = "2025-05-23T11:39:22.838Z" }, - { url = "https://files.pythonhosted.org/packages/ba/85/f9ecdb910ecdb282b121bfcaa32fa8ee8cbd7699f83330ee13ff9bbf1a85/coverage-7.8.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94316e13f0981cbbba132c1f9f365cac1d26716aaac130866ca812006f662199", size = 255255, upload-time = "2025-05-23T11:39:24.644Z" }, - { url = "https://files.pythonhosted.org/packages/50/63/2d624ac7d7ccd4ebbd3c6a9eba9d7fc4491a1226071360d59dd84928ccb2/coverage-7.8.2-cp313-cp313t-win32.whl", hash = "sha256:3f5673888d3676d0a745c3d0e16da338c5eea300cb1f4ada9c872981265e76d8", size = 215109, upload-time = "2025-05-23T11:39:26.722Z" }, - { url = 
"https://files.pythonhosted.org/packages/22/5e/7053b71462e970e869111c1853afd642212568a350eba796deefdfbd0770/coverage-7.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:2c08b05ee8d7861e45dc5a2cc4195c8c66dca5ac613144eb6ebeaff2d502e73d", size = 216268, upload-time = "2025-05-23T11:39:28.429Z" }, - { url = "https://files.pythonhosted.org/packages/07/69/afa41aa34147655543dbe96994f8a246daf94b361ccf5edfd5df62ce066a/coverage-7.8.2-cp313-cp313t-win_arm64.whl", hash = "sha256:1e1448bb72b387755e1ff3ef1268a06617afd94188164960dba8d0245a46004b", size = 214071, upload-time = "2025-05-23T11:39:30.55Z" }, - { url = "https://files.pythonhosted.org/packages/a0/1a/0b9c32220ad694d66062f571cc5cedfa9997b64a591e8a500bb63de1bd40/coverage-7.8.2-py3-none-any.whl", hash = "sha256:726f32ee3713f7359696331a18daf0c3b3a70bb0ae71141b9d3c52be7c595e32", size = 203623, upload-time = "2025-05-23T11:39:53.846Z" }, +version = "7.13.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/f9/e92df5e07f3fc8d4c7f9a0f146ef75446bf870351cd37b788cf5897f8079/coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd", size = 825862, upload-time = "2025-12-28T15:42:56.969Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/8a/87af46cccdfa78f53db747b09f5f9a21d5fc38d796834adac09b30a8ce74/coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3", size = 218927, upload-time = "2025-12-28T15:40:52.814Z" }, + { url = "https://files.pythonhosted.org/packages/82/a8/6e22fdc67242a4a5a153f9438d05944553121c8f4ba70cb072af4c41362e/coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e", size = 219288, upload-time = "2025-12-28T15:40:54.262Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/0a/853a76e03b0f7c4375e2ca025df45c918beb367f3e20a0a8e91967f6e96c/coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c", size = 250786, upload-time = "2025-12-28T15:40:56.059Z" }, + { url = "https://files.pythonhosted.org/packages/ea/b4/694159c15c52b9f7ec7adf49d50e5f8ee71d3e9ef38adb4445d13dd56c20/coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62", size = 253543, upload-time = "2025-12-28T15:40:57.585Z" }, + { url = "https://files.pythonhosted.org/packages/96/b2/7f1f0437a5c855f87e17cf5d0dc35920b6440ff2b58b1ba9788c059c26c8/coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968", size = 254635, upload-time = "2025-12-28T15:40:59.443Z" }, + { url = "https://files.pythonhosted.org/packages/e9/d1/73c3fdb8d7d3bddd9473c9c6a2e0682f09fc3dfbcb9c3f36412a7368bcab/coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e", size = 251202, upload-time = "2025-12-28T15:41:01.328Z" }, + { url = "https://files.pythonhosted.org/packages/66/3c/f0edf75dcc152f145d5598329e864bbbe04ab78660fe3e8e395f9fff010f/coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f", size = 252566, upload-time = "2025-12-28T15:41:03.319Z" }, + { url = "https://files.pythonhosted.org/packages/17/b3/e64206d3c5f7dcbceafd14941345a754d3dbc78a823a6ed526e23b9cdaab/coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee", size = 250711, upload-time = 
"2025-12-28T15:41:06.411Z" }, + { url = "https://files.pythonhosted.org/packages/dc/ad/28a3eb970a8ef5b479ee7f0c484a19c34e277479a5b70269dc652b730733/coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf", size = 250278, upload-time = "2025-12-28T15:41:08.285Z" }, + { url = "https://files.pythonhosted.org/packages/54/e3/c8f0f1a93133e3e1291ca76cbb63565bd4b5c5df63b141f539d747fff348/coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c", size = 252154, upload-time = "2025-12-28T15:41:09.969Z" }, + { url = "https://files.pythonhosted.org/packages/d0/bf/9939c5d6859c380e405b19e736321f1c7d402728792f4c752ad1adcce005/coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7", size = 221487, upload-time = "2025-12-28T15:41:11.468Z" }, + { url = "https://files.pythonhosted.org/packages/fa/dc/7282856a407c621c2aad74021680a01b23010bb8ebf427cf5eacda2e876f/coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6", size = 222299, upload-time = "2025-12-28T15:41:13.386Z" }, + { url = "https://files.pythonhosted.org/packages/10/79/176a11203412c350b3e9578620013af35bcdb79b651eb976f4a4b32044fa/coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c", size = 220941, upload-time = "2025-12-28T15:41:14.975Z" }, + { url = "https://files.pythonhosted.org/packages/a3/a4/e98e689347a1ff1a7f67932ab535cef82eb5e78f32a9e4132e114bbb3a0a/coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78", size = 218951, upload-time = "2025-12-28T15:41:16.653Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/33/7cbfe2bdc6e2f03d6b240d23dc45fdaf3fd270aaf2d640be77b7f16989ab/coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b", size = 219325, upload-time = "2025-12-28T15:41:18.609Z" }, + { url = "https://files.pythonhosted.org/packages/59/f6/efdabdb4929487baeb7cb2a9f7dac457d9356f6ad1b255be283d58b16316/coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd", size = 250309, upload-time = "2025-12-28T15:41:20.629Z" }, + { url = "https://files.pythonhosted.org/packages/12/da/91a52516e9d5aea87d32d1523f9cdcf7a35a3b298e6be05d6509ba3cfab2/coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992", size = 252907, upload-time = "2025-12-28T15:41:22.257Z" }, + { url = "https://files.pythonhosted.org/packages/75/38/f1ea837e3dc1231e086db1638947e00d264e7e8c41aa8ecacf6e1e0c05f4/coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4", size = 254148, upload-time = "2025-12-28T15:41:23.87Z" }, + { url = "https://files.pythonhosted.org/packages/7f/43/f4f16b881aaa34954ba446318dea6b9ed5405dd725dd8daac2358eda869a/coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a", size = 250515, upload-time = "2025-12-28T15:41:25.437Z" }, + { url = "https://files.pythonhosted.org/packages/84/34/8cba7f00078bd468ea914134e0144263194ce849ec3baad187ffb6203d1c/coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766", size = 252292, upload-time = 
"2025-12-28T15:41:28.459Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a4/cffac66c7652d84ee4ac52d3ccb94c015687d3b513f9db04bfcac2ac800d/coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4", size = 250242, upload-time = "2025-12-28T15:41:30.02Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/9a64d462263dde416f3c0067efade7b52b52796f489b1037a95b0dc389c9/coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398", size = 250068, upload-time = "2025-12-28T15:41:32.007Z" }, + { url = "https://files.pythonhosted.org/packages/69/c8/a8994f5fece06db7c4a97c8fc1973684e178599b42e66280dded0524ef00/coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784", size = 251846, upload-time = "2025-12-28T15:41:33.946Z" }, + { url = "https://files.pythonhosted.org/packages/cc/f7/91fa73c4b80305c86598a2d4e54ba22df6bf7d0d97500944af7ef155d9f7/coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461", size = 221512, upload-time = "2025-12-28T15:41:35.519Z" }, + { url = "https://files.pythonhosted.org/packages/45/0b/0768b4231d5a044da8f75e097a8714ae1041246bb765d6b5563bab456735/coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500", size = 222321, upload-time = "2025-12-28T15:41:37.371Z" }, + { url = "https://files.pythonhosted.org/packages/9b/b8/bdcb7253b7e85157282450262008f1366aa04663f3e3e4c30436f596c3e2/coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9", size = 220949, upload-time = "2025-12-28T15:41:39.553Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/52/f2be52cc445ff75ea8397948c96c1b4ee14f7f9086ea62fc929c5ae7b717/coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc", size = 219643, upload-time = "2025-12-28T15:41:41.567Z" }, + { url = "https://files.pythonhosted.org/packages/47/79/c85e378eaa239e2edec0c5523f71542c7793fe3340954eafb0bc3904d32d/coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a", size = 219997, upload-time = "2025-12-28T15:41:43.418Z" }, + { url = "https://files.pythonhosted.org/packages/fe/9b/b1ade8bfb653c0bbce2d6d6e90cc6c254cbb99b7248531cc76253cb4da6d/coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4", size = 261296, upload-time = "2025-12-28T15:41:45.207Z" }, + { url = "https://files.pythonhosted.org/packages/1f/af/ebf91e3e1a2473d523e87e87fd8581e0aa08741b96265730e2d79ce78d8d/coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6", size = 263363, upload-time = "2025-12-28T15:41:47.163Z" }, + { url = "https://files.pythonhosted.org/packages/c4/8b/fb2423526d446596624ac7fde12ea4262e66f86f5120114c3cfd0bb2befa/coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1", size = 265783, upload-time = "2025-12-28T15:41:49.03Z" }, + { url = "https://files.pythonhosted.org/packages/9b/26/ef2adb1e22674913b89f0fe7490ecadcef4a71fa96f5ced90c60ec358789/coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd", size = 260508, upload-time = 
"2025-12-28T15:41:51.035Z" }, + { url = "https://files.pythonhosted.org/packages/ce/7d/f0f59b3404caf662e7b5346247883887687c074ce67ba453ea08c612b1d5/coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c", size = 263357, upload-time = "2025-12-28T15:41:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/1a/b1/29896492b0b1a047604d35d6fa804f12818fa30cdad660763a5f3159e158/coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0", size = 260978, upload-time = "2025-12-28T15:41:54.589Z" }, + { url = "https://files.pythonhosted.org/packages/48/f2/971de1238a62e6f0a4128d37adadc8bb882ee96afbe03ff1570291754629/coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e", size = 259877, upload-time = "2025-12-28T15:41:56.263Z" }, + { url = "https://files.pythonhosted.org/packages/6a/fc/0474efcbb590ff8628830e9aaec5f1831594874360e3251f1fdec31d07a3/coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53", size = 262069, upload-time = "2025-12-28T15:41:58.093Z" }, + { url = "https://files.pythonhosted.org/packages/88/4f/3c159b7953db37a7b44c0eab8a95c37d1aa4257c47b4602c04022d5cb975/coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842", size = 222184, upload-time = "2025-12-28T15:41:59.763Z" }, + { url = "https://files.pythonhosted.org/packages/58/a5/6b57d28f81417f9335774f20679d9d13b9a8fb90cd6160957aa3b54a2379/coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2", size = 223250, upload-time = "2025-12-28T15:42:01.52Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/7c/160796f3b035acfbb58be80e02e484548595aa67e16a6345e7910ace0a38/coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09", size = 221521, upload-time = "2025-12-28T15:42:03.275Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8e/ba0e597560c6563fc0adb902fda6526df5d4aa73bb10adf0574d03bd2206/coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894", size = 218996, upload-time = "2025-12-28T15:42:04.978Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8e/764c6e116f4221dc7aa26c4061181ff92edb9c799adae6433d18eeba7a14/coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a", size = 219326, upload-time = "2025-12-28T15:42:06.691Z" }, + { url = "https://files.pythonhosted.org/packages/4f/a6/6130dc6d8da28cdcbb0f2bf8865aeca9b157622f7c0031e48c6cf9a0e591/coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f", size = 250374, upload-time = "2025-12-28T15:42:08.786Z" }, + { url = "https://files.pythonhosted.org/packages/82/2b/783ded568f7cd6b677762f780ad338bf4b4750205860c17c25f7c708995e/coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909", size = 252882, upload-time = "2025-12-28T15:42:10.515Z" }, + { url = "https://files.pythonhosted.org/packages/cd/b2/9808766d082e6a4d59eb0cc881a57fc1600eb2c5882813eefff8254f71b5/coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4", size = 254218, upload-time = "2025-12-28T15:42:12.208Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/ea/52a985bb447c871cb4d2e376e401116520991b597c85afdde1ea9ef54f2c/coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75", size = 250391, upload-time = "2025-12-28T15:42:14.21Z" }, + { url = "https://files.pythonhosted.org/packages/7f/1d/125b36cc12310718873cfc8209ecfbc1008f14f4f5fa0662aa608e579353/coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9", size = 252239, upload-time = "2025-12-28T15:42:16.292Z" }, + { url = "https://files.pythonhosted.org/packages/6a/16/10c1c164950cade470107f9f14bbac8485f8fb8515f515fca53d337e4a7f/coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465", size = 250196, upload-time = "2025-12-28T15:42:18.54Z" }, + { url = "https://files.pythonhosted.org/packages/2a/c6/cd860fac08780c6fd659732f6ced1b40b79c35977c1356344e44d72ba6c4/coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864", size = 250008, upload-time = "2025-12-28T15:42:20.365Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/a8c58d3d38f82a5711e1e0a67268362af48e1a03df27c03072ac30feefcf/coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9", size = 251671, upload-time = "2025-12-28T15:42:22.114Z" }, + { url = "https://files.pythonhosted.org/packages/f0/bc/fd4c1da651d037a1e3d53e8cb3f8182f4b53271ffa9a95a2e211bacc0349/coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5", size = 221777, upload-time = "2025-12-28T15:42:23.919Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/50/71acabdc8948464c17e90b5ffd92358579bd0910732c2a1c9537d7536aa6/coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a", size = 222592, upload-time = "2025-12-28T15:42:25.619Z" }, + { url = "https://files.pythonhosted.org/packages/f7/c8/a6fb943081bb0cc926499c7907731a6dc9efc2cbdc76d738c0ab752f1a32/coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0", size = 221169, upload-time = "2025-12-28T15:42:27.629Z" }, + { url = "https://files.pythonhosted.org/packages/16/61/d5b7a0a0e0e40d62e59bc8c7aa1afbd86280d82728ba97f0673b746b78e2/coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a", size = 219730, upload-time = "2025-12-28T15:42:29.306Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2c/8881326445fd071bb49514d1ce97d18a46a980712b51fee84f9ab42845b4/coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6", size = 220001, upload-time = "2025-12-28T15:42:31.319Z" }, + { url = "https://files.pythonhosted.org/packages/b5/d7/50de63af51dfa3a7f91cc37ad8fcc1e244b734232fbc8b9ab0f3c834a5cd/coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673", size = 261370, upload-time = "2025-12-28T15:42:32.992Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2c/d31722f0ec918fd7453b2758312729f645978d212b410cd0f7c2aed88a94/coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5", size = 263485, upload-time = "2025-12-28T15:42:34.759Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/7a/2c114fa5c5fc08ba0777e4aec4c97e0b4a1afcb69c75f1f54cff78b073ab/coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d", size = 265890, upload-time = "2025-12-28T15:42:36.517Z" }, + { url = "https://files.pythonhosted.org/packages/65/d9/f0794aa1c74ceabc780fe17f6c338456bbc4e96bd950f2e969f48ac6fb20/coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8", size = 260445, upload-time = "2025-12-28T15:42:38.646Z" }, + { url = "https://files.pythonhosted.org/packages/49/23/184b22a00d9bb97488863ced9454068c79e413cb23f472da6cbddc6cfc52/coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486", size = 263357, upload-time = "2025-12-28T15:42:40.788Z" }, + { url = "https://files.pythonhosted.org/packages/7d/bd/58af54c0c9199ea4190284f389005779d7daf7bf3ce40dcd2d2b2f96da69/coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564", size = 260959, upload-time = "2025-12-28T15:42:42.808Z" }, + { url = "https://files.pythonhosted.org/packages/4b/2a/6839294e8f78a4891bf1df79d69c536880ba2f970d0ff09e7513d6e352e9/coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7", size = 259792, upload-time = "2025-12-28T15:42:44.818Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c3/528674d4623283310ad676c5af7414b9850ab6d55c2300e8aa4b945ec554/coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416", size = 262123, upload-time = "2025-12-28T15:42:47.108Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/c5/8c0515692fb4c73ac379d8dc09b18eaf0214ecb76ea6e62467ba7a1556ff/coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f", size = 222562, upload-time = "2025-12-28T15:42:49.144Z" }, + { url = "https://files.pythonhosted.org/packages/05/0e/c0a0c4678cb30dac735811db529b321d7e1c9120b79bd728d4f4d6b010e9/coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79", size = 223670, upload-time = "2025-12-28T15:42:51.218Z" }, + { url = "https://files.pythonhosted.org/packages/f5/5f/b177aa0011f354abf03a8f30a85032686d290fdeed4222b27d36b4372a50/coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4", size = 221707, upload-time = "2025-12-28T15:42:53.034Z" }, + { url = "https://files.pythonhosted.org/packages/cc/48/d9f421cb8da5afaa1a64570d9989e00fb7955e6acddc5a12979f7666ef60/coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573", size = 210722, upload-time = "2025-12-28T15:42:54.901Z" }, ] [[package]] name = "cryptography" -version = "44.0.3" +version = "46.0.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/53/d6/1411ab4d6108ab167d06254c5be517681f1e331f90edf1379895bcb87020/cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053", size = 711096, upload-time = "2025-05-02T19:36:04.667Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/08/53/c776d80e9d26441bb3868457909b4e74dd9ccabd182e10b2b0ae7a07e265/cryptography-44.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88", size = 6670281, 
upload-time = "2025-05-02T19:34:50.665Z" }, - { url = "https://files.pythonhosted.org/packages/6a/06/af2cf8d56ef87c77319e9086601bef621bedf40f6f59069e1b6d1ec498c5/cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137", size = 3959305, upload-time = "2025-05-02T19:34:53.042Z" }, - { url = "https://files.pythonhosted.org/packages/ae/01/80de3bec64627207d030f47bf3536889efee8913cd363e78ca9a09b13c8e/cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c", size = 4171040, upload-time = "2025-05-02T19:34:54.675Z" }, - { url = "https://files.pythonhosted.org/packages/bd/48/bb16b7541d207a19d9ae8b541c70037a05e473ddc72ccb1386524d4f023c/cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76", size = 3963411, upload-time = "2025-05-02T19:34:56.61Z" }, - { url = "https://files.pythonhosted.org/packages/42/b2/7d31f2af5591d217d71d37d044ef5412945a8a8e98d5a2a8ae4fd9cd4489/cryptography-44.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af653022a0c25ef2e3ffb2c673a50e5a0d02fecc41608f4954176f1933b12359", size = 3689263, upload-time = "2025-05-02T19:34:58.591Z" }, - { url = "https://files.pythonhosted.org/packages/25/50/c0dfb9d87ae88ccc01aad8eb93e23cfbcea6a6a106a9b63a7b14c1f93c75/cryptography-44.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:157f1f3b8d941c2bd8f3ffee0af9b049c9665c39d3da9db2dc338feca5e98a43", size = 4196198, upload-time = "2025-05-02T19:35:00.988Z" }, - { url = "https://files.pythonhosted.org/packages/66/c9/55c6b8794a74da652690c898cb43906310a3e4e4f6ee0b5f8b3b3e70c441/cryptography-44.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:c6cd67722619e4d55fdb42ead64ed8843d64638e9c07f4011163e46bc512cf01", size = 3966502, upload-time = 
"2025-05-02T19:35:03.091Z" }, - { url = "https://files.pythonhosted.org/packages/b6/f7/7cb5488c682ca59a02a32ec5f975074084db4c983f849d47b7b67cc8697a/cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d", size = 4196173, upload-time = "2025-05-02T19:35:05.018Z" }, - { url = "https://files.pythonhosted.org/packages/d2/0b/2f789a8403ae089b0b121f8f54f4a3e5228df756e2146efdf4a09a3d5083/cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904", size = 4087713, upload-time = "2025-05-02T19:35:07.187Z" }, - { url = "https://files.pythonhosted.org/packages/1d/aa/330c13655f1af398fc154089295cf259252f0ba5df93b4bc9d9c7d7f843e/cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44", size = 4299064, upload-time = "2025-05-02T19:35:08.879Z" }, - { url = "https://files.pythonhosted.org/packages/10/a8/8c540a421b44fd267a7d58a1fd5f072a552d72204a3f08194f98889de76d/cryptography-44.0.3-cp37-abi3-win32.whl", hash = "sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d", size = 2773887, upload-time = "2025-05-02T19:35:10.41Z" }, - { url = "https://files.pythonhosted.org/packages/b9/0d/c4b1657c39ead18d76bbd122da86bd95bdc4095413460d09544000a17d56/cryptography-44.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d", size = 3209737, upload-time = "2025-05-02T19:35:12.12Z" }, - { url = "https://files.pythonhosted.org/packages/34/a3/ad08e0bcc34ad436013458d7528e83ac29910943cea42ad7dd4141a27bbb/cryptography-44.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f", size = 6673501, upload-time = "2025-05-02T19:35:13.775Z" }, - { url = 
"https://files.pythonhosted.org/packages/b1/f0/7491d44bba8d28b464a5bc8cc709f25a51e3eac54c0a4444cf2473a57c37/cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759", size = 3960307, upload-time = "2025-05-02T19:35:15.917Z" }, - { url = "https://files.pythonhosted.org/packages/f7/c8/e5c5d0e1364d3346a5747cdcd7ecbb23ca87e6dea4f942a44e88be349f06/cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645", size = 4170876, upload-time = "2025-05-02T19:35:18.138Z" }, - { url = "https://files.pythonhosted.org/packages/73/96/025cb26fc351d8c7d3a1c44e20cf9a01e9f7cf740353c9c7a17072e4b264/cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2", size = 3964127, upload-time = "2025-05-02T19:35:19.864Z" }, - { url = "https://files.pythonhosted.org/packages/01/44/eb6522db7d9f84e8833ba3bf63313f8e257729cf3a8917379473fcfd6601/cryptography-44.0.3-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3f07943aa4d7dad689e3bb1638ddc4944cc5e0921e3c227486daae0e31a05e54", size = 3689164, upload-time = "2025-05-02T19:35:21.449Z" }, - { url = "https://files.pythonhosted.org/packages/68/fb/d61a4defd0d6cee20b1b8a1ea8f5e25007e26aeb413ca53835f0cae2bcd1/cryptography-44.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb90f60e03d563ca2445099edf605c16ed1d5b15182d21831f58460c48bffb93", size = 4198081, upload-time = "2025-05-02T19:35:23.187Z" }, - { url = "https://files.pythonhosted.org/packages/1b/50/457f6911d36432a8811c3ab8bd5a6090e8d18ce655c22820994913dd06ea/cryptography-44.0.3-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ab0b005721cc0039e885ac3503825661bd9810b15d4f374e473f8c89b7d5460c", size = 3967716, upload-time = "2025-05-02T19:35:25.426Z" }, - { url = 
"https://files.pythonhosted.org/packages/35/6e/dca39d553075980ccb631955c47b93d87d27f3596da8d48b1ae81463d915/cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f", size = 4197398, upload-time = "2025-05-02T19:35:27.678Z" }, - { url = "https://files.pythonhosted.org/packages/9b/9d/d1f2fe681eabc682067c66a74addd46c887ebacf39038ba01f8860338d3d/cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5", size = 4087900, upload-time = "2025-05-02T19:35:29.312Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f5/3599e48c5464580b73b236aafb20973b953cd2e7b44c7c2533de1d888446/cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b", size = 4301067, upload-time = "2025-05-02T19:35:31.547Z" }, - { url = "https://files.pythonhosted.org/packages/a7/6c/d2c48c8137eb39d0c193274db5c04a75dab20d2f7c3f81a7dcc3a8897701/cryptography-44.0.3-cp39-abi3-win32.whl", hash = "sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028", size = 2775467, upload-time = "2025-05-02T19:35:33.805Z" }, - { url = "https://files.pythonhosted.org/packages/c9/ad/51f212198681ea7b0deaaf8846ee10af99fba4e894f67b353524eab2bbe5/cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334", size = 3210375, upload-time = "2025-05-02T19:35:35.369Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, + { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, + { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, + { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, + { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, + { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, + { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, + { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, + { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, + { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, + { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, + { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, + { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, + { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, ] [[package]] name = "cyclopts" -version = "4.4.3" +version = "4.5.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -372,9 +503,9 @@ dependencies = [ { name = "rich" }, { name = "rich-rst" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8f/21/732453ae69d65d72fe37a34f8b1a455c72313b8b0a905b876da20ff7e81a/cyclopts-4.4.3.tar.gz", hash = "sha256:03797c71b49a39dcad8324d6655363056fb998e2ba0240940050331a7f63fe65", size = 159360, upload-time = "2025-12-28T18:57:03.831Z" } +sdist = { url = "https://files.pythonhosted.org/packages/13/7b/663f3285c1ac0e5d0854bd9db2c87caa6fa3d1a063185e3394a6cdca9151/cyclopts-4.5.0.tar.gz", hash = "sha256:717ac4235548b58d500baf7e688aa4d024caf0ee68f61a012ffd5e29db3099f9", size = 161980, upload-time = "2026-01-16T02:07:16.171Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/28/03f9b8fbf396b3f2eaf65a7ff441ba2fb7dd397109d563a4e556dc5b3efb/cyclopts-4.4.3-py3-none-any.whl", hash = "sha256:951611a9d4d88d9916716ae281faca9af1cb79b88bb4f22bd0192cff54e7dec6", size = 196707, upload-time = 
"2025-12-28T18:57:04.884Z" }, + { url = "https://files.pythonhosted.org/packages/12/a3/2e00fececc34a99ae3a5d5702a5dd29c5371e4ed016647301a2b9bcc1976/cyclopts-4.5.0-py3-none-any.whl", hash = "sha256:305b9aa90a9cd0916f0a450b43e50ad5df9c252680731a0719edfb9b20381bf5", size = 199772, upload-time = "2026-01-16T02:07:14.707Z" }, ] [[package]] @@ -388,20 +519,20 @@ wheels = [ [[package]] name = "distlib" -version = "0.3.9" +version = "0.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923, upload-time = "2024-10-09T18:35:47.551Z" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973, upload-time = "2024-10-09T18:35:44.272Z" }, + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, ] [[package]] name = "dnspython" -version = "2.7.0" +version = "2.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = 
"sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, + { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, ] [[package]] @@ -437,27 +568,27 @@ wheels = [ [[package]] name = "email-validator" -version = "2.2.0" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "dnspython" }, { name = "idna" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967, upload-time = "2024-06-20T11:30:30.034Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/22/900cb125c76b7aaa450ce02fd727f452243f2e91a61af068b40adba60ea9/email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426", size = 51238, upload-time = "2025-08-26T13:09:06.831Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521, upload-time = "2024-06-20T11:30:28.248Z" }, + { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604, upload-time = "2025-08-26T13:09:05.858Z" }, ] [[package]] name = "exceptiongroup" -version = "1.3.0" +version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, 
] [[package]] @@ -480,16 +611,18 @@ lua = [ [[package]] name = "fastmcp" -version = "2.14.2" +version = "2.14.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "authlib" }, { name = "cyclopts" }, { name = "exceptiongroup" }, { name = "httpx" }, + { name = "jsonref" }, { name = "jsonschema-path" }, { name = "mcp" }, { name = "openapi-pydantic" }, + { name = "packaging" }, { name = "platformdirs" }, { name = "py-key-value-aio", extra = ["disk", "keyring", "memory"] }, { name = "pydantic", extra = ["email"] }, @@ -500,139 +633,172 @@ dependencies = [ { name = "uvicorn" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d1/1e/e3528227688c248283f6d86869b1e900563ffc223eff00f4f923d2750365/fastmcp-2.14.2.tar.gz", hash = "sha256:bd23d1b808b6f446444f10114dac468b11bfb9153ed78628f5619763d0cf573e", size = 8272966, upload-time = "2025-12-31T15:26:13.433Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/a9/a57d5e5629ebd4ef82b495a7f8e346ce29ef80cc86b15c8c40570701b94d/fastmcp-2.14.4.tar.gz", hash = "sha256:c01f19845c2adda0a70d59525c9193be64a6383014c8d40ce63345ac664053ff", size = 8302239, upload-time = "2026-01-22T17:29:37.024Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/67/8456d39484fcb7afd0defed21918e773ed59a98b39e5b633328527c88367/fastmcp-2.14.2-py3-none-any.whl", hash = "sha256:e33cd622e1ebd5110af6a981804525b6cd41072e3c7d68268ed69ef3be651aca", size = 413279, upload-time = "2025-12-31T15:26:11.178Z" }, + { url = "https://files.pythonhosted.org/packages/3e/41/c4d407e2218fd60d84acb6cc5131d28ff876afecf325e3fd9d27b8318581/fastmcp-2.14.4-py3-none-any.whl", hash = "sha256:5858cff5e4c8ea8107f9bca2609d71d6256e0fce74495912f6e51625e466c49a", size = 417788, upload-time = "2026-01-22T17:29:35.159Z" }, ] [[package]] name = "filelock" -version = "3.18.0" +version = "3.20.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, + { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" }, ] [[package]] name = "frozenlist" -version = "1.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, - { url = 
"https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, - { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = "2025-06-09T23:00:44.793Z" }, - { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, upload-time = "2025-06-09T23:00:46.125Z" }, - { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, - { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, - { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, - { url = 
"https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, upload-time = "2025-06-09T23:00:52.855Z" }, - { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, - { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, - { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, - { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, - { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, - { 
url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, - { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = "2025-06-09T23:01:05.095Z" }, - { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, upload-time = "2025-06-09T23:01:06.54Z" }, - { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" }, - { url = "https://files.pythonhosted.org/packages/24/90/6b2cebdabdbd50367273c20ff6b57a3dfa89bd0762de02c3a1eb42cb6462/frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee", size = 79791, upload-time = "2025-06-09T23:01:09.368Z" }, - { url = "https://files.pythonhosted.org/packages/83/2e/5b70b6a3325363293fe5fc3ae74cdcbc3e996c2a11dde2fd9f1fb0776d19/frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d", size = 47165, upload-time = "2025-06-09T23:01:10.653Z" }, - { url = 
"https://files.pythonhosted.org/packages/f4/25/a0895c99270ca6966110f4ad98e87e5662eab416a17e7fd53c364bf8b954/frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43", size = 45881, upload-time = "2025-06-09T23:01:12.296Z" }, - { url = "https://files.pythonhosted.org/packages/19/7c/71bb0bbe0832793c601fff68cd0cf6143753d0c667f9aec93d3c323f4b55/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d", size = 232409, upload-time = "2025-06-09T23:01:13.641Z" }, - { url = "https://files.pythonhosted.org/packages/c0/45/ed2798718910fe6eb3ba574082aaceff4528e6323f9a8570be0f7028d8e9/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee", size = 225132, upload-time = "2025-06-09T23:01:15.264Z" }, - { url = "https://files.pythonhosted.org/packages/ba/e2/8417ae0f8eacb1d071d4950f32f229aa6bf68ab69aab797b72a07ea68d4f/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb", size = 237638, upload-time = "2025-06-09T23:01:16.752Z" }, - { url = "https://files.pythonhosted.org/packages/f8/b7/2ace5450ce85f2af05a871b8c8719b341294775a0a6c5585d5e6170f2ce7/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f", size = 233539, upload-time = "2025-06-09T23:01:18.202Z" }, - { url = "https://files.pythonhosted.org/packages/46/b9/6989292c5539553dba63f3c83dc4598186ab2888f67c0dc1d917e6887db6/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60", size = 215646, 
upload-time = "2025-06-09T23:01:19.649Z" }, - { url = "https://files.pythonhosted.org/packages/72/31/bc8c5c99c7818293458fe745dab4fd5730ff49697ccc82b554eb69f16a24/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00", size = 232233, upload-time = "2025-06-09T23:01:21.175Z" }, - { url = "https://files.pythonhosted.org/packages/59/52/460db4d7ba0811b9ccb85af996019f5d70831f2f5f255f7cc61f86199795/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b", size = 227996, upload-time = "2025-06-09T23:01:23.098Z" }, - { url = "https://files.pythonhosted.org/packages/ba/c9/f4b39e904c03927b7ecf891804fd3b4df3db29b9e487c6418e37988d6e9d/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c", size = 242280, upload-time = "2025-06-09T23:01:24.808Z" }, - { url = "https://files.pythonhosted.org/packages/b8/33/3f8d6ced42f162d743e3517781566b8481322be321b486d9d262adf70bfb/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949", size = 217717, upload-time = "2025-06-09T23:01:26.28Z" }, - { url = "https://files.pythonhosted.org/packages/3e/e8/ad683e75da6ccef50d0ab0c2b2324b32f84fc88ceee778ed79b8e2d2fe2e/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca", size = 236644, upload-time = "2025-06-09T23:01:27.887Z" }, - { url = "https://files.pythonhosted.org/packages/b2/14/8d19ccdd3799310722195a72ac94ddc677541fb4bef4091d8e7775752360/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b", size = 238879, upload-time = "2025-06-09T23:01:29.524Z" }, - { 
url = "https://files.pythonhosted.org/packages/ce/13/c12bf657494c2fd1079a48b2db49fa4196325909249a52d8f09bc9123fd7/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e", size = 232502, upload-time = "2025-06-09T23:01:31.287Z" }, - { url = "https://files.pythonhosted.org/packages/d7/8b/e7f9dfde869825489382bc0d512c15e96d3964180c9499efcec72e85db7e/frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1", size = 39169, upload-time = "2025-06-09T23:01:35.503Z" }, - { url = "https://files.pythonhosted.org/packages/35/89/a487a98d94205d85745080a37860ff5744b9820a2c9acbcdd9440bfddf98/frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba", size = 43219, upload-time = "2025-06-09T23:01:36.784Z" }, - { url = "https://files.pythonhosted.org/packages/56/d5/5c4cf2319a49eddd9dd7145e66c4866bdc6f3dbc67ca3d59685149c11e0d/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d", size = 84345, upload-time = "2025-06-09T23:01:38.295Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/ec2c1e1dc16b85bc9d526009961953df9cec8481b6886debb36ec9107799/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d", size = 48880, upload-time = "2025-06-09T23:01:39.887Z" }, - { url = "https://files.pythonhosted.org/packages/69/86/f9596807b03de126e11e7d42ac91e3d0b19a6599c714a1989a4e85eeefc4/frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b", size = 48498, upload-time = "2025-06-09T23:01:41.318Z" }, - { url = 
"https://files.pythonhosted.org/packages/5e/cb/df6de220f5036001005f2d726b789b2c0b65f2363b104bbc16f5be8084f8/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146", size = 292296, upload-time = "2025-06-09T23:01:42.685Z" }, - { url = "https://files.pythonhosted.org/packages/83/1f/de84c642f17c8f851a2905cee2dae401e5e0daca9b5ef121e120e19aa825/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74", size = 273103, upload-time = "2025-06-09T23:01:44.166Z" }, - { url = "https://files.pythonhosted.org/packages/88/3c/c840bfa474ba3fa13c772b93070893c6e9d5c0350885760376cbe3b6c1b3/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1", size = 292869, upload-time = "2025-06-09T23:01:45.681Z" }, - { url = "https://files.pythonhosted.org/packages/a6/1c/3efa6e7d5a39a1d5ef0abeb51c48fb657765794a46cf124e5aca2c7a592c/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1", size = 291467, upload-time = "2025-06-09T23:01:47.234Z" }, - { url = "https://files.pythonhosted.org/packages/4f/00/d5c5e09d4922c395e2f2f6b79b9a20dab4b67daaf78ab92e7729341f61f6/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384", size = 266028, upload-time = "2025-06-09T23:01:48.819Z" }, - { url = "https://files.pythonhosted.org/packages/4e/27/72765be905619dfde25a7f33813ac0341eb6b076abede17a2e3fbfade0cb/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb", size = 284294, upload-time = "2025-06-09T23:01:50.394Z" }, - { url = "https://files.pythonhosted.org/packages/88/67/c94103a23001b17808eb7dd1200c156bb69fb68e63fcf0693dde4cd6228c/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c", size = 281898, upload-time = "2025-06-09T23:01:52.234Z" }, - { url = "https://files.pythonhosted.org/packages/42/34/a3e2c00c00f9e2a9db5653bca3fec306349e71aff14ae45ecc6d0951dd24/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65", size = 290465, upload-time = "2025-06-09T23:01:53.788Z" }, - { url = "https://files.pythonhosted.org/packages/bb/73/f89b7fbce8b0b0c095d82b008afd0590f71ccb3dee6eee41791cf8cd25fd/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3", size = 266385, upload-time = "2025-06-09T23:01:55.769Z" }, - { url = "https://files.pythonhosted.org/packages/cd/45/e365fdb554159462ca12df54bc59bfa7a9a273ecc21e99e72e597564d1ae/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657", size = 288771, upload-time = "2025-06-09T23:01:57.4Z" }, - { url = "https://files.pythonhosted.org/packages/00/11/47b6117002a0e904f004d70ec5194fe9144f117c33c851e3d51c765962d0/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104", size = 288206, upload-time = "2025-06-09T23:01:58.936Z" }, - { url = "https://files.pythonhosted.org/packages/40/37/5f9f3c3fd7f7746082ec67bcdc204db72dad081f4f83a503d33220a92973/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf", size = 282620, upload-time = 
"2025-06-09T23:02:00.493Z" }, - { url = "https://files.pythonhosted.org/packages/0b/31/8fbc5af2d183bff20f21aa743b4088eac4445d2bb1cdece449ae80e4e2d1/frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81", size = 43059, upload-time = "2025-06-09T23:02:02.072Z" }, - { url = "https://files.pythonhosted.org/packages/bb/ed/41956f52105b8dbc26e457c5705340c67c8cc2b79f394b79bffc09d0e938/frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e", size = 47516, upload-time = "2025-06-09T23:02:03.779Z" }, - { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = 
"2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" }, + { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833, upload-time = "2025-10-06T05:36:34.947Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, + { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = 
"sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, + { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" }, + { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, + { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, + { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = 
"2025-10-06T05:36:54.309Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, + { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, + { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, + { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = 
"2025-10-06T05:37:02.115Z" }, + { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, + { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, + { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" }, + { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" }, + { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" }, + { url = "https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = "2025-10-06T05:37:14.577Z" }, + { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130, upload-time = "2025-10-06T05:37:15.781Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" }, + { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" }, + { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = "2025-10-06T05:37:20.969Z" }, + { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = "2025-10-06T05:37:22.252Z" }, + { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = 
"2025-10-06T05:37:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, + { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, + { url = "https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, + { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" }, + { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = "2025-10-06T05:37:37.663Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", size = 294289, upload-time = "2025-10-06T05:37:39.261Z" }, + { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" }, + { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" }, + { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = 
"sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" }, + { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" }, + { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, + { url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" }, + { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, ] [[package]] name = "greenlet" -version = "3.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c7/e5/40dbda2736893e3e53d25838e0f19a2b417dfc122b9989c91918db30b5d3/greenlet-3.3.0.tar.gz", hash = "sha256:a82bb225a4e9e4d653dd2fb7b8b2d36e4fb25bc0165422a11e48b88e9e6f78fb", size = 190651, upload-time = "2025-12-04T14:49:44.05Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/0a/a3871375c7b9727edaeeea994bfff7c63ff7804c9829c19309ba2e058807/greenlet-3.3.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b01548f6e0b9e9784a2c99c5651e5dc89ffcbe870bc5fb2e5ef864e9cc6b5dcb", size = 276379, upload-time = "2025-12-04T14:23:30.498Z" }, - { url = "https://files.pythonhosted.org/packages/43/ab/7ebfe34dce8b87be0d11dae91acbf76f7b8246bf9d6b319c741f99fa59c6/greenlet-3.3.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:349345b770dc88f81506c6861d22a6ccd422207829d2c854ae2af8025af303e3", size = 597294, upload-time = "2025-12-04T14:50:06.847Z" }, - { url = "https://files.pythonhosted.org/packages/a4/39/f1c8da50024feecd0793dbd5e08f526809b8ab5609224a2da40aad3a7641/greenlet-3.3.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e8e18ed6995e9e2c0b4ed264d2cf89260ab3ac7e13555b8032b25a74c6d18655", size = 607742, upload-time = "2025-12-04T14:57:42.349Z" }, - { url = "https://files.pythonhosted.org/packages/77/cb/43692bcd5f7a0da6ec0ec6d58ee7cddb606d055ce94a62ac9b1aa481e969/greenlet-3.3.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c024b1e5696626890038e34f76140ed1daf858e37496d33f2af57f06189e70d7", size = 622297, upload-time = 
"2025-12-04T15:07:13.552Z" }, - { url = "https://files.pythonhosted.org/packages/75/b0/6bde0b1011a60782108c01de5913c588cf51a839174538d266de15e4bf4d/greenlet-3.3.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:047ab3df20ede6a57c35c14bf5200fcf04039d50f908270d3f9a7a82064f543b", size = 609885, upload-time = "2025-12-04T14:26:02.368Z" }, - { url = "https://files.pythonhosted.org/packages/49/0e/49b46ac39f931f59f987b7cd9f34bfec8ef81d2a1e6e00682f55be5de9f4/greenlet-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d9ad37fc657b1102ec880e637cccf20191581f75c64087a549e66c57e1ceb53", size = 1567424, upload-time = "2025-12-04T15:04:23.757Z" }, - { url = "https://files.pythonhosted.org/packages/05/f5/49a9ac2dff7f10091935def9165c90236d8f175afb27cbed38fb1d61ab6b/greenlet-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83cd0e36932e0e7f36a64b732a6f60c2fc2df28c351bae79fbaf4f8092fe7614", size = 1636017, upload-time = "2025-12-04T14:27:29.688Z" }, - { url = "https://files.pythonhosted.org/packages/6c/79/3912a94cf27ec503e51ba493692d6db1e3cd8ac7ac52b0b47c8e33d7f4f9/greenlet-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7a34b13d43a6b78abf828a6d0e87d3385680eaf830cd60d20d52f249faabf39", size = 301964, upload-time = "2025-12-04T14:36:58.316Z" }, - { url = "https://files.pythonhosted.org/packages/02/2f/28592176381b9ab2cafa12829ba7b472d177f3acc35d8fbcf3673d966fff/greenlet-3.3.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a1e41a81c7e2825822f4e068c48cb2196002362619e2d70b148f20a831c00739", size = 275140, upload-time = "2025-12-04T14:23:01.282Z" }, - { url = "https://files.pythonhosted.org/packages/2c/80/fbe937bf81e9fca98c981fe499e59a3f45df2a04da0baa5c2be0dca0d329/greenlet-3.3.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f515a47d02da4d30caaa85b69474cec77b7929b2e936ff7fb853d42f4bf8808", size = 599219, upload-time = "2025-12-04T14:50:08.309Z" }, - { url = 
"https://files.pythonhosted.org/packages/c2/ff/7c985128f0514271b8268476af89aee6866df5eec04ac17dcfbc676213df/greenlet-3.3.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7d2d9fd66bfadf230b385fdc90426fcd6eb64db54b40c495b72ac0feb5766c54", size = 610211, upload-time = "2025-12-04T14:57:43.968Z" }, - { url = "https://files.pythonhosted.org/packages/79/07/c47a82d881319ec18a4510bb30463ed6891f2ad2c1901ed5ec23d3de351f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30a6e28487a790417d036088b3bcb3f3ac7d8babaa7d0139edbaddebf3af9492", size = 624311, upload-time = "2025-12-04T15:07:14.697Z" }, - { url = "https://files.pythonhosted.org/packages/fd/8e/424b8c6e78bd9837d14ff7df01a9829fc883ba2ab4ea787d4f848435f23f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:087ea5e004437321508a8d6f20efc4cfec5e3c30118e1417ea96ed1d93950527", size = 612833, upload-time = "2025-12-04T14:26:03.669Z" }, - { url = "https://files.pythonhosted.org/packages/b5/ba/56699ff9b7c76ca12f1cdc27a886d0f81f2189c3455ff9f65246780f713d/greenlet-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab97cf74045343f6c60a39913fa59710e4bd26a536ce7ab2397adf8b27e67c39", size = 1567256, upload-time = "2025-12-04T15:04:25.276Z" }, - { url = "https://files.pythonhosted.org/packages/1e/37/f31136132967982d698c71a281a8901daf1a8fbab935dce7c0cf15f942cc/greenlet-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5375d2e23184629112ca1ea89a53389dddbffcf417dad40125713d88eb5f96e8", size = 1636483, upload-time = "2025-12-04T14:27:30.804Z" }, - { url = "https://files.pythonhosted.org/packages/7e/71/ba21c3fb8c5dce83b8c01f458a42e99ffdb1963aeec08fff5a18588d8fd7/greenlet-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:9ee1942ea19550094033c35d25d20726e4f1c40d59545815e1128ac58d416d38", size = 301833, upload-time = "2025-12-04T14:32:23.929Z" }, - { url = 
"https://files.pythonhosted.org/packages/d7/7c/f0a6d0ede2c7bf092d00bc83ad5bafb7e6ec9b4aab2fbdfa6f134dc73327/greenlet-3.3.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:60c2ef0f578afb3c8d92ea07ad327f9a062547137afe91f38408f08aacab667f", size = 275671, upload-time = "2025-12-04T14:23:05.267Z" }, - { url = "https://files.pythonhosted.org/packages/44/06/dac639ae1a50f5969d82d2e3dd9767d30d6dbdbab0e1a54010c8fe90263c/greenlet-3.3.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a5d554d0712ba1de0a6c94c640f7aeba3f85b3a6e1f2899c11c2c0428da9365", size = 646360, upload-time = "2025-12-04T14:50:10.026Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/0fb76fe6c5369fba9bf98529ada6f4c3a1adf19e406a47332245ef0eb357/greenlet-3.3.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3a898b1e9c5f7307ebbde4102908e6cbfcb9ea16284a3abe15cab996bee8b9b3", size = 658160, upload-time = "2025-12-04T14:57:45.41Z" }, - { url = "https://files.pythonhosted.org/packages/93/79/d2c70cae6e823fac36c3bbc9077962105052b7ef81db2f01ec3b9bf17e2b/greenlet-3.3.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dcd2bdbd444ff340e8d6bdf54d2f206ccddbb3ccfdcd3c25bf4afaa7b8f0cf45", size = 671388, upload-time = "2025-12-04T15:07:15.789Z" }, - { url = "https://files.pythonhosted.org/packages/b8/14/bab308fc2c1b5228c3224ec2bf928ce2e4d21d8046c161e44a2012b5203e/greenlet-3.3.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5773edda4dc00e173820722711d043799d3adb4f01731f40619e07ea2750b955", size = 660166, upload-time = "2025-12-04T14:26:05.099Z" }, - { url = "https://files.pythonhosted.org/packages/4b/d2/91465d39164eaa0085177f61983d80ffe746c5a1860f009811d498e7259c/greenlet-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ac0549373982b36d5fd5d30beb8a7a33ee541ff98d2b502714a09f1169f31b55", size = 1615193, upload-time = "2025-12-04T15:04:27.041Z" }, - { url = 
"https://files.pythonhosted.org/packages/42/1b/83d110a37044b92423084d52d5d5a3b3a73cafb51b547e6d7366ff62eff1/greenlet-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d198d2d977460358c3b3a4dc844f875d1adb33817f0613f663a656f463764ccc", size = 1683653, upload-time = "2025-12-04T14:27:32.366Z" }, - { url = "https://files.pythonhosted.org/packages/7c/9a/9030e6f9aa8fd7808e9c31ba4c38f87c4f8ec324ee67431d181fe396d705/greenlet-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:73f51dd0e0bdb596fb0417e475fa3c5e32d4c83638296e560086b8d7da7c4170", size = 305387, upload-time = "2025-12-04T14:26:51.063Z" }, - { url = "https://files.pythonhosted.org/packages/a0/66/bd6317bc5932accf351fc19f177ffba53712a202f9df10587da8df257c7e/greenlet-3.3.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d6ed6f85fae6cdfdb9ce04c9bf7a08d666cfcfb914e7d006f44f840b46741931", size = 282638, upload-time = "2025-12-04T14:25:20.941Z" }, - { url = "https://files.pythonhosted.org/packages/30/cf/cc81cb030b40e738d6e69502ccbd0dd1bced0588e958f9e757945de24404/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9125050fcf24554e69c4cacb086b87b3b55dc395a8b3ebe6487b045b2614388", size = 651145, upload-time = "2025-12-04T14:50:11.039Z" }, - { url = "https://files.pythonhosted.org/packages/9c/ea/1020037b5ecfe95ca7df8d8549959baceb8186031da83d5ecceff8b08cd2/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:87e63ccfa13c0a0f6234ed0add552af24cc67dd886731f2261e46e241608bee3", size = 654236, upload-time = "2025-12-04T14:57:47.007Z" }, - { url = "https://files.pythonhosted.org/packages/69/cc/1e4bae2e45ca2fa55299f4e85854606a78ecc37fead20d69322f96000504/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2662433acbca297c9153a4023fe2161c8dcfdcc91f10433171cf7e7d94ba2221", size = 662506, upload-time = "2025-12-04T15:07:16.906Z" }, - { url = 
"https://files.pythonhosted.org/packages/57/b9/f8025d71a6085c441a7eaff0fd928bbb275a6633773667023d19179fe815/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3c6e9b9c1527a78520357de498b0e709fb9e2f49c3a513afd5a249007261911b", size = 653783, upload-time = "2025-12-04T14:26:06.225Z" }, - { url = "https://files.pythonhosted.org/packages/f6/c7/876a8c7a7485d5d6b5c6821201d542ef28be645aa024cfe1145b35c120c1/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:286d093f95ec98fdd92fcb955003b8a3d054b4e2cab3e2707a5039e7b50520fd", size = 1614857, upload-time = "2025-12-04T15:04:28.484Z" }, - { url = "https://files.pythonhosted.org/packages/4f/dc/041be1dff9f23dac5f48a43323cd0789cb798342011c19a248d9c9335536/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c10513330af5b8ae16f023e8ddbfb486ab355d04467c4679c5cfe4659975dd9", size = 1676034, upload-time = "2025-12-04T14:27:33.531Z" }, +version = "3.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/99/1cd3411c56a410994669062bd73dd58270c00cc074cac15f385a1fd91f8a/greenlet-3.3.1.tar.gz", hash = "sha256:41848f3230b58c08bb43dee542e74a2a2e34d3c59dc3076cec9151aeeedcae98", size = 184690, upload-time = "2026-01-23T15:31:02.076Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/c8/9d76a66421d1ae24340dfae7e79c313957f6e3195c144d2c73333b5bfe34/greenlet-3.3.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:7e806ca53acf6d15a888405880766ec84721aa4181261cd11a457dfe9a7a4975", size = 276443, upload-time = "2026-01-23T15:30:10.066Z" }, + { url = "https://files.pythonhosted.org/packages/81/99/401ff34bb3c032d1f10477d199724f5e5f6fbfb59816ad1455c79c1eb8e7/greenlet-3.3.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d842c94b9155f1c9b3058036c24ffb8ff78b428414a19792b2380be9cecf4f36", size = 597359, upload-time = "2026-01-23T16:00:57.394Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/bc/4dcc0871ed557792d304f50be0f7487a14e017952ec689effe2180a6ff35/greenlet-3.3.1-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:20fedaadd422fa02695f82093f9a98bad3dab5fcda793c658b945fcde2ab27ba", size = 607805, upload-time = "2026-01-23T16:05:28.068Z" }, + { url = "https://files.pythonhosted.org/packages/3b/cd/7a7ca57588dac3389e97f7c9521cb6641fd8b6602faf1eaa4188384757df/greenlet-3.3.1-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c620051669fd04ac6b60ebc70478210119c56e2d5d5df848baec4312e260e4ca", size = 622363, upload-time = "2026-01-23T16:15:54.754Z" }, + { url = "https://files.pythonhosted.org/packages/cf/05/821587cf19e2ce1f2b24945d890b164401e5085f9d09cbd969b0c193cd20/greenlet-3.3.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14194f5f4305800ff329cbf02c5fcc88f01886cadd29941b807668a45f0d2336", size = 609947, upload-time = "2026-01-23T15:32:51.004Z" }, + { url = "https://files.pythonhosted.org/packages/a4/52/ee8c46ed9f8babaa93a19e577f26e3d28a519feac6350ed6f25f1afee7e9/greenlet-3.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7b2fe4150a0cf59f847a67db8c155ac36aed89080a6a639e9f16df5d6c6096f1", size = 1567487, upload-time = "2026-01-23T16:04:22.125Z" }, + { url = "https://files.pythonhosted.org/packages/8f/7c/456a74f07029597626f3a6db71b273a3632aecb9afafeeca452cfa633197/greenlet-3.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:49f4ad195d45f4a66a0eb9c1ba4832bb380570d361912fa3554746830d332149", size = 1636087, upload-time = "2026-01-23T15:33:47.486Z" }, + { url = "https://files.pythonhosted.org/packages/34/2f/5e0e41f33c69655300a5e54aeb637cf8ff57f1786a3aba374eacc0228c1d/greenlet-3.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:cc98b9c4e4870fa983436afa999d4eb16b12872fab7071423d5262fa7120d57a", size = 227156, upload-time = "2026-01-23T15:34:34.808Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/ab/717c58343cf02c5265b531384b248787e04d8160b8afe53d9eec053d7b44/greenlet-3.3.1-cp312-cp312-win_arm64.whl", hash = "sha256:bfb2d1763d777de5ee495c85309460f6fd8146e50ec9d0ae0183dbf6f0a829d1", size = 226403, upload-time = "2026-01-23T15:31:39.372Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ab/d26750f2b7242c2b90ea2ad71de70cfcd73a948a49513188a0fc0d6fc15a/greenlet-3.3.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:7ab327905cabb0622adca5971e488064e35115430cec2c35a50fd36e72a315b3", size = 275205, upload-time = "2026-01-23T15:30:24.556Z" }, + { url = "https://files.pythonhosted.org/packages/10/d3/be7d19e8fad7c5a78eeefb2d896a08cd4643e1e90c605c4be3b46264998f/greenlet-3.3.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:65be2f026ca6a176f88fb935ee23c18333ccea97048076aef4db1ef5bc0713ac", size = 599284, upload-time = "2026-01-23T16:00:58.584Z" }, + { url = "https://files.pythonhosted.org/packages/ae/21/fe703aaa056fdb0f17e5afd4b5c80195bbdab701208918938bd15b00d39b/greenlet-3.3.1-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7a3ae05b3d225b4155bda56b072ceb09d05e974bc74be6c3fc15463cf69f33fd", size = 610274, upload-time = "2026-01-23T16:05:29.312Z" }, + { url = "https://files.pythonhosted.org/packages/06/00/95df0b6a935103c0452dad2203f5be8377e551b8466a29650c4c5a5af6cc/greenlet-3.3.1-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:12184c61e5d64268a160226fb4818af4df02cfead8379d7f8b99a56c3a54ff3e", size = 624375, upload-time = "2026-01-23T16:15:55.915Z" }, + { url = "https://files.pythonhosted.org/packages/cb/86/5c6ab23bb3c28c21ed6bebad006515cfe08b04613eb105ca0041fecca852/greenlet-3.3.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6423481193bbbe871313de5fd06a082f2649e7ce6e08015d2a76c1e9186ca5b3", size = 612904, upload-time = "2026-01-23T15:32:52.317Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/f3/7949994264e22639e40718c2daf6f6df5169bf48fb038c008a489ec53a50/greenlet-3.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:33a956fe78bbbda82bfc95e128d61129b32d66bcf0a20a1f0c08aa4839ffa951", size = 1567316, upload-time = "2026-01-23T16:04:23.316Z" }, + { url = "https://files.pythonhosted.org/packages/8d/6e/d73c94d13b6465e9f7cd6231c68abde838bb22408596c05d9059830b7872/greenlet-3.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b065d3284be43728dd280f6f9a13990b56470b81be20375a207cdc814a983f2", size = 1636549, upload-time = "2026-01-23T15:33:48.643Z" }, + { url = "https://files.pythonhosted.org/packages/5e/b3/c9c23a6478b3bcc91f979ce4ca50879e4d0b2bd7b9a53d8ecded719b92e2/greenlet-3.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:27289986f4e5b0edec7b5a91063c109f0276abb09a7e9bdab08437525977c946", size = 227042, upload-time = "2026-01-23T15:33:58.216Z" }, + { url = "https://files.pythonhosted.org/packages/90/e7/824beda656097edee36ab15809fd063447b200cc03a7f6a24c34d520bc88/greenlet-3.3.1-cp313-cp313-win_arm64.whl", hash = "sha256:2f080e028001c5273e0b42690eaf359aeef9cb1389da0f171ea51a5dc3c7608d", size = 226294, upload-time = "2026-01-23T15:30:52.73Z" }, + { url = "https://files.pythonhosted.org/packages/ae/fb/011c7c717213182caf78084a9bea51c8590b0afda98001f69d9f853a495b/greenlet-3.3.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:bd59acd8529b372775cd0fcbc5f420ae20681c5b045ce25bd453ed8455ab99b5", size = 275737, upload-time = "2026-01-23T15:32:16.889Z" }, + { url = "https://files.pythonhosted.org/packages/41/2e/a3a417d620363fdbb08a48b1dd582956a46a61bf8fd27ee8164f9dfe87c2/greenlet-3.3.1-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b31c05dd84ef6871dd47120386aed35323c944d86c3d91a17c4b8d23df62f15b", size = 646422, upload-time = "2026-01-23T16:01:00.354Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/09/c6c4a0db47defafd2d6bab8ddfe47ad19963b4e30f5bed84d75328059f8c/greenlet-3.3.1-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:02925a0bfffc41e542c70aa14c7eda3593e4d7e274bfcccca1827e6c0875902e", size = 658219, upload-time = "2026-01-23T16:05:30.956Z" }, + { url = "https://files.pythonhosted.org/packages/e2/89/b95f2ddcc5f3c2bc09c8ee8d77be312df7f9e7175703ab780f2014a0e781/greenlet-3.3.1-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3e0f3878ca3a3ff63ab4ea478585942b53df66ddde327b59ecb191b19dbbd62d", size = 671455, upload-time = "2026-01-23T16:15:57.232Z" }, + { url = "https://files.pythonhosted.org/packages/80/38/9d42d60dffb04b45f03dbab9430898352dba277758640751dc5cc316c521/greenlet-3.3.1-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34a729e2e4e4ffe9ae2408d5ecaf12f944853f40ad724929b7585bca808a9d6f", size = 660237, upload-time = "2026-01-23T15:32:53.967Z" }, + { url = "https://files.pythonhosted.org/packages/96/61/373c30b7197f9e756e4c81ae90a8d55dc3598c17673f91f4d31c3c689c3f/greenlet-3.3.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:aec9ab04e82918e623415947921dea15851b152b822661cce3f8e4393c3df683", size = 1615261, upload-time = "2026-01-23T16:04:25.066Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d3/ca534310343f5945316f9451e953dcd89b36fe7a19de652a1dc5a0eeef3f/greenlet-3.3.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:71c767cf281a80d02b6c1bdc41c9468e1f5a494fb11bc8688c360524e273d7b1", size = 1683719, upload-time = "2026-01-23T15:33:50.61Z" }, + { url = "https://files.pythonhosted.org/packages/52/cb/c21a3fd5d2c9c8b622e7bede6d6d00e00551a5ee474ea6d831b5f567a8b4/greenlet-3.3.1-cp314-cp314-win_amd64.whl", hash = "sha256:96aff77af063b607f2489473484e39a0bbae730f2ea90c9e5606c9b73c44174a", size = 228125, upload-time = "2026-01-23T15:32:45.265Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/8e/8a2db6d11491837af1de64b8aff23707c6e85241be13c60ed399a72e2ef8/greenlet-3.3.1-cp314-cp314-win_arm64.whl", hash = "sha256:b066e8b50e28b503f604fa538adc764a638b38cf8e81e025011d26e8a627fa79", size = 227519, upload-time = "2026-01-23T15:31:47.284Z" }, + { url = "https://files.pythonhosted.org/packages/28/24/cbbec49bacdcc9ec652a81d3efef7b59f326697e7edf6ed775a5e08e54c2/greenlet-3.3.1-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:3e63252943c921b90abb035ebe9de832c436401d9c45f262d80e2d06cc659242", size = 282706, upload-time = "2026-01-23T15:33:05.525Z" }, + { url = "https://files.pythonhosted.org/packages/86/2e/4f2b9323c144c4fe8842a4e0d92121465485c3c2c5b9e9b30a52e80f523f/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:76e39058e68eb125de10c92524573924e827927df5d3891fbc97bd55764a8774", size = 651209, upload-time = "2026-01-23T16:01:01.517Z" }, + { url = "https://files.pythonhosted.org/packages/d9/87/50ca60e515f5bb55a2fbc5f0c9b5b156de7d2fc51a0a69abc9d23914a237/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c9f9d5e7a9310b7a2f416dd13d2e3fd8b42d803968ea580b7c0f322ccb389b97", size = 654300, upload-time = "2026-01-23T16:05:32.199Z" }, + { url = "https://files.pythonhosted.org/packages/7c/25/c51a63f3f463171e09cb586eb64db0861eb06667ab01a7968371a24c4f3b/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b9721549a95db96689458a1e0ae32412ca18776ed004463df3a9299c1b257ab", size = 662574, upload-time = "2026-01-23T16:15:58.364Z" }, + { url = "https://files.pythonhosted.org/packages/1d/94/74310866dfa2b73dd08659a3d18762f83985ad3281901ba0ee9a815194fb/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:92497c78adf3ac703b57f1e3813c2d874f27f71a178f9ea5887855da413cd6d2", size = 653842, upload-time = "2026-01-23T15:32:55.671Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/43/8bf0ffa3d498eeee4c58c212a3905dd6146c01c8dc0b0a046481ca29b18c/greenlet-3.3.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ed6b402bc74d6557a705e197d47f9063733091ed6357b3de33619d8a8d93ac53", size = 1614917, upload-time = "2026-01-23T16:04:26.276Z" }, + { url = "https://files.pythonhosted.org/packages/89/90/a3be7a5f378fc6e84abe4dcfb2ba32b07786861172e502388b4c90000d1b/greenlet-3.3.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:59913f1e5ada20fde795ba906916aea25d442abcc0593fba7e26c92b7ad76249", size = 1676092, upload-time = "2026-01-23T15:33:52.176Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2b/98c7f93e6db9977aaee07eb1e51ca63bd5f779b900d362791d3252e60558/greenlet-3.3.1-cp314-cp314t-win_amd64.whl", hash = "sha256:301860987846c24cb8964bdec0e31a96ad4a2a801b41b4ef40963c1b44f33451", size = 233181, upload-time = "2026-01-23T15:33:00.29Z" }, ] [[package]] name = "h11" -version = "0.14.0" +version = "0.16.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418, upload-time = "2022-09-25T15:40:01.519Z" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259, upload-time = "2022-09-25T15:39:59.68Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, ] [[package]] name = "httpcore" -version = "1.0.8" +version = "1.0.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/45/ad3e1b4d448f22c0cff4f5692f5ed0666658578e358b8d58a19846048059/httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad", size = 85385, upload-time = "2025-04-11T14:42:46.661Z" } +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/8d/f052b1e336bb2c1fc7ed1aaed898aa570c0b61a09707b108979d9fc6e308/httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be", size = 78732, upload-time = "2025-04-11T14:42:44.896Z" }, + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, ] [[package]] @@ -652,29 +818,29 @@ wheels = [ [[package]] name = "httpx-sse" -version = "0.4.0" +version = "0.4.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = 
"sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624, upload-time = "2023-12-22T08:01:21.083Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819, upload-time = "2023-12-22T08:01:19.89Z" }, + { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" }, ] [[package]] name = "identify" -version = "2.6.12" +version = "2.6.16" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6", size = 99254, upload-time = "2025-05-23T20:37:53.3Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/8d/e8b97e6bd3fb6fb271346f7981362f1e04d6a7463abd0de79e1fda17c067/identify-2.6.16.tar.gz", hash = "sha256:846857203b5511bbe94d5a352a48ef2359532bc8f6727b5544077a0dcfb24980", size = 99360, upload-time = "2026-01-12T18:58:58.201Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2", size = 99145, upload-time = 
"2025-05-23T20:37:51.495Z" }, + { url = "https://files.pythonhosted.org/packages/b8/58/40fbbcefeda82364720eba5cf2270f98496bdfa19ea75b4cccae79c698e6/identify-2.6.16-py2.py3-none-any.whl", hash = "sha256:391ee4d77741d994189522896270b787aed8670389bfd60f326d677d64a6dfb0", size = 99202, upload-time = "2026-01-12T18:58:56.627Z" }, ] [[package]] name = "idna" -version = "3.10" +version = "3.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] [[package]] @@ -691,25 +857,25 @@ wheels = [ [[package]] name = "iniconfig" -version = "2.1.0" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 
4793, upload-time = "2025-03-19T20:09:59.721Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, ] [[package]] name = "inquirer" -version = "3.4.0" +version = "3.4.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "blessed" }, { name = "editor" }, { name = "readchar" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f3/06/ef91eb8f3feafb736aa33dcb278fc9555d17861aa571b684715d095db24d/inquirer-3.4.0.tar.gz", hash = "sha256:8edc99c076386ee2d2204e5e3653c2488244e82cb197b2d498b3c1b5ffb25d0b", size = 14472, upload-time = "2024-08-12T12:03:43.83Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c1/79/165579fdcd3c2439503732ae76394bf77f5542f3dd18135b60e808e4813c/inquirer-3.4.1.tar.gz", hash = "sha256:60d169fddffe297e2f8ad54ab33698249ccfc3fc377dafb1e5cf01a0efb9cbe5", size = 14069, upload-time = "2025-08-02T18:36:27.901Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/b2/be907c8c0f8303bc4b10089f5470014c3bf3521e9b8d3decf3037fd94725/inquirer-3.4.0-py3-none-any.whl", hash = "sha256:bb0ec93c833e4ce7b51b98b1644b0a4d2bb39755c39787f6a504e4fee7a11b60", size = 18077, upload-time = 
"2024-08-12T12:03:41.589Z" }, + { url = "https://files.pythonhosted.org/packages/f0/fd/7c404169a3e04a908df0644893a331f253a7f221961f2b6c0cf44430ae5a/inquirer-3.4.1-py3-none-any.whl", hash = "sha256:717bf146d547b595d2495e7285fd55545cff85e5ce01decc7487d2ec6a605412", size = 18152, upload-time = "2025-08-02T18:36:26.753Z" }, ] [[package]] @@ -726,23 +892,23 @@ wheels = [ [[package]] name = "jaraco-context" -version = "6.0.1" +version = "6.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/df/ad/f3777b81bf0b6e7bc7514a1656d3e637b2e8e15fab2ce3235730b3e7a4e6/jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3", size = 13912, upload-time = "2024-08-20T03:39:27.358Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/9c/a788f5bb29c61e456b8ee52ce76dbdd32fd72cd73dd67bc95f42c7a8d13c/jaraco_context-6.1.0.tar.gz", hash = "sha256:129a341b0a85a7db7879e22acd66902fda67882db771754574338898b2d5d86f", size = 15850, upload-time = "2026-01-13T02:53:53.847Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/db/0c52c4cf5e4bd9f5d7135ec7669a3a767af21b3a308e1ed3674881e52b62/jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4", size = 6825, upload-time = "2024-08-20T03:39:25.966Z" }, + { url = "https://files.pythonhosted.org/packages/8d/48/aa685dbf1024c7bd82bede569e3a85f82c32fd3d79ba5fea578f0159571a/jaraco_context-6.1.0-py3-none-any.whl", hash = "sha256:a43b5ed85815223d0d3cfdb6d7ca0d2bc8946f28f30b6f3216bda070f68badda", size = 7065, upload-time = "2026-01-13T02:53:53.031Z" }, ] [[package]] name = "jaraco-functools" -version = "4.1.0" +version = "4.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "more-itertools" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/ab/23/9894b3df5d0a6eb44611c36aec777823fc2e07740dabbd0b810e19594013/jaraco_functools-4.1.0.tar.gz", hash = "sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d", size = 19159, upload-time = "2024-09-27T19:47:09.122Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/27/056e0638a86749374d6f57d0b0db39f29509cce9313cf91bdc0ac4d91084/jaraco_functools-4.4.0.tar.gz", hash = "sha256:da21933b0417b89515562656547a77b4931f98176eb173644c0d35032a33d6bb", size = 19943, upload-time = "2025-12-21T09:29:43.6Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/4f/24b319316142c44283d7540e76c7b5a6dbd5db623abd86bb7b3491c21018/jaraco.functools-4.1.0-py3-none-any.whl", hash = "sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649", size = 10187, upload-time = "2024-09-27T19:47:07.14Z" }, + { url = "https://files.pythonhosted.org/packages/fd/c4/813bb09f0985cb21e959f21f2464169eca882656849adf727ac7bb7e1767/jaraco_functools-4.4.0-py3-none-any.whl", hash = "sha256:9eec1e36f45c818d9bf307c8948eb03b2b56cd44087b3cdc989abca1f20b9176", size = 10481, upload-time = "2025-12-21T09:29:42.27Z" }, ] [[package]] @@ -766,9 +932,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/27/e3/0e0014d6ab159d48189e92044ace13b1e1fe9aa3024ba9f4e8cf172aa7c2/jinxed-1.3.0-py2.py3-none-any.whl", hash = "sha256:b993189f39dc2d7504d802152671535b06d380b26d78070559551cbf92df4fc5", size = 33085, upload-time = "2024-07-31T22:39:17.426Z" }, ] +[[package]] +name = "jsonref" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/0d/c1f3277e90ccdb50d33ed5ba1ec5b3f0a242ed8c1b1a85d3afeb68464dca/jsonref-1.1.0.tar.gz", hash = "sha256:32fe8e1d85af0fdefbebce950af85590b22b60f9e95443176adbde4e1ecea552", size = 8814, upload-time = "2023-01-16T16:10:04.455Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/0c/ec/e1db9922bceb168197a558a2b8c03a7963f1afe93517ddd3cf99f202f996/jsonref-1.1.0-py3-none-any.whl", hash = "sha256:590dc7773df6c21cbf948b5dac07a72a251db28b0238ceecce0a2abfa8ec30a9", size = 9425, upload-time = "2023-01-16T16:10:02.255Z" }, +] + [[package]] name = "jsonschema" -version = "4.24.0" +version = "4.26.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -776,9 +951,9 @@ dependencies = [ { name = "referencing" }, { name = "rpds-py" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/d3/1cf5326b923a53515d8f3a2cd442e6d7e94fcc444716e879ea70a0ce3177/jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196", size = 353480, upload-time = "2025-05-26T18:48:10.459Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size = 366583, upload-time = "2026-01-07T13:41:07.246Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/3d/023389198f69c722d039351050738d6755376c8fd343e91dc493ea485905/jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d", size = 88709, upload-time = "2025-05-26T18:48:08.417Z" }, + { url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630, upload-time = "2026-01-07T13:41:05.306Z" }, ] [[package]] @@ -798,19 +973,19 @@ wheels = [ [[package]] name = "jsonschema-specifications" -version = "2025.4.1" +version = "2025.9.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "referencing" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513, upload-time = "2025-04-23T12:34:07.418Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" }, + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, ] [[package]] name = "keyring" -version = "25.6.0" +version = "25.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jaraco-classes" }, @@ -820,9 +995,9 @@ dependencies = [ { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, { name = "secretstorage", marker = "sys_platform == 'linux'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/70/09/d904a6e96f76ff214be59e7aa6ef7190008f52a0ab6689760a98de0bf37d/keyring-25.6.0.tar.gz", hash = "sha256:0b39998aa941431eb3d9b0d4b2460bc773b9df6fed7621c2dfb291a7e0187a66", size = 62750, upload-time = "2024-12-25T15:26:45.782Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/43/4b/674af6ef2f97d56f0ab5153bf0bfa28ccb6c3ed4d1babf4305449668807b/keyring-25.7.0.tar.gz", hash = "sha256:fe01bd85eb3f8fb3dd0405defdeac9a5b4f6f0439edbb3149577f244a2e8245b", size = 63516, upload-time = "2025-11-16T16:26:09.482Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d3/32/da7f44bcb1105d3e88a0b74ebdca50c59121d2ddf71c9e34ba47df7f3a56/keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd", size = 39085, upload-time = "2024-12-25T15:26:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/81/db/e655086b7f3a705df045bf0933bdd9c2f79bb3c97bfef1384598bb79a217/keyring-25.7.0-py3-none-any.whl", hash = "sha256:be4a0b195f149690c166e850609a477c532ddbfbaed96a404d4e43f8d5e2689f", size = 39160, upload-time = "2025-11-16T16:26:08.402Z" }, ] [[package]] @@ -872,7 +1047,7 @@ dev = [ [[package]] name = "linkedin-scraper" -version = "3.0.1" +version = "3.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiofiles" }, @@ -882,9 +1057,9 @@ dependencies = [ { name = "python-dotenv" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/31/ac/af65e5359fcdd08d0cc194674e67106ce40027d5f55142243887681e0462/linkedin_scraper-3.0.1.tar.gz", hash = "sha256:6e9c54fd6b78003d0be370bbfacb69b52bb023c7c07bcc9d8b508d94048ea058", size = 39638, upload-time = "2026-01-07T03:09:52.482Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/30/967d78a67bc974e65491582e23993ca078d47c7b634842af13c8422162b9/linkedin_scraper-3.1.0.tar.gz", hash = "sha256:830bd3a4c16aeb667f5a00c0eed7528c80e0b360016f4c8eecd9cebad0d8728e", size = 46636, upload-time = "2026-01-18T23:55:47.77Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/c5/7fc84e2fca5608b6c8eec36db4f14e8dd4e59a059da84deba94c49faa875/linkedin_scraper-3.0.1-py3-none-any.whl", hash = 
"sha256:e121f963d17e0fc1503a4fd1b7c37fb9ccdcfc587dae4ca3defc073a81aff522", size = 44724, upload-time = "2026-01-07T03:09:51.478Z" }, + { url = "https://files.pythonhosted.org/packages/3f/a7/ce6de57a4bd75bfadaa23fb8f3eaa0b86de779335c13be08f8bbf3846438/linkedin_scraper-3.1.0-py3-none-any.whl", hash = "sha256:1e3ad52cd858d25034cab5f82261bfe35451941faec6003714aff2e745939212", size = 52372, upload-time = "2026-01-18T23:55:45.745Z" }, ] [[package]] @@ -941,59 +1116,99 @@ wheels = [ [[package]] name = "lxml" -version = "6.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c5/ed/60eb6fa2923602fba988d9ca7c5cdbd7cf25faa795162ed538b527a35411/lxml-6.0.0.tar.gz", hash = "sha256:032e65120339d44cdc3efc326c9f660f5f7205f3a535c1fdbf898b29ea01fb72", size = 4096938, upload-time = "2025-06-26T16:28:19.373Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/89/c3/d01d735c298d7e0ddcedf6f028bf556577e5ab4f4da45175ecd909c79378/lxml-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78718d8454a6e928470d511bf8ac93f469283a45c354995f7d19e77292f26108", size = 8429515, upload-time = "2025-06-26T16:26:06.776Z" }, - { url = "https://files.pythonhosted.org/packages/06/37/0e3eae3043d366b73da55a86274a590bae76dc45aa004b7042e6f97803b1/lxml-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:84ef591495ffd3f9dcabffd6391db7bb70d7230b5c35ef5148354a134f56f2be", size = 4601387, upload-time = "2025-06-26T16:26:09.511Z" }, - { url = "https://files.pythonhosted.org/packages/a3/28/e1a9a881e6d6e29dda13d633885d13acb0058f65e95da67841c8dd02b4a8/lxml-6.0.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2930aa001a3776c3e2601cb8e0a15d21b8270528d89cc308be4843ade546b9ab", size = 5228928, upload-time = "2025-06-26T16:26:12.337Z" }, - { url = 
"https://files.pythonhosted.org/packages/9a/55/2cb24ea48aa30c99f805921c1c7860c1f45c0e811e44ee4e6a155668de06/lxml-6.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:219e0431ea8006e15005767f0351e3f7f9143e793e58519dc97fe9e07fae5563", size = 4952289, upload-time = "2025-06-28T18:47:25.602Z" }, - { url = "https://files.pythonhosted.org/packages/31/c0/b25d9528df296b9a3306ba21ff982fc5b698c45ab78b94d18c2d6ae71fd9/lxml-6.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bd5913b4972681ffc9718bc2d4c53cde39ef81415e1671ff93e9aa30b46595e7", size = 5111310, upload-time = "2025-06-28T18:47:28.136Z" }, - { url = "https://files.pythonhosted.org/packages/e9/af/681a8b3e4f668bea6e6514cbcb297beb6de2b641e70f09d3d78655f4f44c/lxml-6.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:390240baeb9f415a82eefc2e13285016f9c8b5ad71ec80574ae8fa9605093cd7", size = 5025457, upload-time = "2025-06-26T16:26:15.068Z" }, - { url = "https://files.pythonhosted.org/packages/99/b6/3a7971aa05b7be7dfebc7ab57262ec527775c2c3c5b2f43675cac0458cad/lxml-6.0.0-cp312-cp312-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d6e200909a119626744dd81bae409fc44134389e03fbf1d68ed2a55a2fb10991", size = 5657016, upload-time = "2025-07-03T19:19:06.008Z" }, - { url = "https://files.pythonhosted.org/packages/69/f8/693b1a10a891197143c0673fcce5b75fc69132afa81a36e4568c12c8faba/lxml-6.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca50bd612438258a91b5b3788c6621c1f05c8c478e7951899f492be42defc0da", size = 5257565, upload-time = "2025-06-26T16:26:17.906Z" }, - { url = "https://files.pythonhosted.org/packages/a8/96/e08ff98f2c6426c98c8964513c5dab8d6eb81dadcd0af6f0c538ada78d33/lxml-6.0.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:c24b8efd9c0f62bad0439283c2c795ef916c5a6b75f03c17799775c7ae3c0c9e", size = 4713390, upload-time = "2025-06-26T16:26:20.292Z" }, - { url = 
"https://files.pythonhosted.org/packages/a8/83/6184aba6cc94d7413959f6f8f54807dc318fdcd4985c347fe3ea6937f772/lxml-6.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:afd27d8629ae94c5d863e32ab0e1d5590371d296b87dae0a751fb22bf3685741", size = 5066103, upload-time = "2025-06-26T16:26:22.765Z" }, - { url = "https://files.pythonhosted.org/packages/ee/01/8bf1f4035852d0ff2e36a4d9aacdbcc57e93a6cd35a54e05fa984cdf73ab/lxml-6.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:54c4855eabd9fc29707d30141be99e5cd1102e7d2258d2892314cf4c110726c3", size = 4791428, upload-time = "2025-06-26T16:26:26.461Z" }, - { url = "https://files.pythonhosted.org/packages/29/31/c0267d03b16954a85ed6b065116b621d37f559553d9339c7dcc4943a76f1/lxml-6.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c907516d49f77f6cd8ead1322198bdfd902003c3c330c77a1c5f3cc32a0e4d16", size = 5678523, upload-time = "2025-07-03T19:19:09.837Z" }, - { url = "https://files.pythonhosted.org/packages/5c/f7/5495829a864bc5f8b0798d2b52a807c89966523140f3d6fa3a58ab6720ea/lxml-6.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36531f81c8214e293097cd2b7873f178997dae33d3667caaae8bdfb9666b76c0", size = 5281290, upload-time = "2025-06-26T16:26:29.406Z" }, - { url = "https://files.pythonhosted.org/packages/79/56/6b8edb79d9ed294ccc4e881f4db1023af56ba451909b9ce79f2a2cd7c532/lxml-6.0.0-cp312-cp312-win32.whl", hash = "sha256:690b20e3388a7ec98e899fd54c924e50ba6693874aa65ef9cb53de7f7de9d64a", size = 3613495, upload-time = "2025-06-26T16:26:31.588Z" }, - { url = "https://files.pythonhosted.org/packages/0b/1e/cc32034b40ad6af80b6fd9b66301fc0f180f300002e5c3eb5a6110a93317/lxml-6.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:310b719b695b3dd442cdfbbe64936b2f2e231bb91d998e99e6f0daf991a3eba3", size = 4014711, upload-time = "2025-06-26T16:26:33.723Z" }, - { url = "https://files.pythonhosted.org/packages/55/10/dc8e5290ae4c94bdc1a4c55865be7e1f31dfd857a88b21cbba68b5fea61b/lxml-6.0.0-cp312-cp312-win_arm64.whl", hash = 
"sha256:8cb26f51c82d77483cdcd2b4a53cda55bbee29b3c2f3ddeb47182a2a9064e4eb", size = 3674431, upload-time = "2025-06-26T16:26:35.959Z" }, - { url = "https://files.pythonhosted.org/packages/79/21/6e7c060822a3c954ff085e5e1b94b4a25757c06529eac91e550f3f5cd8b8/lxml-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6da7cd4f405fd7db56e51e96bff0865b9853ae70df0e6720624049da76bde2da", size = 8414372, upload-time = "2025-06-26T16:26:39.079Z" }, - { url = "https://files.pythonhosted.org/packages/a4/f6/051b1607a459db670fc3a244fa4f06f101a8adf86cda263d1a56b3a4f9d5/lxml-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b34339898bb556a2351a1830f88f751679f343eabf9cf05841c95b165152c9e7", size = 4593940, upload-time = "2025-06-26T16:26:41.891Z" }, - { url = "https://files.pythonhosted.org/packages/8e/74/dd595d92a40bda3c687d70d4487b2c7eff93fd63b568acd64fedd2ba00fe/lxml-6.0.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:51a5e4c61a4541bd1cd3ba74766d0c9b6c12d6a1a4964ef60026832aac8e79b3", size = 5214329, upload-time = "2025-06-26T16:26:44.669Z" }, - { url = "https://files.pythonhosted.org/packages/52/46/3572761efc1bd45fcafb44a63b3b0feeb5b3f0066886821e94b0254f9253/lxml-6.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d18a25b19ca7307045581b18b3ec9ead2b1db5ccd8719c291f0cd0a5cec6cb81", size = 4947559, upload-time = "2025-06-28T18:47:31.091Z" }, - { url = "https://files.pythonhosted.org/packages/94/8a/5e40de920e67c4f2eef9151097deb9b52d86c95762d8ee238134aff2125d/lxml-6.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d4f0c66df4386b75d2ab1e20a489f30dc7fd9a06a896d64980541506086be1f1", size = 5102143, upload-time = "2025-06-28T18:47:33.612Z" }, - { url = "https://files.pythonhosted.org/packages/7c/4b/20555bdd75d57945bdabfbc45fdb1a36a1a0ff9eae4653e951b2b79c9209/lxml-6.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:9f4b481b6cc3a897adb4279216695150bbe7a44c03daba3c894f49d2037e0a24", size = 5021931, upload-time = "2025-06-26T16:26:47.503Z" }, - { url = "https://files.pythonhosted.org/packages/b6/6e/cf03b412f3763d4ca23b25e70c96a74cfece64cec3addf1c4ec639586b13/lxml-6.0.0-cp313-cp313-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a78d6c9168f5bcb20971bf3329c2b83078611fbe1f807baadc64afc70523b3a", size = 5645469, upload-time = "2025-07-03T19:19:13.32Z" }, - { url = "https://files.pythonhosted.org/packages/d4/dd/39c8507c16db6031f8c1ddf70ed95dbb0a6d466a40002a3522c128aba472/lxml-6.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae06fbab4f1bb7db4f7c8ca9897dc8db4447d1a2b9bee78474ad403437bcc29", size = 5247467, upload-time = "2025-06-26T16:26:49.998Z" }, - { url = "https://files.pythonhosted.org/packages/4d/56/732d49def0631ad633844cfb2664563c830173a98d5efd9b172e89a4800d/lxml-6.0.0-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:1fa377b827ca2023244a06554c6e7dc6828a10aaf74ca41965c5d8a4925aebb4", size = 4720601, upload-time = "2025-06-26T16:26:52.564Z" }, - { url = "https://files.pythonhosted.org/packages/8f/7f/6b956fab95fa73462bca25d1ea7fc8274ddf68fb8e60b78d56c03b65278e/lxml-6.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1676b56d48048a62ef77a250428d1f31f610763636e0784ba67a9740823988ca", size = 5060227, upload-time = "2025-06-26T16:26:55.054Z" }, - { url = "https://files.pythonhosted.org/packages/97/06/e851ac2924447e8b15a294855caf3d543424364a143c001014d22c8ca94c/lxml-6.0.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:0e32698462aacc5c1cf6bdfebc9c781821b7e74c79f13e5ffc8bfe27c42b1abf", size = 4790637, upload-time = "2025-06-26T16:26:57.384Z" }, - { url = "https://files.pythonhosted.org/packages/06/d4/fd216f3cd6625022c25b336c7570d11f4a43adbaf0a56106d3d496f727a7/lxml-6.0.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4d6036c3a296707357efb375cfc24bb64cd955b9ec731abf11ebb1e40063949f", size = 
5662049, upload-time = "2025-07-03T19:19:16.409Z" }, - { url = "https://files.pythonhosted.org/packages/52/03/0e764ce00b95e008d76b99d432f1807f3574fb2945b496a17807a1645dbd/lxml-6.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7488a43033c958637b1a08cddc9188eb06d3ad36582cebc7d4815980b47e27ef", size = 5272430, upload-time = "2025-06-26T16:27:00.031Z" }, - { url = "https://files.pythonhosted.org/packages/5f/01/d48cc141bc47bc1644d20fe97bbd5e8afb30415ec94f146f2f76d0d9d098/lxml-6.0.0-cp313-cp313-win32.whl", hash = "sha256:5fcd7d3b1d8ecb91445bd71b9c88bdbeae528fefee4f379895becfc72298d181", size = 3612896, upload-time = "2025-06-26T16:27:04.251Z" }, - { url = "https://files.pythonhosted.org/packages/f4/87/6456b9541d186ee7d4cb53bf1b9a0d7f3b1068532676940fdd594ac90865/lxml-6.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:2f34687222b78fff795feeb799a7d44eca2477c3d9d3a46ce17d51a4f383e32e", size = 4013132, upload-time = "2025-06-26T16:27:06.415Z" }, - { url = "https://files.pythonhosted.org/packages/b7/42/85b3aa8f06ca0d24962f8100f001828e1f1f1a38c954c16e71154ed7d53a/lxml-6.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:21db1ec5525780fd07251636eb5f7acb84003e9382c72c18c542a87c416ade03", size = 3672642, upload-time = "2025-06-26T16:27:09.888Z" }, +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887, upload-time = "2025-09-22T04:01:17.265Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818, upload-time = "2025-09-22T04:01:19.688Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807, upload-time = "2025-09-22T04:01:21.487Z" }, + { url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179, upload-time = "2025-09-22T04:01:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044, upload-time = "2025-09-22T04:01:25.118Z" }, + { url = "https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685, upload-time = "2025-09-22T04:01:27.398Z" }, + { url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127, upload-time = "2025-09-22T04:01:29.629Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958, upload-time = "2025-09-22T04:01:31.535Z" }, + { url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541, upload-time = "2025-09-22T04:01:33.801Z" }, + { url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426, upload-time = "2025-09-22T04:01:35.639Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917, upload-time = "2025-09-22T04:01:37.448Z" }, + { url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795, upload-time = "2025-09-22T04:01:39.165Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759, upload-time = "2025-09-22T04:01:41.506Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666, upload-time = "2025-09-22T04:01:43.363Z" }, + { url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989, upload-time = "2025-09-22T04:01:45.215Z" }, + { url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456, upload-time = "2025-09-22T04:01:48.243Z" }, + { url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793, upload-time = "2025-09-22T04:01:50.042Z" }, + { url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836, upload-time = "2025-09-22T04:01:52.145Z" }, + { url = "https://files.pythonhosted.org/packages/53/fd/4e8f0540608977aea078bf6d79f128e0e2c2bba8af1acf775c30baa70460/lxml-6.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9b33d21594afab46f37ae58dfadd06636f154923c4e8a4d754b0127554eb2e77", size = 8648494, upload-time = "2025-09-22T04:01:54.242Z" }, + { url = "https://files.pythonhosted.org/packages/5d/f4/2a94a3d3dfd6c6b433501b8d470a1960a20ecce93245cf2db1706adf6c19/lxml-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:6c8963287d7a4c5c9a432ff487c52e9c5618667179c18a204bdedb27310f022f", size = 4661146, upload-time = "2025-09-22T04:01:56.282Z" }, + { url = "https://files.pythonhosted.org/packages/25/2e/4efa677fa6b322013035d38016f6ae859d06cac67437ca7dc708a6af7028/lxml-6.0.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1941354d92699fb5ffe6ed7b32f9649e43c2feb4b97205f75866f7d21aa91452", size = 4946932, upload-time = "2025-09-22T04:01:58.989Z" }, + { url = "https://files.pythonhosted.org/packages/ce/0f/526e78a6d38d109fdbaa5049c62e1d32fdd70c75fb61c4eadf3045d3d124/lxml-6.0.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb2f6ca0ae2d983ded09357b84af659c954722bbf04dea98030064996d156048", size = 5100060, upload-time = "2025-09-22T04:02:00.812Z" }, + { url = "https://files.pythonhosted.org/packages/81/76/99de58d81fa702cc0ea7edae4f4640416c2062813a00ff24bd70ac1d9c9b/lxml-6.0.2-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb2a12d704f180a902d7fa778c6d71f36ceb7b0d317f34cdc76a5d05aa1dd1df", size = 5019000, upload-time = "2025-09-22T04:02:02.671Z" }, + { url = "https://files.pythonhosted.org/packages/b5/35/9e57d25482bc9a9882cb0037fdb9cc18f4b79d85df94fa9d2a89562f1d25/lxml-6.0.2-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:6ec0e3f745021bfed19c456647f0298d60a24c9ff86d9d051f52b509663feeb1", size = 5348496, upload-time = "2025-09-22T04:02:04.904Z" }, + { url = "https://files.pythonhosted.org/packages/a6/8e/cb99bd0b83ccc3e8f0f528e9aa1f7a9965dfec08c617070c5db8d63a87ce/lxml-6.0.2-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:846ae9a12d54e368933b9759052d6206a9e8b250291109c48e350c1f1f49d916", size = 5643779, upload-time = "2025-09-22T04:02:06.689Z" }, + { url = "https://files.pythonhosted.org/packages/d0/34/9e591954939276bb679b73773836c6684c22e56d05980e31d52a9a8deb18/lxml-6.0.2-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:ef9266d2aa545d7374938fb5c484531ef5a2ec7f2d573e62f8ce722c735685fd", size = 5244072, upload-time = "2025-09-22T04:02:08.587Z" }, + { url = "https://files.pythonhosted.org/packages/8d/27/b29ff065f9aaca443ee377aff699714fcbffb371b4fce5ac4ca759e436d5/lxml-6.0.2-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:4077b7c79f31755df33b795dc12119cb557a0106bfdab0d2c2d97bd3cf3dffa6", size = 4718675, upload-time = "2025-09-22T04:02:10.783Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/f756f9c2cd27caa1a6ef8c32ae47aadea697f5c2c6d07b0dae133c244fbe/lxml-6.0.2-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a7c5d5e5f1081955358533be077166ee97ed2571d6a66bdba6ec2f609a715d1a", size = 5255171, upload-time = "2025-09-22T04:02:12.631Z" }, + { url = "https://files.pythonhosted.org/packages/61/46/bb85ea42d2cb1bd8395484fd72f38e3389611aa496ac7772da9205bbda0e/lxml-6.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8f8d0cbd0674ee89863a523e6994ac25fd5be9c8486acfc3e5ccea679bad2679", size = 5057175, upload-time = "2025-09-22T04:02:14.718Z" }, + { url = "https://files.pythonhosted.org/packages/95/0c/443fc476dcc8e41577f0af70458c50fe299a97bb6b7505bb1ae09aa7f9ac/lxml-6.0.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2cbcbf6d6e924c28f04a43f3b6f6e272312a090f269eff68a2982e13e5d57659", size = 4785688, upload-time = "2025-09-22T04:02:16.957Z" }, + { url = "https://files.pythonhosted.org/packages/48/78/6ef0b359d45bb9697bc5a626e1992fa5d27aa3f8004b137b2314793b50a0/lxml-6.0.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dfb874cfa53340009af6bdd7e54ebc0d21012a60a4e65d927c2e477112e63484", size = 5660655, upload-time = "2025-09-22T04:02:18.815Z" }, + { url = "https://files.pythonhosted.org/packages/ff/ea/e1d33808f386bc1339d08c0dcada6e4712d4ed8e93fcad5f057070b7988a/lxml-6.0.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:fb8dae0b6b8b7f9e96c26fdd8121522ce5de9bb5538010870bd538683d30e9a2", size = 5247695, upload-time = 
"2025-09-22T04:02:20.593Z" }, + { url = "https://files.pythonhosted.org/packages/4f/47/eba75dfd8183673725255247a603b4ad606f4ae657b60c6c145b381697da/lxml-6.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:358d9adae670b63e95bc59747c72f4dc97c9ec58881d4627fe0120da0f90d314", size = 5269841, upload-time = "2025-09-22T04:02:22.489Z" }, + { url = "https://files.pythonhosted.org/packages/76/04/5c5e2b8577bc936e219becb2e98cdb1aca14a4921a12995b9d0c523502ae/lxml-6.0.2-cp313-cp313-win32.whl", hash = "sha256:e8cd2415f372e7e5a789d743d133ae474290a90b9023197fd78f32e2dc6873e2", size = 3610700, upload-time = "2025-09-22T04:02:24.465Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0a/4643ccc6bb8b143e9f9640aa54e38255f9d3b45feb2cbe7ae2ca47e8782e/lxml-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:b30d46379644fbfc3ab81f8f82ae4de55179414651f110a1514f0b1f8f6cb2d7", size = 4010347, upload-time = "2025-09-22T04:02:26.286Z" }, + { url = "https://files.pythonhosted.org/packages/31/ef/dcf1d29c3f530577f61e5fe2f1bd72929acf779953668a8a47a479ae6f26/lxml-6.0.2-cp313-cp313-win_arm64.whl", hash = "sha256:13dcecc9946dca97b11b7c40d29fba63b55ab4170d3c0cf8c0c164343b9bfdcf", size = 3671248, upload-time = "2025-09-22T04:02:27.918Z" }, + { url = "https://files.pythonhosted.org/packages/03/15/d4a377b385ab693ce97b472fe0c77c2b16ec79590e688b3ccc71fba19884/lxml-6.0.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:b0c732aa23de8f8aec23f4b580d1e52905ef468afb4abeafd3fec77042abb6fe", size = 8659801, upload-time = "2025-09-22T04:02:30.113Z" }, + { url = "https://files.pythonhosted.org/packages/c8/e8/c128e37589463668794d503afaeb003987373c5f94d667124ffd8078bbd9/lxml-6.0.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4468e3b83e10e0317a89a33d28f7aeba1caa4d1a6fd457d115dd4ffe90c5931d", size = 4659403, upload-time = "2025-09-22T04:02:32.119Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/ce/74903904339decdf7da7847bb5741fc98a5451b42fc419a86c0c13d26fe2/lxml-6.0.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:abd44571493973bad4598a3be7e1d807ed45aa2adaf7ab92ab7c62609569b17d", size = 4966974, upload-time = "2025-09-22T04:02:34.155Z" }, + { url = "https://files.pythonhosted.org/packages/1f/d3/131dec79ce61c5567fecf82515bd9bc36395df42501b50f7f7f3bd065df0/lxml-6.0.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:370cd78d5855cfbffd57c422851f7d3864e6ae72d0da615fca4dad8c45d375a5", size = 5102953, upload-time = "2025-09-22T04:02:36.054Z" }, + { url = "https://files.pythonhosted.org/packages/3a/ea/a43ba9bb750d4ffdd885f2cd333572f5bb900cd2408b67fdda07e85978a0/lxml-6.0.2-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:901e3b4219fa04ef766885fb40fa516a71662a4c61b80c94d25336b4934b71c0", size = 5055054, upload-time = "2025-09-22T04:02:38.154Z" }, + { url = "https://files.pythonhosted.org/packages/60/23/6885b451636ae286c34628f70a7ed1fcc759f8d9ad382d132e1c8d3d9bfd/lxml-6.0.2-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:a4bf42d2e4cf52c28cc1812d62426b9503cdb0c87a6de81442626aa7d69707ba", size = 5352421, upload-time = "2025-09-22T04:02:40.413Z" }, + { url = "https://files.pythonhosted.org/packages/48/5b/fc2ddfc94ddbe3eebb8e9af6e3fd65e2feba4967f6a4e9683875c394c2d8/lxml-6.0.2-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2c7fdaa4d7c3d886a42534adec7cfac73860b89b4e5298752f60aa5984641a0", size = 5673684, upload-time = "2025-09-22T04:02:42.288Z" }, + { url = "https://files.pythonhosted.org/packages/29/9c/47293c58cc91769130fbf85531280e8cc7868f7fbb6d92f4670071b9cb3e/lxml-6.0.2-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98a5e1660dc7de2200b00d53fa00bcd3c35a3608c305d45a7bbcaf29fa16e83d", size = 5252463, upload-time = "2025-09-22T04:02:44.165Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/da/ba6eceb830c762b48e711ded880d7e3e89fc6c7323e587c36540b6b23c6b/lxml-6.0.2-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:dc051506c30b609238d79eda75ee9cab3e520570ec8219844a72a46020901e37", size = 4698437, upload-time = "2025-09-22T04:02:46.524Z" }, + { url = "https://files.pythonhosted.org/packages/a5/24/7be3f82cb7990b89118d944b619e53c656c97dc89c28cfb143fdb7cd6f4d/lxml-6.0.2-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8799481bbdd212470d17513a54d568f44416db01250f49449647b5ab5b5dccb9", size = 5269890, upload-time = "2025-09-22T04:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/1b/bd/dcfb9ea1e16c665efd7538fc5d5c34071276ce9220e234217682e7d2c4a5/lxml-6.0.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9261bb77c2dab42f3ecd9103951aeca2c40277701eb7e912c545c1b16e0e4917", size = 5097185, upload-time = "2025-09-22T04:02:50.746Z" }, + { url = "https://files.pythonhosted.org/packages/21/04/a60b0ff9314736316f28316b694bccbbabe100f8483ad83852d77fc7468e/lxml-6.0.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:65ac4a01aba353cfa6d5725b95d7aed6356ddc0a3cd734de00124d285b04b64f", size = 4745895, upload-time = "2025-09-22T04:02:52.968Z" }, + { url = "https://files.pythonhosted.org/packages/d6/bd/7d54bd1846e5a310d9c715921c5faa71cf5c0853372adf78aee70c8d7aa2/lxml-6.0.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b22a07cbb82fea98f8a2fd814f3d1811ff9ed76d0fc6abc84eb21527596e7cc8", size = 5695246, upload-time = "2025-09-22T04:02:54.798Z" }, + { url = "https://files.pythonhosted.org/packages/fd/32/5643d6ab947bc371da21323acb2a6e603cedbe71cb4c99c8254289ab6f4e/lxml-6.0.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d759cdd7f3e055d6bc8d9bec3ad905227b2e4c785dc16c372eb5b5e83123f48a", size = 5260797, upload-time = "2025-09-22T04:02:57.058Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/da/34c1ec4cff1eea7d0b4cd44af8411806ed943141804ac9c5d565302afb78/lxml-6.0.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:945da35a48d193d27c188037a05fec5492937f66fb1958c24fc761fb9d40d43c", size = 5277404, upload-time = "2025-09-22T04:02:58.966Z" }, + { url = "https://files.pythonhosted.org/packages/82/57/4eca3e31e54dc89e2c3507e1cd411074a17565fa5ffc437c4ae0a00d439e/lxml-6.0.2-cp314-cp314-win32.whl", hash = "sha256:be3aaa60da67e6153eb15715cc2e19091af5dc75faef8b8a585aea372507384b", size = 3670072, upload-time = "2025-09-22T04:03:38.05Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e0/c96cf13eccd20c9421ba910304dae0f619724dcf1702864fd59dd386404d/lxml-6.0.2-cp314-cp314-win_amd64.whl", hash = "sha256:fa25afbadead523f7001caf0c2382afd272c315a033a7b06336da2637d92d6ed", size = 4080617, upload-time = "2025-09-22T04:03:39.835Z" }, + { url = "https://files.pythonhosted.org/packages/d5/5d/b3f03e22b3d38d6f188ef044900a9b29b2fe0aebb94625ce9fe244011d34/lxml-6.0.2-cp314-cp314-win_arm64.whl", hash = "sha256:063eccf89df5b24e361b123e257e437f9e9878f425ee9aae3144c77faf6da6d8", size = 3754930, upload-time = "2025-09-22T04:03:41.565Z" }, + { url = "https://files.pythonhosted.org/packages/5e/5c/42c2c4c03554580708fc738d13414801f340c04c3eff90d8d2d227145275/lxml-6.0.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:6162a86d86893d63084faaf4ff937b3daea233e3682fb4474db07395794fa80d", size = 8910380, upload-time = "2025-09-22T04:03:01.645Z" }, + { url = "https://files.pythonhosted.org/packages/bf/4f/12df843e3e10d18d468a7557058f8d3733e8b6e12401f30b1ef29360740f/lxml-6.0.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:414aaa94e974e23a3e92e7ca5b97d10c0cf37b6481f50911032c69eeb3991bba", size = 4775632, upload-time = "2025-09-22T04:03:03.814Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/0c/9dc31e6c2d0d418483cbcb469d1f5a582a1cd00a1f4081953d44051f3c50/lxml-6.0.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48461bd21625458dd01e14e2c38dd0aea69addc3c4f960c30d9f59d7f93be601", size = 4975171, upload-time = "2025-09-22T04:03:05.651Z" }, + { url = "https://files.pythonhosted.org/packages/e7/2b/9b870c6ca24c841bdd887504808f0417aa9d8d564114689266f19ddf29c8/lxml-6.0.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:25fcc59afc57d527cfc78a58f40ab4c9b8fd096a9a3f964d2781ffb6eb33f4ed", size = 5110109, upload-time = "2025-09-22T04:03:07.452Z" }, + { url = "https://files.pythonhosted.org/packages/bf/0c/4f5f2a4dd319a178912751564471355d9019e220c20d7db3fb8307ed8582/lxml-6.0.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5179c60288204e6ddde3f774a93350177e08876eaf3ab78aa3a3649d43eb7d37", size = 5041061, upload-time = "2025-09-22T04:03:09.297Z" }, + { url = "https://files.pythonhosted.org/packages/12/64/554eed290365267671fe001a20d72d14f468ae4e6acef1e179b039436967/lxml-6.0.2-cp314-cp314t-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:967aab75434de148ec80597b75062d8123cadf2943fb4281f385141e18b21338", size = 5306233, upload-time = "2025-09-22T04:03:11.651Z" }, + { url = "https://files.pythonhosted.org/packages/7a/31/1d748aa275e71802ad9722df32a7a35034246b42c0ecdd8235412c3396ef/lxml-6.0.2-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d100fcc8930d697c6561156c6810ab4a508fb264c8b6779e6e61e2ed5e7558f9", size = 5604739, upload-time = "2025-09-22T04:03:13.592Z" }, + { url = "https://files.pythonhosted.org/packages/8f/41/2c11916bcac09ed561adccacceaedd2bf0e0b25b297ea92aab99fd03d0fa/lxml-6.0.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ca59e7e13e5981175b8b3e4ab84d7da57993eeff53c07764dcebda0d0e64ecd", size = 5225119, upload-time = "2025-09-22T04:03:15.408Z" }, + { 
url = "https://files.pythonhosted.org/packages/99/05/4e5c2873d8f17aa018e6afde417c80cc5d0c33be4854cce3ef5670c49367/lxml-6.0.2-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:957448ac63a42e2e49531b9d6c0fa449a1970dbc32467aaad46f11545be9af1d", size = 4633665, upload-time = "2025-09-22T04:03:17.262Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c9/dcc2da1bebd6275cdc723b515f93edf548b82f36a5458cca3578bc899332/lxml-6.0.2-cp314-cp314t-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b7fc49c37f1786284b12af63152fe1d0990722497e2d5817acfe7a877522f9a9", size = 5234997, upload-time = "2025-09-22T04:03:19.14Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e2/5172e4e7468afca64a37b81dba152fc5d90e30f9c83c7c3213d6a02a5ce4/lxml-6.0.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e19e0643cc936a22e837f79d01a550678da8377d7d801a14487c10c34ee49c7e", size = 5090957, upload-time = "2025-09-22T04:03:21.436Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b3/15461fd3e5cd4ddcb7938b87fc20b14ab113b92312fc97afe65cd7c85de1/lxml-6.0.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:1db01e5cf14345628e0cbe71067204db658e2fb8e51e7f33631f5f4735fefd8d", size = 4764372, upload-time = "2025-09-22T04:03:23.27Z" }, + { url = "https://files.pythonhosted.org/packages/05/33/f310b987c8bf9e61c4dd8e8035c416bd3230098f5e3cfa69fc4232de7059/lxml-6.0.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:875c6b5ab39ad5291588aed6925fac99d0097af0dd62f33c7b43736043d4a2ec", size = 5634653, upload-time = "2025-09-22T04:03:25.767Z" }, + { url = "https://files.pythonhosted.org/packages/70/ff/51c80e75e0bc9382158133bdcf4e339b5886c6ee2418b5199b3f1a61ed6d/lxml-6.0.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:cdcbed9ad19da81c480dfd6dd161886db6096083c9938ead313d94b30aadf272", size = 5233795, upload-time = "2025-09-22T04:03:27.62Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/4d/4856e897df0d588789dd844dbed9d91782c4ef0b327f96ce53c807e13128/lxml-6.0.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:80dadc234ebc532e09be1975ff538d154a7fa61ea5031c03d25178855544728f", size = 5257023, upload-time = "2025-09-22T04:03:30.056Z" }, + { url = "https://files.pythonhosted.org/packages/0f/85/86766dfebfa87bea0ab78e9ff7a4b4b45225df4b4d3b8cc3c03c5cd68464/lxml-6.0.2-cp314-cp314t-win32.whl", hash = "sha256:da08e7bb297b04e893d91087df19638dc7a6bb858a954b0cc2b9f5053c922312", size = 3911420, upload-time = "2025-09-22T04:03:32.198Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1a/b248b355834c8e32614650b8008c69ffeb0ceb149c793961dd8c0b991bb3/lxml-6.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:252a22982dca42f6155125ac76d3432e548a7625d56f5a273ee78a5057216eca", size = 4406837, upload-time = "2025-09-22T04:03:34.027Z" }, + { url = "https://files.pythonhosted.org/packages/92/aa/df863bcc39c5e0946263454aba394de8a9084dbaff8ad143846b0d844739/lxml-6.0.2-cp314-cp314t-win_arm64.whl", hash = "sha256:bb4c1847b303835d89d785a18801a883436cdfd5dc3d62947f9c49e24f0f5a2c", size = 3822205, upload-time = "2025-09-22T04:03:36.249Z" }, ] [[package]] name = "markdown-it-py" -version = "3.0.0" +version = "4.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mdurl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, ] [[package]] name = "mcp" -version = "1.25.0" +version = "1.26.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -1011,9 +1226,9 @@ dependencies = [ { name = "typing-inspection" }, { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d5/2d/649d80a0ecf6a1f82632ca44bec21c0461a9d9fc8934d38cb5b319f2db5e/mcp-1.25.0.tar.gz", hash = "sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802", size = 605387, upload-time = "2025-12-19T10:19:56.985Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/6d/62e76bbb8144d6ed86e202b5edd8a4cb631e7c8130f3f4893c3f90262b10/mcp-1.26.0.tar.gz", hash = "sha256:db6e2ef491eecc1a0d93711a76f28dec2e05999f93afd48795da1c1137142c66", size = 608005, upload-time = "2026-01-24T19:40:32.468Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/fc/6dc7659c2ae5ddf280477011f4213a74f806862856b796ef08f028e664bf/mcp-1.25.0-py3-none-any.whl", hash = "sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a", size = 233076, upload-time = "2025-12-19T10:19:55.416Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d9/eaa1f80170d2b7c5ba23f3b59f766f3a0bb41155fbc32a69adfa1adaaef9/mcp-1.26.0-py3-none-any.whl", hash = "sha256:904a21c33c25aa98ddbeb47273033c435e595bbacfdb177f4bd87f6dceebe1ca", size = 233615, 
upload-time = "2026-01-24T19:40:30.652Z" }, ] [[package]] @@ -1027,83 +1242,119 @@ wheels = [ [[package]] name = "more-itertools" -version = "10.7.0" +version = "10.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ce/a0/834b0cebabbfc7e311f30b46c8188790a37f89fc8d756660346fe5abfd09/more_itertools-10.7.0.tar.gz", hash = "sha256:9fddd5403be01a94b204faadcff459ec3568cf110265d3c54323e1e866ad29d3", size = 127671, upload-time = "2025-04-22T14:17:41.838Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/5d/38b681d3fce7a266dd9ab73c66959406d565b3e85f21d5e66e1181d93721/more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd", size = 137431, upload-time = "2025-09-02T15:23:11.018Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/9f/7ba6f94fc1e9ac3d2b853fdff3035fb2fa5afbed898c4a72b8a020610594/more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e", size = 65278, upload-time = "2025-04-22T14:17:40.49Z" }, + { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, ] [[package]] name = "multidict" -version = "6.6.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3d/2c/5dad12e82fbdf7470f29bff2171484bf07cb3b16ada60a6589af8f376440/multidict-6.6.3.tar.gz", hash = "sha256:798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc", size = 101006, upload-time = "2025-06-30T15:53:46.929Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/0e/a0/6b57988ea102da0623ea814160ed78d45a2645e4bbb499c2896d12833a70/multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6", size = 76514, upload-time = "2025-06-30T15:51:48.728Z" }, - { url = "https://files.pythonhosted.org/packages/07/7a/d1e92665b0850c6c0508f101f9cf0410c1afa24973e1115fe9c6a185ebf7/multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f", size = 45394, upload-time = "2025-06-30T15:51:49.986Z" }, - { url = "https://files.pythonhosted.org/packages/52/6f/dd104490e01be6ef8bf9573705d8572f8c2d2c561f06e3826b081d9e6591/multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55", size = 43590, upload-time = "2025-06-30T15:51:51.331Z" }, - { url = "https://files.pythonhosted.org/packages/44/fe/06e0e01b1b0611e6581b7fd5a85b43dacc08b6cea3034f902f383b0873e5/multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b", size = 237292, upload-time = "2025-06-30T15:51:52.584Z" }, - { url = "https://files.pythonhosted.org/packages/ce/71/4f0e558fb77696b89c233c1ee2d92f3e1d5459070a0e89153c9e9e804186/multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888", size = 258385, upload-time = "2025-06-30T15:51:53.913Z" }, - { url = "https://files.pythonhosted.org/packages/e3/25/cca0e68228addad24903801ed1ab42e21307a1b4b6dd2cf63da5d3ae082a/multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d", size = 242328, upload-time = 
"2025-06-30T15:51:55.672Z" }, - { url = "https://files.pythonhosted.org/packages/6e/a3/46f2d420d86bbcb8fe660b26a10a219871a0fbf4d43cb846a4031533f3e0/multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680", size = 268057, upload-time = "2025-06-30T15:51:57.037Z" }, - { url = "https://files.pythonhosted.org/packages/9e/73/1c743542fe00794a2ec7466abd3f312ccb8fad8dff9f36d42e18fb1ec33e/multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a", size = 269341, upload-time = "2025-06-30T15:51:59.111Z" }, - { url = "https://files.pythonhosted.org/packages/a4/11/6ec9dcbe2264b92778eeb85407d1df18812248bf3506a5a1754bc035db0c/multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961", size = 256081, upload-time = "2025-06-30T15:52:00.533Z" }, - { url = "https://files.pythonhosted.org/packages/9b/2b/631b1e2afeb5f1696846d747d36cda075bfdc0bc7245d6ba5c319278d6c4/multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65", size = 253581, upload-time = "2025-06-30T15:52:02.43Z" }, - { url = "https://files.pythonhosted.org/packages/bf/0e/7e3b93f79efeb6111d3bf9a1a69e555ba1d07ad1c11bceb56b7310d0d7ee/multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643", size = 250750, upload-time = "2025-06-30T15:52:04.26Z" }, - { url = "https://files.pythonhosted.org/packages/ad/9e/086846c1d6601948e7de556ee464a2d4c85e33883e749f46b9547d7b0704/multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063", size = 251548, 
upload-time = "2025-06-30T15:52:06.002Z" }, - { url = "https://files.pythonhosted.org/packages/8c/7b/86ec260118e522f1a31550e87b23542294880c97cfbf6fb18cc67b044c66/multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3", size = 262718, upload-time = "2025-06-30T15:52:07.707Z" }, - { url = "https://files.pythonhosted.org/packages/8c/bd/22ce8f47abb0be04692c9fc4638508b8340987b18691aa7775d927b73f72/multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75", size = 259603, upload-time = "2025-06-30T15:52:09.58Z" }, - { url = "https://files.pythonhosted.org/packages/07/9c/91b7ac1691be95cd1f4a26e36a74b97cda6aa9820632d31aab4410f46ebd/multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10", size = 251351, upload-time = "2025-06-30T15:52:10.947Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl", hash = "sha256:73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5", size = 41860, upload-time = "2025-06-30T15:52:12.334Z" }, - { url = "https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17", size = 45982, upload-time = "2025-06-30T15:52:13.6Z" }, - { url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b", size = 43210, upload-time = "2025-06-30T15:52:14.893Z" }, - { url = 
"https://files.pythonhosted.org/packages/52/1d/0bebcbbb4f000751fbd09957257903d6e002943fc668d841a4cf2fb7f872/multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:540d3c06d48507357a7d57721e5094b4f7093399a0106c211f33540fdc374d55", size = 75843, upload-time = "2025-06-30T15:52:16.155Z" }, - { url = "https://files.pythonhosted.org/packages/07/8f/cbe241b0434cfe257f65c2b1bcf9e8d5fb52bc708c5061fb29b0fed22bdf/multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9c19cea2a690f04247d43f366d03e4eb110a0dc4cd1bbeee4d445435428ed35b", size = 45053, upload-time = "2025-06-30T15:52:17.429Z" }, - { url = "https://files.pythonhosted.org/packages/32/d2/0b3b23f9dbad5b270b22a3ac3ea73ed0a50ef2d9a390447061178ed6bdb8/multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7af039820cfd00effec86bda5d8debef711a3e86a1d3772e85bea0f243a4bd65", size = 43273, upload-time = "2025-06-30T15:52:19.346Z" }, - { url = "https://files.pythonhosted.org/packages/fd/fe/6eb68927e823999e3683bc49678eb20374ba9615097d085298fd5b386564/multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:500b84f51654fdc3944e936f2922114349bf8fdcac77c3092b03449f0e5bc2b3", size = 237124, upload-time = "2025-06-30T15:52:20.773Z" }, - { url = "https://files.pythonhosted.org/packages/e7/ab/320d8507e7726c460cb77117848b3834ea0d59e769f36fdae495f7669929/multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3fc723ab8a5c5ed6c50418e9bfcd8e6dceba6c271cee6728a10a4ed8561520c", size = 256892, upload-time = "2025-06-30T15:52:22.242Z" }, - { url = "https://files.pythonhosted.org/packages/76/60/38ee422db515ac69834e60142a1a69111ac96026e76e8e9aa347fd2e4591/multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:94c47ea3ade005b5976789baaed66d4de4480d0a0bf31cef6edaa41c1e7b56a6", size = 240547, upload-time = 
"2025-06-30T15:52:23.736Z" }, - { url = "https://files.pythonhosted.org/packages/27/fb/905224fde2dff042b030c27ad95a7ae744325cf54b890b443d30a789b80e/multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dbc7cf464cc6d67e83e136c9f55726da3a30176f020a36ead246eceed87f1cd8", size = 266223, upload-time = "2025-06-30T15:52:25.185Z" }, - { url = "https://files.pythonhosted.org/packages/76/35/dc38ab361051beae08d1a53965e3e1a418752fc5be4d3fb983c5582d8784/multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:900eb9f9da25ada070f8ee4a23f884e0ee66fe4e1a38c3af644256a508ad81ca", size = 267262, upload-time = "2025-06-30T15:52:26.969Z" }, - { url = "https://files.pythonhosted.org/packages/1f/a3/0a485b7f36e422421b17e2bbb5a81c1af10eac1d4476f2ff92927c730479/multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c6df517cf177da5d47ab15407143a89cd1a23f8b335f3a28d57e8b0a3dbb884", size = 254345, upload-time = "2025-06-30T15:52:28.467Z" }, - { url = "https://files.pythonhosted.org/packages/b4/59/bcdd52c1dab7c0e0d75ff19cac751fbd5f850d1fc39172ce809a74aa9ea4/multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ef421045f13879e21c994b36e728d8e7d126c91a64b9185810ab51d474f27e7", size = 252248, upload-time = "2025-06-30T15:52:29.938Z" }, - { url = "https://files.pythonhosted.org/packages/bb/a4/2d96aaa6eae8067ce108d4acee6f45ced5728beda55c0f02ae1072c730d1/multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:6c1e61bb4f80895c081790b6b09fa49e13566df8fbff817da3f85b3a8192e36b", size = 250115, upload-time = "2025-06-30T15:52:31.416Z" }, - { url = "https://files.pythonhosted.org/packages/25/d2/ed9f847fa5c7d0677d4f02ea2c163d5e48573de3f57bacf5670e43a5ffaa/multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e5e8523bb12d7623cd8300dbd91b9e439a46a028cd078ca695eb66ba31adee3c", size = 
249649, upload-time = "2025-06-30T15:52:32.996Z" }, - { url = "https://files.pythonhosted.org/packages/1f/af/9155850372563fc550803d3f25373308aa70f59b52cff25854086ecb4a79/multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ef58340cc896219e4e653dade08fea5c55c6df41bcc68122e3be3e9d873d9a7b", size = 261203, upload-time = "2025-06-30T15:52:34.521Z" }, - { url = "https://files.pythonhosted.org/packages/36/2f/c6a728f699896252cf309769089568a33c6439626648843f78743660709d/multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fc9dc435ec8699e7b602b94fe0cd4703e69273a01cbc34409af29e7820f777f1", size = 258051, upload-time = "2025-06-30T15:52:35.999Z" }, - { url = "https://files.pythonhosted.org/packages/d0/60/689880776d6b18fa2b70f6cc74ff87dd6c6b9b47bd9cf74c16fecfaa6ad9/multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e864486ef4ab07db5e9cb997bad2b681514158d6954dd1958dfb163b83d53e6", size = 249601, upload-time = "2025-06-30T15:52:37.473Z" }, - { url = "https://files.pythonhosted.org/packages/75/5e/325b11f2222a549019cf2ef879c1f81f94a0d40ace3ef55cf529915ba6cc/multidict-6.6.3-cp313-cp313-win32.whl", hash = "sha256:5633a82fba8e841bc5c5c06b16e21529573cd654f67fd833650a215520a6210e", size = 41683, upload-time = "2025-06-30T15:52:38.927Z" }, - { url = "https://files.pythonhosted.org/packages/b1/ad/cf46e73f5d6e3c775cabd2a05976547f3f18b39bee06260369a42501f053/multidict-6.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:e93089c1570a4ad54c3714a12c2cef549dc9d58e97bcded193d928649cab78e9", size = 45811, upload-time = "2025-06-30T15:52:40.207Z" }, - { url = "https://files.pythonhosted.org/packages/c5/c9/2e3fe950db28fb7c62e1a5f46e1e38759b072e2089209bc033c2798bb5ec/multidict-6.6.3-cp313-cp313-win_arm64.whl", hash = "sha256:c60b401f192e79caec61f166da9c924e9f8bc65548d4246842df91651e83d600", size = 43056, upload-time = "2025-06-30T15:52:41.575Z" }, - { url = 
"https://files.pythonhosted.org/packages/3a/58/aaf8114cf34966e084a8cc9517771288adb53465188843d5a19862cb6dc3/multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:02fd8f32d403a6ff13864b0851f1f523d4c988051eea0471d4f1fd8010f11134", size = 82811, upload-time = "2025-06-30T15:52:43.281Z" }, - { url = "https://files.pythonhosted.org/packages/71/af/5402e7b58a1f5b987a07ad98f2501fdba2a4f4b4c30cf114e3ce8db64c87/multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f3aa090106b1543f3f87b2041eef3c156c8da2aed90c63a2fbed62d875c49c37", size = 48304, upload-time = "2025-06-30T15:52:45.026Z" }, - { url = "https://files.pythonhosted.org/packages/39/65/ab3c8cafe21adb45b24a50266fd747147dec7847425bc2a0f6934b3ae9ce/multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e924fb978615a5e33ff644cc42e6aa241effcf4f3322c09d4f8cebde95aff5f8", size = 46775, upload-time = "2025-06-30T15:52:46.459Z" }, - { url = "https://files.pythonhosted.org/packages/49/ba/9fcc1b332f67cc0c0c8079e263bfab6660f87fe4e28a35921771ff3eea0d/multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:b9fe5a0e57c6dbd0e2ce81ca66272282c32cd11d31658ee9553849d91289e1c1", size = 229773, upload-time = "2025-06-30T15:52:47.88Z" }, - { url = "https://files.pythonhosted.org/packages/a4/14/0145a251f555f7c754ce2dcbcd012939bbd1f34f066fa5d28a50e722a054/multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b24576f208793ebae00280c59927c3b7c2a3b1655e443a25f753c4611bc1c373", size = 250083, upload-time = "2025-06-30T15:52:49.366Z" }, - { url = "https://files.pythonhosted.org/packages/9e/d4/d5c0bd2bbb173b586c249a151a26d2fb3ec7d53c96e42091c9fef4e1f10c/multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:135631cb6c58eac37d7ac0df380294fecdc026b28837fa07c02e459c7fb9c54e", size = 228980, upload-time = 
"2025-06-30T15:52:50.903Z" }, - { url = "https://files.pythonhosted.org/packages/21/32/c9a2d8444a50ec48c4733ccc67254100c10e1c8ae8e40c7a2d2183b59b97/multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:274d416b0df887aef98f19f21578653982cfb8a05b4e187d4a17103322eeaf8f", size = 257776, upload-time = "2025-06-30T15:52:52.764Z" }, - { url = "https://files.pythonhosted.org/packages/68/d0/14fa1699f4ef629eae08ad6201c6b476098f5efb051b296f4c26be7a9fdf/multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e252017a817fad7ce05cafbe5711ed40faeb580e63b16755a3a24e66fa1d87c0", size = 256882, upload-time = "2025-06-30T15:52:54.596Z" }, - { url = "https://files.pythonhosted.org/packages/da/88/84a27570fbe303c65607d517a5f147cd2fc046c2d1da02b84b17b9bdc2aa/multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4cc8d848cd4fe1cdee28c13ea79ab0ed37fc2e89dd77bac86a2e7959a8c3bc", size = 247816, upload-time = "2025-06-30T15:52:56.175Z" }, - { url = "https://files.pythonhosted.org/packages/1c/60/dca352a0c999ce96a5d8b8ee0b2b9f729dcad2e0b0c195f8286269a2074c/multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9e236a7094b9c4c1b7585f6b9cca34b9d833cf079f7e4c49e6a4a6ec9bfdc68f", size = 245341, upload-time = "2025-06-30T15:52:57.752Z" }, - { url = "https://files.pythonhosted.org/packages/50/ef/433fa3ed06028f03946f3993223dada70fb700f763f70c00079533c34578/multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e0cb0ab69915c55627c933f0b555a943d98ba71b4d1c57bc0d0a66e2567c7471", size = 235854, upload-time = "2025-06-30T15:52:59.74Z" }, - { url = "https://files.pythonhosted.org/packages/1b/1f/487612ab56fbe35715320905215a57fede20de7db40a261759690dc80471/multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:81ef2f64593aba09c5212a3d0f8c906a0d38d710a011f2f42759704d4557d3f2", size = 
243432, upload-time = "2025-06-30T15:53:01.602Z" }, - { url = "https://files.pythonhosted.org/packages/da/6f/ce8b79de16cd885c6f9052c96a3671373d00c59b3ee635ea93e6e81b8ccf/multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:b9cbc60010de3562545fa198bfc6d3825df430ea96d2cc509c39bd71e2e7d648", size = 252731, upload-time = "2025-06-30T15:53:03.517Z" }, - { url = "https://files.pythonhosted.org/packages/bb/fe/a2514a6aba78e5abefa1624ca85ae18f542d95ac5cde2e3815a9fbf369aa/multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:70d974eaaa37211390cd02ef93b7e938de564bbffa866f0b08d07e5e65da783d", size = 247086, upload-time = "2025-06-30T15:53:05.48Z" }, - { url = "https://files.pythonhosted.org/packages/8c/22/b788718d63bb3cce752d107a57c85fcd1a212c6c778628567c9713f9345a/multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3713303e4a6663c6d01d648a68f2848701001f3390a030edaaf3fc949c90bf7c", size = 243338, upload-time = "2025-06-30T15:53:07.522Z" }, - { url = "https://files.pythonhosted.org/packages/22/d6/fdb3d0670819f2228f3f7d9af613d5e652c15d170c83e5f1c94fbc55a25b/multidict-6.6.3-cp313-cp313t-win32.whl", hash = "sha256:639ecc9fe7cd73f2495f62c213e964843826f44505a3e5d82805aa85cac6f89e", size = 47812, upload-time = "2025-06-30T15:53:09.263Z" }, - { url = "https://files.pythonhosted.org/packages/b6/d6/a9d2c808f2c489ad199723197419207ecbfbc1776f6e155e1ecea9c883aa/multidict-6.6.3-cp313-cp313t-win_amd64.whl", hash = "sha256:9f97e181f344a0ef3881b573d31de8542cc0dbc559ec68c8f8b5ce2c2e91646d", size = 53011, upload-time = "2025-06-30T15:53:11.038Z" }, - { url = "https://files.pythonhosted.org/packages/f2/40/b68001cba8188dd267590a111f9661b6256debc327137667e832bf5d66e8/multidict-6.6.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ce8b7693da41a3c4fde5871c738a81490cea5496c671d74374c8ab889e1834fb", size = 45254, upload-time = "2025-06-30T15:53:12.421Z" }, - { url = 
"https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", size = 12313, upload-time = "2025-06-30T15:53:45.437Z" }, +version = "6.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" }, + { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, + { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, + { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time = "2025-10-06T14:49:26.778Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363, upload-time = "2025-10-06T14:49:28.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" }, + { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" }, + { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" }, + { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = "2025-10-06T14:49:37.631Z" }, + { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, + { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, + 
{ url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, + { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, + { url = "https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6", size = 76135, upload-time = "2025-10-06T14:49:54.26Z" }, + { url = "https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159", size = 45117, upload-time = "2025-10-06T14:49:55.82Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca", size = 43472, upload-time = "2025-10-06T14:49:57.048Z" }, + { url = "https://files.pythonhosted.org/packages/75/3f/e2639e80325af0b6c6febdf8e57cc07043ff15f57fa1ef808f4ccb5ac4cd/multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8", size = 249342, upload-time = "2025-10-06T14:49:58.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/cc/84e0585f805cbeaa9cbdaa95f9a3d6aed745b9d25700623ac89a6ecff400/multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60", size = 257082, upload-time = "2025-10-06T14:49:59.89Z" }, + { url = "https://files.pythonhosted.org/packages/b0/9c/ac851c107c92289acbbf5cfb485694084690c1b17e555f44952c26ddc5bd/multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4", size = 240704, upload-time = "2025-10-06T14:50:01.485Z" }, + { url = "https://files.pythonhosted.org/packages/50/cc/5f93e99427248c09da95b62d64b25748a5f5c98c7c2ab09825a1d6af0e15/multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f", size = 266355, upload-time = "2025-10-06T14:50:02.955Z" }, + { url = "https://files.pythonhosted.org/packages/ec/0c/2ec1d883ceb79c6f7f6d7ad90c919c898f5d1c6ea96d322751420211e072/multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf", size = 267259, upload-time = "2025-10-06T14:50:04.446Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2d/f0b184fa88d6630aa267680bdb8623fb69cb0d024b8c6f0d23f9a0f406d3/multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32", size = 254903, upload-time = "2025-10-06T14:50:05.98Z" }, + { url = "https://files.pythonhosted.org/packages/06/c9/11ea263ad0df7dfabcad404feb3c0dd40b131bc7f232d5537f2fb1356951/multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036", size = 252365, upload-time = "2025-10-06T14:50:07.511Z" }, + { url = "https://files.pythonhosted.org/packages/41/88/d714b86ee2c17d6e09850c70c9d310abac3d808ab49dfa16b43aba9d53fd/multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec", size = 250062, upload-time = "2025-10-06T14:50:09.074Z" }, + { url = "https://files.pythonhosted.org/packages/15/fe/ad407bb9e818c2b31383f6131ca19ea7e35ce93cf1310fce69f12e89de75/multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e", size = 249683, upload-time = "2025-10-06T14:50:10.714Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64", size = 261254, upload-time = "2025-10-06T14:50:12.28Z" }, + { url = "https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd", size = 257967, upload-time = "2025-10-06T14:50:14.16Z" }, + { url = "https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288", size = 250085, upload-time = "2025-10-06T14:50:15.639Z" }, + { url = "https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17", size = 41713, upload-time = "2025-10-06T14:50:17.066Z" }, + 
{ url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390", size = 45915, upload-time = "2025-10-06T14:50:18.264Z" }, + { url = "https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e", size = 43077, upload-time = "2025-10-06T14:50:19.853Z" }, + { url = "https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00", size = 83114, upload-time = "2025-10-06T14:50:21.223Z" }, + { url = "https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb", size = 48442, upload-time = "2025-10-06T14:50:22.871Z" }, + { url = "https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b", size = 46885, upload-time = "2025-10-06T14:50:24.258Z" }, + { url = "https://files.pythonhosted.org/packages/46/d1/908f896224290350721597a61a69cd19b89ad8ee0ae1f38b3f5cd12ea2ac/multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c", size = 242588, upload-time = "2025-10-06T14:50:25.716Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/67/8604288bbd68680eee0ab568fdcb56171d8b23a01bcd5cb0c8fedf6e5d99/multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1", size = 249966, upload-time = "2025-10-06T14:50:28.192Z" }, + { url = "https://files.pythonhosted.org/packages/20/33/9228d76339f1ba51e3efef7da3ebd91964d3006217aae13211653193c3ff/multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b", size = 228618, upload-time = "2025-10-06T14:50:29.82Z" }, + { url = "https://files.pythonhosted.org/packages/f8/2d/25d9b566d10cab1c42b3b9e5b11ef79c9111eaf4463b8c257a3bd89e0ead/multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5", size = 257539, upload-time = "2025-10-06T14:50:31.731Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b1/8d1a965e6637fc33de3c0d8f414485c2b7e4af00f42cab3d84e7b955c222/multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad", size = 256345, upload-time = "2025-10-06T14:50:33.26Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/06b5a8adbdeedada6f4fb8d8f193d44a347223b11939b42953eeb6530b6b/multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c", size = 247934, upload-time = "2025-10-06T14:50:34.808Z" }, + { url = "https://files.pythonhosted.org/packages/8f/31/b2491b5fe167ca044c6eb4b8f2c9f3b8a00b24c432c365358eadac5d7625/multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5", size = 245243, upload-time = "2025-10-06T14:50:36.436Z" }, + { url = "https://files.pythonhosted.org/packages/61/1a/982913957cb90406c8c94f53001abd9eafc271cb3e70ff6371590bec478e/multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10", size = 235878, upload-time = "2025-10-06T14:50:37.953Z" }, + { url = "https://files.pythonhosted.org/packages/be/c0/21435d804c1a1cf7a2608593f4d19bca5bcbd7a81a70b253fdd1c12af9c0/multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754", size = 243452, upload-time = "2025-10-06T14:50:39.574Z" }, + { url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c", size = 252312, upload-time = "2025-10-06T14:50:41.612Z" }, + { url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762", size = 246935, upload-time = "2025-10-06T14:50:43.972Z" }, + { url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6", size = 243385, upload-time = "2025-10-06T14:50:45.648Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777, upload-time = 
"2025-10-06T14:50:47.154Z" }, + { url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104, upload-time = "2025-10-06T14:50:48.851Z" }, + { url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503, upload-time = "2025-10-06T14:50:50.16Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b1/3da6934455dd4b261d4c72f897e3a5728eba81db59959f3a639245891baa/multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842", size = 75128, upload-time = "2025-10-06T14:50:51.92Z" }, + { url = "https://files.pythonhosted.org/packages/14/2c/f069cab5b51d175a1a2cb4ccdf7a2c2dabd58aa5bd933fa036a8d15e2404/multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b", size = 44410, upload-time = "2025-10-06T14:50:53.275Z" }, + { url = "https://files.pythonhosted.org/packages/42/e2/64bb41266427af6642b6b128e8774ed84c11b80a90702c13ac0a86bb10cc/multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38", size = 43205, upload-time = "2025-10-06T14:50:54.911Z" }, + { url = "https://files.pythonhosted.org/packages/02/68/6b086fef8a3f1a8541b9236c594f0c9245617c29841f2e0395d979485cde/multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128", size = 245084, upload-time = "2025-10-06T14:50:56.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/ee/f524093232007cd7a75c1d132df70f235cfd590a7c9eaccd7ff422ef4ae8/multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34", size = 252667, upload-time = "2025-10-06T14:50:57.991Z" }, + { url = "https://files.pythonhosted.org/packages/02/a5/eeb3f43ab45878f1895118c3ef157a480db58ede3f248e29b5354139c2c9/multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99", size = 233590, upload-time = "2025-10-06T14:50:59.589Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1e/76d02f8270b97269d7e3dbd45644b1785bda457b474315f8cf999525a193/multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202", size = 264112, upload-time = "2025-10-06T14:51:01.183Z" }, + { url = "https://files.pythonhosted.org/packages/76/0b/c28a70ecb58963847c2a8efe334904cd254812b10e535aefb3bcce513918/multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1", size = 261194, upload-time = "2025-10-06T14:51:02.794Z" }, + { url = "https://files.pythonhosted.org/packages/b4/63/2ab26e4209773223159b83aa32721b4021ffb08102f8ac7d689c943fded1/multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3", size = 248510, upload-time = "2025-10-06T14:51:04.724Z" }, + { url = "https://files.pythonhosted.org/packages/93/cd/06c1fa8282af1d1c46fd55c10a7930af652afdce43999501d4d68664170c/multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d", size = 248395, upload-time = "2025-10-06T14:51:06.306Z" }, + { url = "https://files.pythonhosted.org/packages/99/ac/82cb419dd6b04ccf9e7e61befc00c77614fc8134362488b553402ecd55ce/multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6", size = 239520, upload-time = "2025-10-06T14:51:08.091Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f3/a0f9bf09493421bd8716a362e0cd1d244f5a6550f5beffdd6b47e885b331/multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7", size = 245479, upload-time = "2025-10-06T14:51:10.365Z" }, + { url = "https://files.pythonhosted.org/packages/8d/01/476d38fc73a212843f43c852b0eee266b6971f0e28329c2184a8df90c376/multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb", size = 258903, upload-time = "2025-10-06T14:51:12.466Z" }, + { url = "https://files.pythonhosted.org/packages/49/6d/23faeb0868adba613b817d0e69c5f15531b24d462af8012c4f6de4fa8dc3/multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f", size = 252333, upload-time = "2025-10-06T14:51:14.48Z" }, + { url = "https://files.pythonhosted.org/packages/1e/cc/48d02ac22b30fa247f7dad82866e4b1015431092f4ba6ebc7e77596e0b18/multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f", size = 243411, upload-time = "2025-10-06T14:51:16.072Z" }, + { url = "https://files.pythonhosted.org/packages/4a/03/29a8bf5a18abf1fe34535c88adbdfa88c9fb869b5a3b120692c64abe8284/multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885", size = 40940, upload-time = "2025-10-06T14:51:17.544Z" }, 
+ { url = "https://files.pythonhosted.org/packages/82/16/7ed27b680791b939de138f906d5cf2b4657b0d45ca6f5dd6236fdddafb1a/multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c", size = 45087, upload-time = "2025-10-06T14:51:18.875Z" }, + { url = "https://files.pythonhosted.org/packages/cd/3c/e3e62eb35a1950292fe39315d3c89941e30a9d07d5d2df42965ab041da43/multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000", size = 42368, upload-time = "2025-10-06T14:51:20.225Z" }, + { url = "https://files.pythonhosted.org/packages/8b/40/cd499bd0dbc5f1136726db3153042a735fffd0d77268e2ee20d5f33c010f/multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63", size = 82326, upload-time = "2025-10-06T14:51:21.588Z" }, + { url = "https://files.pythonhosted.org/packages/13/8a/18e031eca251c8df76daf0288e6790561806e439f5ce99a170b4af30676b/multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718", size = 48065, upload-time = "2025-10-06T14:51:22.93Z" }, + { url = "https://files.pythonhosted.org/packages/40/71/5e6701277470a87d234e433fb0a3a7deaf3bcd92566e421e7ae9776319de/multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2", size = 46475, upload-time = "2025-10-06T14:51:24.352Z" }, + { url = "https://files.pythonhosted.org/packages/fe/6a/bab00cbab6d9cfb57afe1663318f72ec28289ea03fd4e8236bb78429893a/multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e", size = 239324, upload-time = "2025-10-06T14:51:25.822Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/5f/8de95f629fc22a7769ade8b41028e3e5a822c1f8904f618d175945a81ad3/multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064", size = 246877, upload-time = "2025-10-06T14:51:27.604Z" }, + { url = "https://files.pythonhosted.org/packages/23/b4/38881a960458f25b89e9f4a4fdcb02ac101cfa710190db6e5528841e67de/multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e", size = 225824, upload-time = "2025-10-06T14:51:29.664Z" }, + { url = "https://files.pythonhosted.org/packages/1e/39/6566210c83f8a261575f18e7144736059f0c460b362e96e9cf797a24b8e7/multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd", size = 253558, upload-time = "2025-10-06T14:51:31.684Z" }, + { url = "https://files.pythonhosted.org/packages/00/a3/67f18315100f64c269f46e6c0319fa87ba68f0f64f2b8e7fd7c72b913a0b/multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a", size = 252339, upload-time = "2025-10-06T14:51:33.699Z" }, + { url = "https://files.pythonhosted.org/packages/c8/2a/1cb77266afee2458d82f50da41beba02159b1d6b1f7973afc9a1cad1499b/multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96", size = 244895, upload-time = "2025-10-06T14:51:36.189Z" }, + { url = "https://files.pythonhosted.org/packages/dd/72/09fa7dd487f119b2eb9524946ddd36e2067c08510576d43ff68469563b3b/multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e", size = 241862, upload-time = "2025-10-06T14:51:41.291Z" }, + { url = "https://files.pythonhosted.org/packages/65/92/bc1f8bd0853d8669300f732c801974dfc3702c3eeadae2f60cef54dc69d7/multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599", size = 232376, upload-time = "2025-10-06T14:51:43.55Z" }, + { url = "https://files.pythonhosted.org/packages/09/86/ac39399e5cb9d0c2ac8ef6e10a768e4d3bc933ac808d49c41f9dc23337eb/multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394", size = 240272, upload-time = "2025-10-06T14:51:45.265Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b6/fed5ac6b8563ec72df6cb1ea8dac6d17f0a4a1f65045f66b6d3bf1497c02/multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38", size = 248774, upload-time = "2025-10-06T14:51:46.836Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8d/b954d8c0dc132b68f760aefd45870978deec6818897389dace00fcde32ff/multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9", size = 242731, upload-time = "2025-10-06T14:51:48.541Z" }, + { url = "https://files.pythonhosted.org/packages/16/9d/a2dac7009125d3540c2f54e194829ea18ac53716c61b655d8ed300120b0f/multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0", size = 240193, upload-time = "2025-10-06T14:51:50.355Z" }, + { url = "https://files.pythonhosted.org/packages/39/ca/c05f144128ea232ae2178b008d5011d4e2cea86e4ee8c85c2631b1b94802/multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13", size = 48023, upload-time = 
"2025-10-06T14:51:51.883Z" }, + { url = "https://files.pythonhosted.org/packages/ba/8f/0a60e501584145588be1af5cc829265701ba3c35a64aec8e07cbb71d39bb/multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd", size = 53507, upload-time = "2025-10-06T14:51:53.672Z" }, + { url = "https://files.pythonhosted.org/packages/7f/ae/3148b988a9c6239903e786eac19c889fab607c31d6efa7fb2147e5680f23/multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827", size = 44804, upload-time = "2025-10-06T14:51:55.415Z" }, + { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, ] [[package]] name = "nodeenv" -version = "1.9.1" +version = "1.10.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = "2025-12-20T14:08:54.006Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" }, ] [[package]] @@ -1189,11 +1440,11 @@ wheels = [ [[package]] name = "packaging" -version = "25.0" +version = "26.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, ] [[package]] @@ -1216,11 +1467,11 @@ wheels = [ [[package]] name = "platformdirs" -version = "4.3.8" +version = "4.5.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = 
"sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, + { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, ] [[package]] @@ -1253,7 +1504,7 @@ wheels = [ [[package]] name = "pre-commit" -version = "4.2.0" +version = "4.5.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cfgv" }, @@ -1262,75 +1513,102 @@ dependencies = [ { name = "pyyaml" }, { name = "virtualenv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/08/39/679ca9b26c7bb2999ff122d50faa301e49af82ca9c066ec061cfbc0c6784/pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146", size = 193424, upload-time = "2025-03-18T21:35:20.987Z" } +sdist = { url = "https://files.pythonhosted.org/packages/40/f1/6d86a29246dfd2e9b6237f0b5823717f60cad94d47ddc26afa916d21f525/pre_commit-4.5.1.tar.gz", hash = "sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61", size = 198232, upload-time = "2025-12-16T21:14:33.552Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload-time = "2025-03-18T21:35:19.343Z" }, + { url = "https://files.pythonhosted.org/packages/5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77", size = 226437, upload-time = "2025-12-16T21:14:32.409Z" }, ] [[package]] name = "prometheus-client" -version = "0.23.1" +version = "0.24.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/53/3edb5d68ecf6b38fcbcc1ad28391117d2a322d9a1a3eff04bfdb184d8c3b/prometheus_client-0.23.1.tar.gz", hash = "sha256:6ae8f9081eaaaf153a2e959d2e6c4f4fb57b12ef76c8c7980202f1e57b48b2ce", size = 80481, upload-time = "2025-09-18T20:47:25.043Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/58/a794d23feb6b00fc0c72787d7e87d872a6730dd9ed7c7b3e954637d8f280/prometheus_client-0.24.1.tar.gz", hash = "sha256:7e0ced7fbbd40f7b84962d5d2ab6f17ef88a72504dcf7c0b40737b43b2a461f9", size = 85616, upload-time = "2026-01-14T15:26:26.965Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/db/14bafcb4af2139e046d03fd00dea7873e48eafe18b7d2797e73d6681f210/prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99", size = 61145, upload-time = "2025-09-18T20:47:23.875Z" }, + { url = "https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl", hash = "sha256:150db128af71a5c2482b36e588fc8a6b95e498750da4b17065947c16070f4055", size = 64057, upload-time = "2026-01-14T15:26:24.42Z" }, ] [[package]] name = "propcache" -version = "0.3.2" -source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, - { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, - { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, - { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, - { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, - { url = 
"https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, - { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, - { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, - { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, - { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, - { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, - { url = 
"https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, - { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, - { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, - { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, - { url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286, upload-time = "2025-06-09T22:54:54.369Z" }, - { url = 
"https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425, upload-time = "2025-06-09T22:54:55.642Z" }, - { url = "https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846, upload-time = "2025-06-09T22:54:57.246Z" }, - { url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871, upload-time = "2025-06-09T22:54:58.975Z" }, - { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 215720, upload-time = "2025-06-09T22:55:00.471Z" }, - { url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203, upload-time = "2025-06-09T22:55:01.834Z" }, - { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365, upload-time = "2025-06-09T22:55:03.199Z" }, - { url = 
"https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016, upload-time = "2025-06-09T22:55:04.518Z" }, - { url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596, upload-time = "2025-06-09T22:55:05.942Z" }, - { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977, upload-time = "2025-06-09T22:55:07.792Z" }, - { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220, upload-time = "2025-06-09T22:55:09.173Z" }, - { url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642, upload-time = "2025-06-09T22:55:10.62Z" }, - { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789, upload-time = "2025-06-09T22:55:12.029Z" }, - { url = 
"https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880, upload-time = "2025-06-09T22:55:13.45Z" }, - { url = "https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220, upload-time = "2025-06-09T22:55:15.284Z" }, - { url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678, upload-time = "2025-06-09T22:55:16.445Z" }, - { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560, upload-time = "2025-06-09T22:55:17.598Z" }, - { url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676, upload-time = "2025-06-09T22:55:18.922Z" }, - { url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701, upload-time = "2025-06-09T22:55:20.106Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934, upload-time = "2025-06-09T22:55:21.5Z" }, - { url = "https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316, upload-time = "2025-06-09T22:55:22.918Z" }, - { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619, upload-time = "2025-06-09T22:55:24.651Z" }, - { url = "https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896, upload-time = "2025-06-09T22:55:26.049Z" }, - { url = "https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111, upload-time = "2025-06-09T22:55:27.381Z" }, - { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 268334, upload-time = 
"2025-06-09T22:55:28.747Z" }, - { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026, upload-time = "2025-06-09T22:55:30.184Z" }, - { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724, upload-time = "2025-06-09T22:55:31.646Z" }, - { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868, upload-time = "2025-06-09T22:55:33.209Z" }, - { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322, upload-time = "2025-06-09T22:55:35.065Z" }, - { url = "https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778, upload-time = "2025-06-09T22:55:36.45Z" }, - { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175, upload-time = "2025-06-09T22:55:38.436Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857, upload-time = "2025-06-09T22:55:39.687Z" }, - { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size 
= 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, + { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, + { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, + { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = 
"2025-10-08T19:47:14.913Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442, upload-time = "2025-10-08T19:47:16.277Z" }, + { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398, upload-time = "2025-10-08T19:47:17.962Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, + { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, + { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, + { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, + { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, + { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, + { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" }, + { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, + { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = 
"sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" }, + { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, + { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, + { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, + { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, + { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, + { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, + { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = 
"2025-10-08T19:47:59.317Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424, upload-time = "2025-10-08T19:48:00.67Z" }, + { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566, upload-time = "2025-10-08T19:48:02.604Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" }, + { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, + { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, + { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, + { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, + { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, + { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, + { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, + { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" }, + { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, + { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = 
"sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = "2025-10-08T19:48:28.65Z" }, + { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, + { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, + { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] [[package]] @@ -1376,16 +1654,16 @@ wheels = [ [[package]] name = "pycparser" -version = "2.22" +version = "3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, ] [[package]] name = "pydantic" -version = "2.11.7" +version = "2.12.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1393,9 +1671,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, ] [package.optional-dependencies] @@ -1405,62 +1683,92 @@ email = [ [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time 
= "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, 
upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = 
"2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = 
"2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = 
"2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = 
"2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, ] [[package]] name = "pydantic-settings" -version = "2.8.1" +version = "2.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, + { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/88/82/c79424d7d8c29b994fb01d277da57b0a9b09cc03c3ff875f9bd8a86b2145/pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585", size = 83550, upload-time = "2025-02-27T10:10:32.338Z" } +sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0b/53/a64f03044927dc47aafe029c42a5b7aabc38dfb813475e0e1bf71c4a59d0/pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c", size = 30839, upload-time = "2025-02-27T10:10:30.711Z" }, + { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" }, ] [[package]] name = "pydocket" -version = "0.16.3" +version = "0.16.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cloudpickle" }, @@ -1476,9 +1784,9 @@ dependencies = 
[ { name = "typer" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e0/c5/61dcfce4d50b66a3f09743294d37fab598b81bb0975054b7f732da9243ec/pydocket-0.16.3.tar.gz", hash = "sha256:78e9da576de09e9f3f410d2471ef1c679b7741ddd21b586c97a13872b69bd265", size = 297080, upload-time = "2025-12-23T23:37:33.32Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/00/26befe5f58df7cd1aeda4a8d10bc7d1908ffd86b80fd995e57a2a7b3f7bd/pydocket-0.16.6.tar.gz", hash = "sha256:b96c96ad7692827214ed4ff25fcf941ec38371314db5dcc1ae792b3e9d3a0294", size = 299054, upload-time = "2026-01-09T22:09:15.405Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/94/93b7f5981aa04f922e0d9ce7326a4587866ec7e39f7c180ffcf408e66ee8/pydocket-0.16.3-py3-none-any.whl", hash = "sha256:e2b50925356e7cd535286255195458ac7bba15f25293356651b36d223db5dd7c", size = 67087, upload-time = "2025-12-23T23:37:31.829Z" }, + { url = "https://files.pythonhosted.org/packages/0a/3f/7483e5a6dc6326b6e0c640619b5c5bd1d6e3c20e54d58f5fb86267cef00e/pydocket-0.16.6-py3-none-any.whl", hash = "sha256:683d21e2e846aa5106274e7d59210331b242d7fb0dce5b08d3b82065663ed183", size = 67697, upload-time = "2026-01-09T22:09:13.436Z" }, ] [[package]] @@ -1495,11 +1803,11 @@ wheels = [ [[package]] name = "pygments" -version = "2.19.1" +version = "2.19.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } wheels = [ - { 
url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] [[package]] @@ -1518,57 +1826,63 @@ crypto = [ [[package]] name = "pyperclip" -version = "1.9.0" +version = "1.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/30/23/2f0a3efc4d6a32f3b63cdff36cd398d9701d26cda58e3ab97ac79fb5e60d/pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310", size = 20961, upload-time = "2024-06-18T20:38:48.401Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/52/d87eba7cb129b81563019d1679026e7a112ef76855d6159d24754dbd2a51/pyperclip-1.11.0.tar.gz", hash = "sha256:244035963e4428530d9e3a6101a1ef97209c6825edab1567beac148ccc1db1b6", size = 12185, upload-time = "2025-09-26T14:40:37.245Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/80/fc9d01d5ed37ba4c42ca2b55b4339ae6e200b456be3a1aaddf4a9fa99b8c/pyperclip-1.11.0-py3-none-any.whl", hash = "sha256:299403e9ff44581cb9ba2ffeed69c7aa96a008622ad0c46cb575ca75b5b84273", size = 11063, upload-time = "2025-09-26T14:40:36.069Z" }, +] [[package]] name = "pytest" -version = "8.3.5" +version = "9.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, { name = "iniconfig" }, { name = "packaging" }, { name = "pluggy" }, + { name = "pygments" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891, upload-time = "2025-03-02T12:54:54.503Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634, upload-time = "2025-03-02T12:54:52.069Z" }, + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, ] [[package]] name = "pytest-asyncio" -version = "1.0.0" +version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d0/d4/14f53324cb1a6381bef29d698987625d80052bb33932d8e7cbf9b337b17c/pytest_asyncio-1.0.0.tar.gz", hash = "sha256:d15463d13f4456e1ead2594520216b225a16f781e144f8fdf6c5bb4667c48b3f", size = 46960, upload-time = "2025-05-26T04:54:40.484Z" } +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } wheels = [ - { url 
= "https://files.pythonhosted.org/packages/30/05/ce271016e351fddc8399e546f6e23761967ee09c8c568bbfbecb0c150171/pytest_asyncio-1.0.0-py3-none-any.whl", hash = "sha256:4f024da9f1ef945e680dc68610b52550e36590a67fd31bb3b4943979a1f90ef3", size = 15976, upload-time = "2025-05-26T04:54:39.035Z" }, + { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" }, ] [[package]] name = "pytest-cov" -version = "6.1.1" +version = "7.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coverage" }, + { name = "pluggy" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/25/69/5f1e57f6c5a39f81411b550027bf72842c4567ff5fd572bed1edc9e4b5d9/pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a", size = 66857, upload-time = "2025-04-05T14:07:51.592Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/28/d0/def53b4a790cfb21483016430ed828f64830dd981ebe1089971cd10cab25/pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde", size = 23841, upload-time = "2025-04-05T14:07:49.641Z" }, + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, ] [[package]] name = 
"python-dotenv" -version = "1.1.1" +version = "1.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, + { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, ] [[package]] @@ -1582,11 +1896,11 @@ wheels = [ [[package]] name = "python-multipart" -version = "0.0.20" +version = "0.0.21" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } +sdist = { url = "https://files.pythonhosted.org/packages/78/96/804520d0850c7db98e5ccb70282e29208723f0964e88ffd9d0da2f52ea09/python_multipart-0.0.21.tar.gz", hash = 
"sha256:7137ebd4d3bbf70ea1622998f902b97a29434a9e8dc40eb203bbcf7c2a2cba92", size = 37196, upload-time = "2025-12-17T09:24:22.446Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, + { url = "https://files.pythonhosted.org/packages/aa/76/03af049af4dcee5d27442f71b6924f01f3efb5d2bd34f23fcd563f2cc5f5/python_multipart-0.0.21-py3-none-any.whl", hash = "sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090", size = 24541, upload-time = "2025-12-17T09:24:21.153Z" }, ] [[package]] @@ -1616,28 +1930,48 @@ wheels = [ [[package]] name = "pyyaml" -version = "6.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, - { url = 
"https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, - { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, - { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, - { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, - { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, - { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, ] [[package]] @@ -1674,7 +2008,7 @@ wheels = [ [[package]] name = "requests" -version = "2.32.4" +version = "2.32.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -1682,22 +2016,22 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, ] [[package]] name = "rich" -version = "14.0.0" +version = "14.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078, upload-time = "2025-03-30T14:15:14.23Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/9c/137848452e130e71f3ca9a9876751ddcac99e4b1f248ed297996c8c2d728/rich-14.3.0.tar.gz", hash = "sha256:b75e54d3abbcc49137e83e4db54dc86c5e47687eebc95aa0305363231a36e699", size = 230113, upload-time = "2026-01-24T12:25:46.336Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, + { url = "https://files.pythonhosted.org/packages/fa/e0/83cbdcb81b5cbbbe355648dd402b410437806544f48ee218a2354798f012/rich-14.3.0-py3-none-any.whl", hash = "sha256:0b8c1e368c1125b9e993c2d2f1342802525f4853fc6dac2e8e9e88bac0f45bce", size = 309950, upload-time = "2026-01-24T12:25:44.679Z" }, ] [[package]] @@ -1715,103 +2049,109 @@ wheels = [ [[package]] name = "rpds-py" -version = "0.26.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a5/aa/4456d84bbb54adc6a916fb10c9b374f78ac840337644e4a5eda229c81275/rpds_py-0.26.0.tar.gz", hash = "sha256:20dae58a859b0906f0685642e591056f1e787f3a8b39c8e8749a45dc7d26bdb0", size = 27385, upload-time = "2025-07-01T15:57:13.958Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ea/86/90eb87c6f87085868bd077c7a9938006eb1ce19ed4d06944a90d3560fce2/rpds_py-0.26.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:894514d47e012e794f1350f076c427d2347ebf82f9b958d554d12819849a369d", size = 363933, upload-time = "2025-07-01T15:54:15.734Z" }, - { url = 
"https://files.pythonhosted.org/packages/63/78/4469f24d34636242c924626082b9586f064ada0b5dbb1e9d096ee7a8e0c6/rpds_py-0.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc921b96fa95a097add244da36a1d9e4f3039160d1d30f1b35837bf108c21136", size = 350447, upload-time = "2025-07-01T15:54:16.922Z" }, - { url = "https://files.pythonhosted.org/packages/ad/91/c448ed45efdfdade82348d5e7995e15612754826ea640afc20915119734f/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1157659470aa42a75448b6e943c895be8c70531c43cb78b9ba990778955582", size = 384711, upload-time = "2025-07-01T15:54:18.101Z" }, - { url = "https://files.pythonhosted.org/packages/ec/43/e5c86fef4be7f49828bdd4ecc8931f0287b1152c0bb0163049b3218740e7/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:521ccf56f45bb3a791182dc6b88ae5f8fa079dd705ee42138c76deb1238e554e", size = 400865, upload-time = "2025-07-01T15:54:19.295Z" }, - { url = "https://files.pythonhosted.org/packages/55/34/e00f726a4d44f22d5c5fe2e5ddd3ac3d7fd3f74a175607781fbdd06fe375/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9def736773fd56b305c0eef698be5192c77bfa30d55a0e5885f80126c4831a15", size = 517763, upload-time = "2025-07-01T15:54:20.858Z" }, - { url = "https://files.pythonhosted.org/packages/52/1c/52dc20c31b147af724b16104500fba13e60123ea0334beba7b40e33354b4/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdad4ea3b4513b475e027be79e5a0ceac8ee1c113a1a11e5edc3c30c29f964d8", size = 406651, upload-time = "2025-07-01T15:54:22.508Z" }, - { url = "https://files.pythonhosted.org/packages/2e/77/87d7bfabfc4e821caa35481a2ff6ae0b73e6a391bb6b343db2c91c2b9844/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82b165b07f416bdccf5c84546a484cc8f15137ca38325403864bfdf2b5b72f6a", size = 386079, upload-time = "2025-07-01T15:54:23.987Z" }, - { url = 
"https://files.pythonhosted.org/packages/e3/d4/7f2200c2d3ee145b65b3cddc4310d51f7da6a26634f3ac87125fd789152a/rpds_py-0.26.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d04cab0a54b9dba4d278fe955a1390da3cf71f57feb78ddc7cb67cbe0bd30323", size = 421379, upload-time = "2025-07-01T15:54:25.073Z" }, - { url = "https://files.pythonhosted.org/packages/ae/13/9fdd428b9c820869924ab62236b8688b122baa22d23efdd1c566938a39ba/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:79061ba1a11b6a12743a2b0f72a46aa2758613d454aa6ba4f5a265cc48850158", size = 562033, upload-time = "2025-07-01T15:54:26.225Z" }, - { url = "https://files.pythonhosted.org/packages/f3/e1/b69686c3bcbe775abac3a4c1c30a164a2076d28df7926041f6c0eb5e8d28/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f405c93675d8d4c5ac87364bb38d06c988e11028a64b52a47158a355079661f3", size = 591639, upload-time = "2025-07-01T15:54:27.424Z" }, - { url = "https://files.pythonhosted.org/packages/5c/c9/1e3d8c8863c84a90197ac577bbc3d796a92502124c27092413426f670990/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dafd4c44b74aa4bed4b250f1aed165b8ef5de743bcca3b88fc9619b6087093d2", size = 557105, upload-time = "2025-07-01T15:54:29.93Z" }, - { url = "https://files.pythonhosted.org/packages/9f/c5/90c569649057622959f6dcc40f7b516539608a414dfd54b8d77e3b201ac0/rpds_py-0.26.0-cp312-cp312-win32.whl", hash = "sha256:3da5852aad63fa0c6f836f3359647870e21ea96cf433eb393ffa45263a170d44", size = 223272, upload-time = "2025-07-01T15:54:31.128Z" }, - { url = "https://files.pythonhosted.org/packages/7d/16/19f5d9f2a556cfed454eebe4d354c38d51c20f3db69e7b4ce6cff904905d/rpds_py-0.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf47cfdabc2194a669dcf7a8dbba62e37a04c5041d2125fae0233b720da6f05c", size = 234995, upload-time = "2025-07-01T15:54:32.195Z" }, - { url = 
"https://files.pythonhosted.org/packages/83/f0/7935e40b529c0e752dfaa7880224771b51175fce08b41ab4a92eb2fbdc7f/rpds_py-0.26.0-cp312-cp312-win_arm64.whl", hash = "sha256:20ab1ae4fa534f73647aad289003f1104092890849e0266271351922ed5574f8", size = 223198, upload-time = "2025-07-01T15:54:33.271Z" }, - { url = "https://files.pythonhosted.org/packages/6a/67/bb62d0109493b12b1c6ab00de7a5566aa84c0e44217c2d94bee1bd370da9/rpds_py-0.26.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:696764a5be111b036256c0b18cd29783fab22154690fc698062fc1b0084b511d", size = 363917, upload-time = "2025-07-01T15:54:34.755Z" }, - { url = "https://files.pythonhosted.org/packages/4b/f3/34e6ae1925a5706c0f002a8d2d7f172373b855768149796af87bd65dcdb9/rpds_py-0.26.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e6c15d2080a63aaed876e228efe4f814bc7889c63b1e112ad46fdc8b368b9e1", size = 350073, upload-time = "2025-07-01T15:54:36.292Z" }, - { url = "https://files.pythonhosted.org/packages/75/83/1953a9d4f4e4de7fd0533733e041c28135f3c21485faaef56a8aadbd96b5/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390e3170babf42462739a93321e657444f0862c6d722a291accc46f9d21ed04e", size = 384214, upload-time = "2025-07-01T15:54:37.469Z" }, - { url = "https://files.pythonhosted.org/packages/48/0e/983ed1b792b3322ea1d065e67f4b230f3b96025f5ce3878cc40af09b7533/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7da84c2c74c0f5bc97d853d9e17bb83e2dcafcff0dc48286916001cc114379a1", size = 400113, upload-time = "2025-07-01T15:54:38.954Z" }, - { url = "https://files.pythonhosted.org/packages/69/7f/36c0925fff6f660a80be259c5b4f5e53a16851f946eb080351d057698528/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c5fe114a6dd480a510b6d3661d09d67d1622c4bf20660a474507aaee7eeeee9", size = 515189, upload-time = "2025-07-01T15:54:40.57Z" }, - { url = 
"https://files.pythonhosted.org/packages/13/45/cbf07fc03ba7a9b54662c9badb58294ecfb24f828b9732970bd1a431ed5c/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3100b3090269f3a7ea727b06a6080d4eb7439dca4c0e91a07c5d133bb1727ea7", size = 406998, upload-time = "2025-07-01T15:54:43.025Z" }, - { url = "https://files.pythonhosted.org/packages/6c/b0/8fa5e36e58657997873fd6a1cf621285ca822ca75b4b3434ead047daa307/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c03c9b0c64afd0320ae57de4c982801271c0c211aa2d37f3003ff5feb75bb04", size = 385903, upload-time = "2025-07-01T15:54:44.752Z" }, - { url = "https://files.pythonhosted.org/packages/4b/f7/b25437772f9f57d7a9fbd73ed86d0dcd76b4c7c6998348c070d90f23e315/rpds_py-0.26.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5963b72ccd199ade6ee493723d18a3f21ba7d5b957017607f815788cef50eaf1", size = 419785, upload-time = "2025-07-01T15:54:46.043Z" }, - { url = "https://files.pythonhosted.org/packages/a7/6b/63ffa55743dfcb4baf2e9e77a0b11f7f97ed96a54558fcb5717a4b2cd732/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da4e873860ad5bab3291438525cae80169daecbfafe5657f7f5fb4d6b3f96b9", size = 561329, upload-time = "2025-07-01T15:54:47.64Z" }, - { url = "https://files.pythonhosted.org/packages/2f/07/1f4f5e2886c480a2346b1e6759c00278b8a69e697ae952d82ae2e6ee5db0/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5afaddaa8e8c7f1f7b4c5c725c0070b6eed0228f705b90a1732a48e84350f4e9", size = 590875, upload-time = "2025-07-01T15:54:48.9Z" }, - { url = "https://files.pythonhosted.org/packages/cc/bc/e6639f1b91c3a55f8c41b47d73e6307051b6e246254a827ede730624c0f8/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4916dc96489616a6f9667e7526af8fa693c0fdb4f3acb0e5d9f4400eb06a47ba", size = 556636, upload-time = "2025-07-01T15:54:50.619Z" }, - { url = 
"https://files.pythonhosted.org/packages/05/4c/b3917c45566f9f9a209d38d9b54a1833f2bb1032a3e04c66f75726f28876/rpds_py-0.26.0-cp313-cp313-win32.whl", hash = "sha256:2a343f91b17097c546b93f7999976fd6c9d5900617aa848c81d794e062ab302b", size = 222663, upload-time = "2025-07-01T15:54:52.023Z" }, - { url = "https://files.pythonhosted.org/packages/e0/0b/0851bdd6025775aaa2365bb8de0697ee2558184c800bfef8d7aef5ccde58/rpds_py-0.26.0-cp313-cp313-win_amd64.whl", hash = "sha256:0a0b60701f2300c81b2ac88a5fb893ccfa408e1c4a555a77f908a2596eb875a5", size = 234428, upload-time = "2025-07-01T15:54:53.692Z" }, - { url = "https://files.pythonhosted.org/packages/ed/e8/a47c64ed53149c75fb581e14a237b7b7cd18217e969c30d474d335105622/rpds_py-0.26.0-cp313-cp313-win_arm64.whl", hash = "sha256:257d011919f133a4746958257f2c75238e3ff54255acd5e3e11f3ff41fd14256", size = 222571, upload-time = "2025-07-01T15:54:54.822Z" }, - { url = "https://files.pythonhosted.org/packages/89/bf/3d970ba2e2bcd17d2912cb42874107390f72873e38e79267224110de5e61/rpds_py-0.26.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:529c8156d7506fba5740e05da8795688f87119cce330c244519cf706a4a3d618", size = 360475, upload-time = "2025-07-01T15:54:56.228Z" }, - { url = "https://files.pythonhosted.org/packages/82/9f/283e7e2979fc4ec2d8ecee506d5a3675fce5ed9b4b7cb387ea5d37c2f18d/rpds_py-0.26.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f53ec51f9d24e9638a40cabb95078ade8c99251945dad8d57bf4aabe86ecee35", size = 346692, upload-time = "2025-07-01T15:54:58.561Z" }, - { url = "https://files.pythonhosted.org/packages/e3/03/7e50423c04d78daf391da3cc4330bdb97042fc192a58b186f2d5deb7befd/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab504c4d654e4a29558eaa5bb8cea5fdc1703ea60a8099ffd9c758472cf913f", size = 379415, upload-time = "2025-07-01T15:54:59.751Z" }, - { url = 
"https://files.pythonhosted.org/packages/57/00/d11ee60d4d3b16808432417951c63df803afb0e0fc672b5e8d07e9edaaae/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd0641abca296bc1a00183fe44f7fced8807ed49d501f188faa642d0e4975b83", size = 391783, upload-time = "2025-07-01T15:55:00.898Z" }, - { url = "https://files.pythonhosted.org/packages/08/b3/1069c394d9c0d6d23c5b522e1f6546b65793a22950f6e0210adcc6f97c3e/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b312fecc1d017b5327afa81d4da1480f51c68810963a7336d92203dbb3d4f1", size = 512844, upload-time = "2025-07-01T15:55:02.201Z" }, - { url = "https://files.pythonhosted.org/packages/08/3b/c4fbf0926800ed70b2c245ceca99c49f066456755f5d6eb8863c2c51e6d0/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c741107203954f6fc34d3066d213d0a0c40f7bb5aafd698fb39888af277c70d8", size = 402105, upload-time = "2025-07-01T15:55:03.698Z" }, - { url = "https://files.pythonhosted.org/packages/1c/b0/db69b52ca07413e568dae9dc674627a22297abb144c4d6022c6d78f1e5cc/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3e55a7db08dc9a6ed5fb7103019d2c1a38a349ac41901f9f66d7f95750942f", size = 383440, upload-time = "2025-07-01T15:55:05.398Z" }, - { url = "https://files.pythonhosted.org/packages/4c/e1/c65255ad5b63903e56b3bb3ff9dcc3f4f5c3badde5d08c741ee03903e951/rpds_py-0.26.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e851920caab2dbcae311fd28f4313c6953993893eb5c1bb367ec69d9a39e7ed", size = 412759, upload-time = "2025-07-01T15:55:08.316Z" }, - { url = "https://files.pythonhosted.org/packages/e4/22/bb731077872377a93c6e93b8a9487d0406c70208985831034ccdeed39c8e/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dfbf280da5f876d0b00c81f26bedce274e72a678c28845453885a9b3c22ae632", size = 556032, upload-time = "2025-07-01T15:55:09.52Z" }, - { url = 
"https://files.pythonhosted.org/packages/e0/8b/393322ce7bac5c4530fb96fc79cc9ea2f83e968ff5f6e873f905c493e1c4/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1cc81d14ddfa53d7f3906694d35d54d9d3f850ef8e4e99ee68bc0d1e5fed9a9c", size = 585416, upload-time = "2025-07-01T15:55:11.216Z" }, - { url = "https://files.pythonhosted.org/packages/49/ae/769dc372211835bf759319a7aae70525c6eb523e3371842c65b7ef41c9c6/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dca83c498b4650a91efcf7b88d669b170256bf8017a5db6f3e06c2bf031f57e0", size = 554049, upload-time = "2025-07-01T15:55:13.004Z" }, - { url = "https://files.pythonhosted.org/packages/6b/f9/4c43f9cc203d6ba44ce3146246cdc38619d92c7bd7bad4946a3491bd5b70/rpds_py-0.26.0-cp313-cp313t-win32.whl", hash = "sha256:4d11382bcaf12f80b51d790dee295c56a159633a8e81e6323b16e55d81ae37e9", size = 218428, upload-time = "2025-07-01T15:55:14.486Z" }, - { url = "https://files.pythonhosted.org/packages/7e/8b/9286b7e822036a4a977f2f1e851c7345c20528dbd56b687bb67ed68a8ede/rpds_py-0.26.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff110acded3c22c033e637dd8896e411c7d3a11289b2edf041f86663dbc791e9", size = 231524, upload-time = "2025-07-01T15:55:15.745Z" }, - { url = "https://files.pythonhosted.org/packages/55/07/029b7c45db910c74e182de626dfdae0ad489a949d84a468465cd0ca36355/rpds_py-0.26.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:da619979df60a940cd434084355c514c25cf8eb4cf9a508510682f6c851a4f7a", size = 364292, upload-time = "2025-07-01T15:55:17.001Z" }, - { url = "https://files.pythonhosted.org/packages/13/d1/9b3d3f986216b4d1f584878dca15ce4797aaf5d372d738974ba737bf68d6/rpds_py-0.26.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ea89a2458a1a75f87caabefe789c87539ea4e43b40f18cff526052e35bbb4fdf", size = 350334, upload-time = "2025-07-01T15:55:18.922Z" }, - { url = 
"https://files.pythonhosted.org/packages/18/98/16d5e7bc9ec715fa9668731d0cf97f6b032724e61696e2db3d47aeb89214/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feac1045b3327a45944e7dcbeb57530339f6b17baff154df51ef8b0da34c8c12", size = 384875, upload-time = "2025-07-01T15:55:20.399Z" }, - { url = "https://files.pythonhosted.org/packages/f9/13/aa5e2b1ec5ab0e86a5c464d53514c0467bec6ba2507027d35fc81818358e/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b818a592bd69bfe437ee8368603d4a2d928c34cffcdf77c2e761a759ffd17d20", size = 399993, upload-time = "2025-07-01T15:55:21.729Z" }, - { url = "https://files.pythonhosted.org/packages/17/03/8021810b0e97923abdbab6474c8b77c69bcb4b2c58330777df9ff69dc559/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a8b0dd8648709b62d9372fc00a57466f5fdeefed666afe3fea5a6c9539a0331", size = 516683, upload-time = "2025-07-01T15:55:22.918Z" }, - { url = "https://files.pythonhosted.org/packages/dc/b1/da8e61c87c2f3d836954239fdbbfb477bb7b54d74974d8f6fcb34342d166/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d3498ad0df07d81112aa6ec6c95a7e7b1ae00929fb73e7ebee0f3faaeabad2f", size = 408825, upload-time = "2025-07-01T15:55:24.207Z" }, - { url = "https://files.pythonhosted.org/packages/38/bc/1fc173edaaa0e52c94b02a655db20697cb5fa954ad5a8e15a2c784c5cbdd/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24a4146ccb15be237fdef10f331c568e1b0e505f8c8c9ed5d67759dac58ac246", size = 387292, upload-time = "2025-07-01T15:55:25.554Z" }, - { url = "https://files.pythonhosted.org/packages/7c/eb/3a9bb4bd90867d21916f253caf4f0d0be7098671b6715ad1cead9fe7bab9/rpds_py-0.26.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9a63785467b2d73635957d32a4f6e73d5e4df497a16a6392fa066b753e87387", size = 420435, upload-time = "2025-07-01T15:55:27.798Z" }, - { 
url = "https://files.pythonhosted.org/packages/cd/16/e066dcdb56f5632713445271a3f8d3d0b426d51ae9c0cca387799df58b02/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:de4ed93a8c91debfd5a047be327b7cc8b0cc6afe32a716bbbc4aedca9e2a83af", size = 562410, upload-time = "2025-07-01T15:55:29.057Z" }, - { url = "https://files.pythonhosted.org/packages/60/22/ddbdec7eb82a0dc2e455be44c97c71c232983e21349836ce9f272e8a3c29/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:caf51943715b12af827696ec395bfa68f090a4c1a1d2509eb4e2cb69abbbdb33", size = 590724, upload-time = "2025-07-01T15:55:30.719Z" }, - { url = "https://files.pythonhosted.org/packages/2c/b4/95744085e65b7187d83f2fcb0bef70716a1ea0a9e5d8f7f39a86e5d83424/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4a59e5bc386de021f56337f757301b337d7ab58baa40174fb150accd480bc953", size = 558285, upload-time = "2025-07-01T15:55:31.981Z" }, - { url = "https://files.pythonhosted.org/packages/37/37/6309a75e464d1da2559446f9c811aa4d16343cebe3dbb73701e63f760caa/rpds_py-0.26.0-cp314-cp314-win32.whl", hash = "sha256:92c8db839367ef16a662478f0a2fe13e15f2227da3c1430a782ad0f6ee009ec9", size = 223459, upload-time = "2025-07-01T15:55:33.312Z" }, - { url = "https://files.pythonhosted.org/packages/d9/6f/8e9c11214c46098b1d1391b7e02b70bb689ab963db3b19540cba17315291/rpds_py-0.26.0-cp314-cp314-win_amd64.whl", hash = "sha256:b0afb8cdd034150d4d9f53926226ed27ad15b7f465e93d7468caaf5eafae0d37", size = 236083, upload-time = "2025-07-01T15:55:34.933Z" }, - { url = "https://files.pythonhosted.org/packages/47/af/9c4638994dd623d51c39892edd9d08e8be8220a4b7e874fa02c2d6e91955/rpds_py-0.26.0-cp314-cp314-win_arm64.whl", hash = "sha256:ca3f059f4ba485d90c8dc75cb5ca897e15325e4e609812ce57f896607c1c0867", size = 223291, upload-time = "2025-07-01T15:55:36.202Z" }, - { url = 
"https://files.pythonhosted.org/packages/4d/db/669a241144460474aab03e254326b32c42def83eb23458a10d163cb9b5ce/rpds_py-0.26.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:5afea17ab3a126006dc2f293b14ffc7ef3c85336cf451564a0515ed7648033da", size = 361445, upload-time = "2025-07-01T15:55:37.483Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2d/133f61cc5807c6c2fd086a46df0eb8f63a23f5df8306ff9f6d0fd168fecc/rpds_py-0.26.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:69f0c0a3df7fd3a7eec50a00396104bb9a843ea6d45fcc31c2d5243446ffd7a7", size = 347206, upload-time = "2025-07-01T15:55:38.828Z" }, - { url = "https://files.pythonhosted.org/packages/05/bf/0e8fb4c05f70273469eecf82f6ccf37248558526a45321644826555db31b/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:801a71f70f9813e82d2513c9a96532551fce1e278ec0c64610992c49c04c2dad", size = 380330, upload-time = "2025-07-01T15:55:40.175Z" }, - { url = "https://files.pythonhosted.org/packages/d4/a8/060d24185d8b24d3923322f8d0ede16df4ade226a74e747b8c7c978e3dd3/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df52098cde6d5e02fa75c1f6244f07971773adb4a26625edd5c18fee906fa84d", size = 392254, upload-time = "2025-07-01T15:55:42.015Z" }, - { url = "https://files.pythonhosted.org/packages/b9/7b/7c2e8a9ee3e6bc0bae26bf29f5219955ca2fbb761dca996a83f5d2f773fe/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bc596b30f86dc6f0929499c9e574601679d0341a0108c25b9b358a042f51bca", size = 516094, upload-time = "2025-07-01T15:55:43.603Z" }, - { url = "https://files.pythonhosted.org/packages/75/d6/f61cafbed8ba1499b9af9f1777a2a199cd888f74a96133d8833ce5eaa9c5/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dfbe56b299cf5875b68eb6f0ebaadc9cac520a1989cac0db0765abfb3709c19", size = 402889, upload-time = "2025-07-01T15:55:45.275Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/19/c8ac0a8a8df2dd30cdec27f69298a5c13e9029500d6d76718130f5e5be10/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac64f4b2bdb4ea622175c9ab7cf09444e412e22c0e02e906978b3b488af5fde8", size = 384301, upload-time = "2025-07-01T15:55:47.098Z" }, - { url = "https://files.pythonhosted.org/packages/41/e1/6b1859898bc292a9ce5776016c7312b672da00e25cec74d7beced1027286/rpds_py-0.26.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:181ef9b6bbf9845a264f9aa45c31836e9f3c1f13be565d0d010e964c661d1e2b", size = 412891, upload-time = "2025-07-01T15:55:48.412Z" }, - { url = "https://files.pythonhosted.org/packages/ef/b9/ceb39af29913c07966a61367b3c08b4f71fad841e32c6b59a129d5974698/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:49028aa684c144ea502a8e847d23aed5e4c2ef7cadfa7d5eaafcb40864844b7a", size = 557044, upload-time = "2025-07-01T15:55:49.816Z" }, - { url = "https://files.pythonhosted.org/packages/2f/27/35637b98380731a521f8ec4f3fd94e477964f04f6b2f8f7af8a2d889a4af/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e5d524d68a474a9688336045bbf76cb0def88549c1b2ad9dbfec1fb7cfbe9170", size = 585774, upload-time = "2025-07-01T15:55:51.192Z" }, - { url = "https://files.pythonhosted.org/packages/52/d9/3f0f105420fecd18551b678c9a6ce60bd23986098b252a56d35781b3e7e9/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c1851f429b822831bd2edcbe0cfd12ee9ea77868f8d3daf267b189371671c80e", size = 554886, upload-time = "2025-07-01T15:55:52.541Z" }, - { url = "https://files.pythonhosted.org/packages/6b/c5/347c056a90dc8dd9bc240a08c527315008e1b5042e7a4cf4ac027be9d38a/rpds_py-0.26.0-cp314-cp314t-win32.whl", hash = "sha256:7bdb17009696214c3b66bb3590c6d62e14ac5935e53e929bcdbc5a495987a84f", size = 219027, upload-time = "2025-07-01T15:55:53.874Z" }, - { url = 
"https://files.pythonhosted.org/packages/75/04/5302cea1aa26d886d34cadbf2dc77d90d7737e576c0065f357b96dc7a1a6/rpds_py-0.26.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f14440b9573a6f76b4ee4770c13f0b5921f71dde3b6fcb8dabbefd13b7fe05d7", size = 232821, upload-time = "2025-07-01T15:55:55.167Z" }, +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469, upload-time = "2025-11-30T20:24:38.837Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/e7/98a2f4ac921d82f33e03f3835f5bf3a4a40aa1bfdc57975e74a97b2b4bdd/rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad", size = 375086, upload-time = "2025-11-30T20:22:17.93Z" }, + { url = "https://files.pythonhosted.org/packages/4d/a1/bca7fd3d452b272e13335db8d6b0b3ecde0f90ad6f16f3328c6fb150c889/rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05", size = 359053, upload-time = "2025-11-30T20:22:19.297Z" }, + { url = "https://files.pythonhosted.org/packages/65/1c/ae157e83a6357eceff62ba7e52113e3ec4834a84cfe07fa4b0757a7d105f/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28", size = 390763, upload-time = "2025-11-30T20:22:21.661Z" }, + { url = "https://files.pythonhosted.org/packages/d4/36/eb2eb8515e2ad24c0bd43c3ee9cd74c33f7ca6430755ccdb240fd3144c44/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd", size = 408951, upload-time = "2025-11-30T20:22:23.408Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/65/ad8dc1784a331fabbd740ef6f71ce2198c7ed0890dab595adb9ea2d775a1/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f", size = 514622, upload-time = "2025-11-30T20:22:25.16Z" }, + { url = "https://files.pythonhosted.org/packages/63/8e/0cfa7ae158e15e143fe03993b5bcd743a59f541f5952e1546b1ac1b5fd45/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1", size = 414492, upload-time = "2025-11-30T20:22:26.505Z" }, + { url = "https://files.pythonhosted.org/packages/60/1b/6f8f29f3f995c7ffdde46a626ddccd7c63aefc0efae881dc13b6e5d5bb16/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23", size = 394080, upload-time = "2025-11-30T20:22:27.934Z" }, + { url = "https://files.pythonhosted.org/packages/6d/d5/a266341051a7a3ca2f4b750a3aa4abc986378431fc2da508c5034d081b70/rpds_py-0.30.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6", size = 408680, upload-time = "2025-11-30T20:22:29.341Z" }, + { url = "https://files.pythonhosted.org/packages/10/3b/71b725851df9ab7a7a4e33cf36d241933da66040d195a84781f49c50490c/rpds_py-0.30.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51", size = 423589, upload-time = "2025-11-30T20:22:31.469Z" }, + { url = "https://files.pythonhosted.org/packages/00/2b/e59e58c544dc9bd8bd8384ecdb8ea91f6727f0e37a7131baeff8d6f51661/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5", size = 573289, upload-time = "2025-11-30T20:22:32.997Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/3e/a18e6f5b460893172a7d6a680e86d3b6bc87a54c1f0b03446a3c8c7b588f/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e", size = 599737, upload-time = "2025-11-30T20:22:34.419Z" }, + { url = "https://files.pythonhosted.org/packages/5c/e2/714694e4b87b85a18e2c243614974413c60aa107fd815b8cbc42b873d1d7/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394", size = 563120, upload-time = "2025-11-30T20:22:35.903Z" }, + { url = "https://files.pythonhosted.org/packages/6f/ab/d5d5e3bcedb0a77f4f613706b750e50a5a3ba1c15ccd3665ecc636c968fd/rpds_py-0.30.0-cp312-cp312-win32.whl", hash = "sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf", size = 223782, upload-time = "2025-11-30T20:22:37.271Z" }, + { url = "https://files.pythonhosted.org/packages/39/3b/f786af9957306fdc38a74cef405b7b93180f481fb48453a114bb6465744a/rpds_py-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b", size = 240463, upload-time = "2025-11-30T20:22:39.021Z" }, + { url = "https://files.pythonhosted.org/packages/f3/d2/b91dc748126c1559042cfe41990deb92c4ee3e2b415f6b5234969ffaf0cc/rpds_py-0.30.0-cp312-cp312-win_arm64.whl", hash = "sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e", size = 230868, upload-time = "2025-11-30T20:22:40.493Z" }, + { url = "https://files.pythonhosted.org/packages/ed/dc/d61221eb88ff410de3c49143407f6f3147acf2538c86f2ab7ce65ae7d5f9/rpds_py-0.30.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f83424d738204d9770830d35290ff3273fbb02b41f919870479fab14b9d303b2", size = 374887, upload-time = "2025-11-30T20:22:41.812Z" }, + { url = "https://files.pythonhosted.org/packages/fd/32/55fb50ae104061dbc564ef15cc43c013dc4a9f4527a1f4d99baddf56fe5f/rpds_py-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", 
hash = "sha256:e7536cd91353c5273434b4e003cbda89034d67e7710eab8761fd918ec6c69cf8", size = 358904, upload-time = "2025-11-30T20:22:43.479Z" }, + { url = "https://files.pythonhosted.org/packages/58/70/faed8186300e3b9bdd138d0273109784eea2396c68458ed580f885dfe7ad/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2771c6c15973347f50fece41fc447c054b7ac2ae0502388ce3b6738cd366e3d4", size = 389945, upload-time = "2025-11-30T20:22:44.819Z" }, + { url = "https://files.pythonhosted.org/packages/bd/a8/073cac3ed2c6387df38f71296d002ab43496a96b92c823e76f46b8af0543/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0a59119fc6e3f460315fe9d08149f8102aa322299deaa5cab5b40092345c2136", size = 407783, upload-time = "2025-11-30T20:22:46.103Z" }, + { url = "https://files.pythonhosted.org/packages/77/57/5999eb8c58671f1c11eba084115e77a8899d6e694d2a18f69f0ba471ec8b/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76fec018282b4ead0364022e3c54b60bf368b9d926877957a8624b58419169b7", size = 515021, upload-time = "2025-11-30T20:22:47.458Z" }, + { url = "https://files.pythonhosted.org/packages/e0/af/5ab4833eadc36c0a8ed2bc5c0de0493c04f6c06de223170bd0798ff98ced/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bef75a5525db97318e8cd061542b5a79812d711ea03dbc1f6f8dbb0c5f0d2", size = 414589, upload-time = "2025-11-30T20:22:48.872Z" }, + { url = "https://files.pythonhosted.org/packages/b7/de/f7192e12b21b9e9a68a6d0f249b4af3fdcdff8418be0767a627564afa1f1/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9027da1ce107104c50c81383cae773ef5c24d296dd11c99e2629dbd7967a20c6", size = 394025, upload-time = "2025-11-30T20:22:50.196Z" }, + { url = "https://files.pythonhosted.org/packages/91/c4/fc70cd0249496493500e7cc2de87504f5aa6509de1e88623431fec76d4b6/rpds_py-0.30.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = 
"sha256:9cf69cdda1f5968a30a359aba2f7f9aa648a9ce4b580d6826437f2b291cfc86e", size = 408895, upload-time = "2025-11-30T20:22:51.87Z" }, + { url = "https://files.pythonhosted.org/packages/58/95/d9275b05ab96556fefff73a385813eb66032e4c99f411d0795372d9abcea/rpds_py-0.30.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4796a717bf12b9da9d3ad002519a86063dcac8988b030e405704ef7d74d2d9d", size = 422799, upload-time = "2025-11-30T20:22:53.341Z" }, + { url = "https://files.pythonhosted.org/packages/06/c1/3088fc04b6624eb12a57eb814f0d4997a44b0d208d6cace713033ff1a6ba/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d4c2aa7c50ad4728a094ebd5eb46c452e9cb7edbfdb18f9e1221f597a73e1e7", size = 572731, upload-time = "2025-11-30T20:22:54.778Z" }, + { url = "https://files.pythonhosted.org/packages/d8/42/c612a833183b39774e8ac8fecae81263a68b9583ee343db33ab571a7ce55/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba81a9203d07805435eb06f536d95a266c21e5b2dfbf6517748ca40c98d19e31", size = 599027, upload-time = "2025-11-30T20:22:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/5f/60/525a50f45b01d70005403ae0e25f43c0384369ad24ffe46e8d9068b50086/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:945dccface01af02675628334f7cf49c2af4c1c904748efc5cf7bbdf0b579f95", size = 563020, upload-time = "2025-11-30T20:22:58.2Z" }, + { url = "https://files.pythonhosted.org/packages/0b/5d/47c4655e9bcd5ca907148535c10e7d489044243cc9941c16ed7cd53be91d/rpds_py-0.30.0-cp313-cp313-win32.whl", hash = "sha256:b40fb160a2db369a194cb27943582b38f79fc4887291417685f3ad693c5a1d5d", size = 223139, upload-time = "2025-11-30T20:23:00.209Z" }, + { url = "https://files.pythonhosted.org/packages/f2/e1/485132437d20aa4d3e1d8b3fb5a5e65aa8139f1e097080c2a8443201742c/rpds_py-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:806f36b1b605e2d6a72716f321f20036b9489d29c51c91f4dd29a3e3afb73b15", size = 240224, upload-time = "2025-11-30T20:23:02.008Z" }, + { 
url = "https://files.pythonhosted.org/packages/24/95/ffd128ed1146a153d928617b0ef673960130be0009c77d8fbf0abe306713/rpds_py-0.30.0-cp313-cp313-win_arm64.whl", hash = "sha256:d96c2086587c7c30d44f31f42eae4eac89b60dabbac18c7669be3700f13c3ce1", size = 230645, upload-time = "2025-11-30T20:23:03.43Z" }, + { url = "https://files.pythonhosted.org/packages/ff/1b/b10de890a0def2a319a2626334a7f0ae388215eb60914dbac8a3bae54435/rpds_py-0.30.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:eb0b93f2e5c2189ee831ee43f156ed34e2a89a78a66b98cadad955972548be5a", size = 364443, upload-time = "2025-11-30T20:23:04.878Z" }, + { url = "https://files.pythonhosted.org/packages/0d/bf/27e39f5971dc4f305a4fb9c672ca06f290f7c4e261c568f3dea16a410d47/rpds_py-0.30.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:922e10f31f303c7c920da8981051ff6d8c1a56207dbdf330d9047f6d30b70e5e", size = 353375, upload-time = "2025-11-30T20:23:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/40/58/442ada3bba6e8e6615fc00483135c14a7538d2ffac30e2d933ccf6852232/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdc62c8286ba9bf7f47befdcea13ea0e26bf294bda99758fd90535cbaf408000", size = 383850, upload-time = "2025-11-30T20:23:07.825Z" }, + { url = "https://files.pythonhosted.org/packages/14/14/f59b0127409a33c6ef6f5c1ebd5ad8e32d7861c9c7adfa9a624fc3889f6c/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47f9a91efc418b54fb8190a6b4aa7813a23fb79c51f4bb84e418f5476c38b8db", size = 392812, upload-time = "2025-11-30T20:23:09.228Z" }, + { url = "https://files.pythonhosted.org/packages/b3/66/e0be3e162ac299b3a22527e8913767d869e6cc75c46bd844aa43fb81ab62/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3587eb9b17f3789ad50824084fa6f81921bbf9a795826570bda82cb3ed91f2", size = 517841, upload-time = "2025-11-30T20:23:11.186Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/55/fa3b9cf31d0c963ecf1ba777f7cf4b2a2c976795ac430d24a1f43d25a6ba/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39c02563fc592411c2c61d26b6c5fe1e51eaa44a75aa2c8735ca88b0d9599daa", size = 408149, upload-time = "2025-11-30T20:23:12.864Z" }, + { url = "https://files.pythonhosted.org/packages/60/ca/780cf3b1a32b18c0f05c441958d3758f02544f1d613abf9488cd78876378/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a1234d8febafdfd33a42d97da7a43f5dcb120c1060e352a3fbc0c6d36e2083", size = 383843, upload-time = "2025-11-30T20:23:14.638Z" }, + { url = "https://files.pythonhosted.org/packages/82/86/d5f2e04f2aa6247c613da0c1dd87fcd08fa17107e858193566048a1e2f0a/rpds_py-0.30.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:eb2c4071ab598733724c08221091e8d80e89064cd472819285a9ab0f24bcedb9", size = 396507, upload-time = "2025-11-30T20:23:16.105Z" }, + { url = "https://files.pythonhosted.org/packages/4b/9a/453255d2f769fe44e07ea9785c8347edaf867f7026872e76c1ad9f7bed92/rpds_py-0.30.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bdfdb946967d816e6adf9a3d8201bfad269c67efe6cefd7093ef959683c8de0", size = 414949, upload-time = "2025-11-30T20:23:17.539Z" }, + { url = "https://files.pythonhosted.org/packages/a3/31/622a86cdc0c45d6df0e9ccb6becdba5074735e7033c20e401a6d9d0e2ca0/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c77afbd5f5250bf27bf516c7c4a016813eb2d3e116139aed0096940c5982da94", size = 565790, upload-time = "2025-11-30T20:23:19.029Z" }, + { url = "https://files.pythonhosted.org/packages/1c/5d/15bbf0fb4a3f58a3b1c67855ec1efcc4ceaef4e86644665fff03e1b66d8d/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:61046904275472a76c8c90c9ccee9013d70a6d0f73eecefd38c1ae7c39045a08", size = 590217, upload-time = "2025-11-30T20:23:20.885Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/61/21b8c41f68e60c8cc3b2e25644f0e3681926020f11d06ab0b78e3c6bbff1/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27", size = 555806, upload-time = "2025-11-30T20:23:22.488Z" }, + { url = "https://files.pythonhosted.org/packages/f9/39/7e067bb06c31de48de3eb200f9fc7c58982a4d3db44b07e73963e10d3be9/rpds_py-0.30.0-cp313-cp313t-win32.whl", hash = "sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6", size = 211341, upload-time = "2025-11-30T20:23:24.449Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4d/222ef0b46443cf4cf46764d9c630f3fe4abaa7245be9417e56e9f52b8f65/rpds_py-0.30.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d", size = 225768, upload-time = "2025-11-30T20:23:25.908Z" }, + { url = "https://files.pythonhosted.org/packages/86/81/dad16382ebbd3d0e0328776d8fd7ca94220e4fa0798d1dc5e7da48cb3201/rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0", size = 362099, upload-time = "2025-11-30T20:23:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/2b/60/19f7884db5d5603edf3c6bce35408f45ad3e97e10007df0e17dd57af18f8/rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be", size = 353192, upload-time = "2025-11-30T20:23:29.151Z" }, + { url = "https://files.pythonhosted.org/packages/bf/c4/76eb0e1e72d1a9c4703c69607cec123c29028bff28ce41588792417098ac/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f", size = 384080, upload-time = "2025-11-30T20:23:30.785Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/87/87ea665e92f3298d1b26d78814721dc39ed8d2c74b86e83348d6b48a6f31/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f", size = 394841, upload-time = "2025-11-30T20:23:32.209Z" }, + { url = "https://files.pythonhosted.org/packages/77/ad/7783a89ca0587c15dcbf139b4a8364a872a25f861bdb88ed99f9b0dec985/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87", size = 516670, upload-time = "2025-11-30T20:23:33.742Z" }, + { url = "https://files.pythonhosted.org/packages/5b/3c/2882bdac942bd2172f3da574eab16f309ae10a3925644e969536553cb4ee/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18", size = 408005, upload-time = "2025-11-30T20:23:35.253Z" }, + { url = "https://files.pythonhosted.org/packages/ce/81/9a91c0111ce1758c92516a3e44776920b579d9a7c09b2b06b642d4de3f0f/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad", size = 382112, upload-time = "2025-11-30T20:23:36.842Z" }, + { url = "https://files.pythonhosted.org/packages/cf/8e/1da49d4a107027e5fbc64daeab96a0706361a2918da10cb41769244b805d/rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07", size = 399049, upload-time = "2025-11-30T20:23:38.343Z" }, + { url = "https://files.pythonhosted.org/packages/df/5a/7ee239b1aa48a127570ec03becbb29c9d5a9eb092febbd1699d567cae859/rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f", size = 415661, upload-time = "2025-11-30T20:23:40.263Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/ea/caa143cf6b772f823bc7929a45da1fa83569ee49b11d18d0ada7f5ee6fd6/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65", size = 565606, upload-time = "2025-11-30T20:23:42.186Z" }, + { url = "https://files.pythonhosted.org/packages/64/91/ac20ba2d69303f961ad8cf55bf7dbdb4763f627291ba3d0d7d67333cced9/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f", size = 591126, upload-time = "2025-11-30T20:23:44.086Z" }, + { url = "https://files.pythonhosted.org/packages/21/20/7ff5f3c8b00c8a95f75985128c26ba44503fb35b8e0259d812766ea966c7/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53", size = 553371, upload-time = "2025-11-30T20:23:46.004Z" }, + { url = "https://files.pythonhosted.org/packages/72/c7/81dadd7b27c8ee391c132a6b192111ca58d866577ce2d9b0ca157552cce0/rpds_py-0.30.0-cp314-cp314-win32.whl", hash = "sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed", size = 215298, upload-time = "2025-11-30T20:23:47.696Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d2/1aaac33287e8cfb07aab2e6b8ac1deca62f6f65411344f1433c55e6f3eb8/rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950", size = 228604, upload-time = "2025-11-30T20:23:49.501Z" }, + { url = "https://files.pythonhosted.org/packages/e8/95/ab005315818cc519ad074cb7784dae60d939163108bd2b394e60dc7b5461/rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6", size = 222391, upload-time = "2025-11-30T20:23:50.96Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/68/154fe0194d83b973cdedcdcc88947a2752411165930182ae41d983dcefa6/rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb", size = 364868, upload-time = "2025-11-30T20:23:52.494Z" }, + { url = "https://files.pythonhosted.org/packages/83/69/8bbc8b07ec854d92a8b75668c24d2abcb1719ebf890f5604c61c9369a16f/rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8", size = 353747, upload-time = "2025-11-30T20:23:54.036Z" }, + { url = "https://files.pythonhosted.org/packages/ab/00/ba2e50183dbd9abcce9497fa5149c62b4ff3e22d338a30d690f9af970561/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7", size = 383795, upload-time = "2025-11-30T20:23:55.556Z" }, + { url = "https://files.pythonhosted.org/packages/05/6f/86f0272b84926bcb0e4c972262f54223e8ecc556b3224d281e6598fc9268/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898", size = 393330, upload-time = "2025-11-30T20:23:57.033Z" }, + { url = "https://files.pythonhosted.org/packages/cb/e9/0e02bb2e6dc63d212641da45df2b0bf29699d01715913e0d0f017ee29438/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e", size = 518194, upload-time = "2025-11-30T20:23:58.637Z" }, + { url = "https://files.pythonhosted.org/packages/ee/ca/be7bca14cf21513bdf9c0606aba17d1f389ea2b6987035eb4f62bd923f25/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419", size = 408340, upload-time = "2025-11-30T20:24:00.2Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/c7/736e00ebf39ed81d75544c0da6ef7b0998f8201b369acf842f9a90dc8fce/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551", size = 383765, upload-time = "2025-11-30T20:24:01.759Z" }, + { url = "https://files.pythonhosted.org/packages/4a/3f/da50dfde9956aaf365c4adc9533b100008ed31aea635f2b8d7b627e25b49/rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8", size = 396834, upload-time = "2025-11-30T20:24:03.687Z" }, + { url = "https://files.pythonhosted.org/packages/4e/00/34bcc2565b6020eab2623349efbdec810676ad571995911f1abdae62a3a0/rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5", size = 415470, upload-time = "2025-11-30T20:24:05.232Z" }, + { url = "https://files.pythonhosted.org/packages/8c/28/882e72b5b3e6f718d5453bd4d0d9cf8df36fddeb4ddbbab17869d5868616/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404", size = 565630, upload-time = "2025-11-30T20:24:06.878Z" }, + { url = "https://files.pythonhosted.org/packages/3b/97/04a65539c17692de5b85c6e293520fd01317fd878ea1995f0367d4532fb1/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856", size = 591148, upload-time = "2025-11-30T20:24:08.445Z" }, + { url = "https://files.pythonhosted.org/packages/85/70/92482ccffb96f5441aab93e26c4d66489eb599efdcf96fad90c14bbfb976/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40", size = 556030, upload-time = "2025-11-30T20:24:10.956Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/53/7c7e784abfa500a2b6b583b147ee4bb5a2b3747a9166bab52fec4b5b5e7d/rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0", size = 211570, upload-time = "2025-11-30T20:24:12.735Z" }, + { url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" }, ] [[package]] name = "ruff" -version = "0.11.11" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/53/ae4857030d59286924a8bdb30d213d6ff22d8f0957e738d0289990091dd8/ruff-0.11.11.tar.gz", hash = "sha256:7774173cc7c1980e6bf67569ebb7085989a78a103922fb83ef3dfe230cd0687d", size = 4186707, upload-time = "2025-05-22T19:19:34.363Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/14/f2326676197bab099e2a24473158c21656fbf6a207c65f596ae15acb32b9/ruff-0.11.11-py3-none-linux_armv6l.whl", hash = "sha256:9924e5ae54125ed8958a4f7de320dab7380f6e9fa3195e3dc3b137c6842a0092", size = 10229049, upload-time = "2025-05-22T19:18:45.516Z" }, - { url = "https://files.pythonhosted.org/packages/9a/f3/bff7c92dd66c959e711688b2e0768e486bbca46b2f35ac319bb6cce04447/ruff-0.11.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c8a93276393d91e952f790148eb226658dd275cddfde96c6ca304873f11d2ae4", size = 11053601, upload-time = "2025-05-22T19:18:49.269Z" }, - { url = "https://files.pythonhosted.org/packages/e2/38/8e1a3efd0ef9d8259346f986b77de0f62c7a5ff4a76563b6b39b68f793b9/ruff-0.11.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d6e333dbe2e6ae84cdedefa943dfd6434753ad321764fd937eef9d6b62022bcd", size = 10367421, upload-time = "2025-05-22T19:18:51.754Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/50/557ad9dd4fb9d0bf524ec83a090a3932d284d1a8b48b5906b13b72800e5f/ruff-0.11.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7885d9a5e4c77b24e8c88aba8c80be9255fa22ab326019dac2356cff42089fc6", size = 10581980, upload-time = "2025-05-22T19:18:54.011Z" }, - { url = "https://files.pythonhosted.org/packages/c4/b2/e2ed82d6e2739ece94f1bdbbd1d81b712d3cdaf69f0a1d1f1a116b33f9ad/ruff-0.11.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1b5ab797fcc09121ed82e9b12b6f27e34859e4227080a42d090881be888755d4", size = 10089241, upload-time = "2025-05-22T19:18:56.041Z" }, - { url = "https://files.pythonhosted.org/packages/3d/9f/b4539f037a5302c450d7c695c82f80e98e48d0d667ecc250e6bdeb49b5c3/ruff-0.11.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e231ff3132c1119ece836487a02785f099a43992b95c2f62847d29bace3c75ac", size = 11699398, upload-time = "2025-05-22T19:18:58.248Z" }, - { url = "https://files.pythonhosted.org/packages/61/fb/32e029d2c0b17df65e6eaa5ce7aea5fbeaed22dddd9fcfbbf5fe37c6e44e/ruff-0.11.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a97c9babe1d4081037a90289986925726b802d180cca784ac8da2bbbc335f709", size = 12427955, upload-time = "2025-05-22T19:19:00.981Z" }, - { url = "https://files.pythonhosted.org/packages/6e/e3/160488dbb11f18c8121cfd588e38095ba779ae208292765972f7732bfd95/ruff-0.11.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8c4ddcbe8a19f59f57fd814b8b117d4fcea9bee7c0492e6cf5fdc22cfa563c8", size = 12069803, upload-time = "2025-05-22T19:19:03.258Z" }, - { url = "https://files.pythonhosted.org/packages/ff/16/3b006a875f84b3d0bff24bef26b8b3591454903f6f754b3f0a318589dcc3/ruff-0.11.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6224076c344a7694c6fbbb70d4f2a7b730f6d47d2a9dc1e7f9d9bb583faf390b", size = 11242630, upload-time = "2025-05-22T19:19:05.871Z" }, - { url = 
"https://files.pythonhosted.org/packages/65/0d/0338bb8ac0b97175c2d533e9c8cdc127166de7eb16d028a43c5ab9e75abd/ruff-0.11.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:882821fcdf7ae8db7a951df1903d9cb032bbe838852e5fc3c2b6c3ab54e39875", size = 11507310, upload-time = "2025-05-22T19:19:08.584Z" }, - { url = "https://files.pythonhosted.org/packages/6f/bf/d7130eb26174ce9b02348b9f86d5874eafbf9f68e5152e15e8e0a392e4a3/ruff-0.11.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:dcec2d50756463d9df075a26a85a6affbc1b0148873da3997286caf1ce03cae1", size = 10441144, upload-time = "2025-05-22T19:19:13.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/f3/4be2453b258c092ff7b1761987cf0749e70ca1340cd1bfb4def08a70e8d8/ruff-0.11.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:99c28505ecbaeb6594701a74e395b187ee083ee26478c1a795d35084d53ebd81", size = 10081987, upload-time = "2025-05-22T19:19:15.821Z" }, - { url = "https://files.pythonhosted.org/packages/6c/6e/dfa4d2030c5b5c13db158219f2ec67bf333e8a7748dccf34cfa2a6ab9ebc/ruff-0.11.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9263f9e5aa4ff1dec765e99810f1cc53f0c868c5329b69f13845f699fe74f639", size = 11073922, upload-time = "2025-05-22T19:19:18.104Z" }, - { url = "https://files.pythonhosted.org/packages/ff/f4/f7b0b0c3d32b593a20ed8010fa2c1a01f2ce91e79dda6119fcc51d26c67b/ruff-0.11.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:64ac6f885e3ecb2fdbb71de2701d4e34526651f1e8503af8fb30d4915a3fe345", size = 11568537, upload-time = "2025-05-22T19:19:20.889Z" }, - { url = "https://files.pythonhosted.org/packages/d2/46/0e892064d0adc18bcc81deed9aaa9942a27fd2cd9b1b7791111ce468c25f/ruff-0.11.11-py3-none-win32.whl", hash = "sha256:1adcb9a18802268aaa891ffb67b1c94cd70578f126637118e8099b8e4adcf112", size = 10536492, upload-time = "2025-05-22T19:19:23.642Z" }, - { url = 
"https://files.pythonhosted.org/packages/1b/d9/232e79459850b9f327e9f1dc9c047a2a38a6f9689e1ec30024841fc4416c/ruff-0.11.11-py3-none-win_amd64.whl", hash = "sha256:748b4bb245f11e91a04a4ff0f96e386711df0a30412b9fe0c74d5bdc0e4a531f", size = 11612562, upload-time = "2025-05-22T19:19:27.013Z" }, - { url = "https://files.pythonhosted.org/packages/ce/eb/09c132cff3cc30b2e7244191dcce69437352d6d6709c0adf374f3e6f476e/ruff-0.11.11-py3-none-win_arm64.whl", hash = "sha256:6c51f136c0364ab1b774767aa8b86331bd8e9d414e2d107db7a2189f35ea1f7b", size = 10735951, upload-time = "2025-05-22T19:19:30.043Z" }, +version = "0.14.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/06/f71e3a86b2df0dfa2d2f72195941cd09b44f87711cb7fa5193732cb9a5fc/ruff-0.14.14.tar.gz", hash = "sha256:2d0f819c9a90205f3a867dbbd0be083bee9912e170fd7d9704cc8ae45824896b", size = 4515732, upload-time = "2026-01-22T22:30:17.527Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/89/20a12e97bc6b9f9f68343952da08a8099c57237aef953a56b82711d55edd/ruff-0.14.14-py3-none-linux_armv6l.whl", hash = "sha256:7cfe36b56e8489dee8fbc777c61959f60ec0f1f11817e8f2415f429552846aed", size = 10467650, upload-time = "2026-01-22T22:30:08.578Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b1/c5de3fd2d5a831fcae21beda5e3589c0ba67eec8202e992388e4b17a6040/ruff-0.14.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6006a0082336e7920b9573ef8a7f52eec837add1265cc74e04ea8a4368cd704c", size = 10883245, upload-time = "2026-01-22T22:30:04.155Z" }, + { url = "https://files.pythonhosted.org/packages/b8/7c/3c1db59a10e7490f8f6f8559d1db8636cbb13dccebf18686f4e3c9d7c772/ruff-0.14.14-py3-none-macosx_11_0_arm64.whl", hash = "sha256:026c1d25996818f0bf498636686199d9bd0d9d6341c9c2c3b62e2a0198b758de", size = 10231273, upload-time = "2026-01-22T22:30:34.642Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/6e/5e0e0d9674be0f8581d1f5e0f0a04761203affce3232c1a1189d0e3b4dad/ruff-0.14.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f666445819d31210b71e0a6d1c01e24447a20b85458eea25a25fe8142210ae0e", size = 10585753, upload-time = "2026-01-22T22:30:31.781Z" }, + { url = "https://files.pythonhosted.org/packages/23/09/754ab09f46ff1884d422dc26d59ba18b4e5d355be147721bb2518aa2a014/ruff-0.14.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c0f18b922c6d2ff9a5e6c3ee16259adc513ca775bcf82c67ebab7cbd9da5bc8", size = 10286052, upload-time = "2026-01-22T22:30:24.827Z" }, + { url = "https://files.pythonhosted.org/packages/c8/cc/e71f88dd2a12afb5f50733851729d6b571a7c3a35bfdb16c3035132675a0/ruff-0.14.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1629e67489c2dea43e8658c3dba659edbfd87361624b4040d1df04c9740ae906", size = 11043637, upload-time = "2026-01-22T22:30:13.239Z" }, + { url = "https://files.pythonhosted.org/packages/67/b2/397245026352494497dac935d7f00f1468c03a23a0c5db6ad8fc49ca3fb2/ruff-0.14.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:27493a2131ea0f899057d49d303e4292b2cae2bb57253c1ed1f256fbcd1da480", size = 12194761, upload-time = "2026-01-22T22:30:22.542Z" }, + { url = "https://files.pythonhosted.org/packages/5b/06/06ef271459f778323112c51b7587ce85230785cd64e91772034ddb88f200/ruff-0.14.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ff589aab3f5b539e35db38425da31a57521efd1e4ad1ae08fc34dbe30bd7df", size = 12005701, upload-time = "2026-01-22T22:30:20.499Z" }, + { url = "https://files.pythonhosted.org/packages/41/d6/99364514541cf811ccc5ac44362f88df66373e9fec1b9d1c4cc830593fe7/ruff-0.14.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc12d74eef0f29f51775f5b755913eb523546b88e2d733e1d701fe65144e89b", size = 11282455, upload-time = "2026-01-22T22:29:59.679Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/71/37daa46f89475f8582b7762ecd2722492df26421714a33e72ccc9a84d7a5/ruff-0.14.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb8481604b7a9e75eff53772496201690ce2687067e038b3cc31aaf16aa0b974", size = 11215882, upload-time = "2026-01-22T22:29:57.032Z" }, + { url = "https://files.pythonhosted.org/packages/2c/10/a31f86169ec91c0705e618443ee74ede0bdd94da0a57b28e72db68b2dbac/ruff-0.14.14-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:14649acb1cf7b5d2d283ebd2f58d56b75836ed8c6f329664fa91cdea19e76e66", size = 11180549, upload-time = "2026-01-22T22:30:27.175Z" }, + { url = "https://files.pythonhosted.org/packages/fd/1e/c723f20536b5163adf79bdd10c5f093414293cdf567eed9bdb7b83940f3f/ruff-0.14.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8058d2145566510790eab4e2fad186002e288dec5e0d343a92fe7b0bc1b3e13", size = 10543416, upload-time = "2026-01-22T22:30:01.964Z" }, + { url = "https://files.pythonhosted.org/packages/3e/34/8a84cea7e42c2d94ba5bde1d7a4fae164d6318f13f933d92da6d7c2041ff/ruff-0.14.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e651e977a79e4c758eb807f0481d673a67ffe53cfa92209781dfa3a996cf8412", size = 10285491, upload-time = "2026-01-22T22:30:29.51Z" }, + { url = "https://files.pythonhosted.org/packages/55/ef/b7c5ea0be82518906c978e365e56a77f8de7678c8bb6651ccfbdc178c29f/ruff-0.14.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cc8b22da8d9d6fdd844a68ae937e2a0adf9b16514e9a97cc60355e2d4b219fc3", size = 10733525, upload-time = "2026-01-22T22:30:06.499Z" }, + { url = "https://files.pythonhosted.org/packages/6a/5b/aaf1dfbcc53a2811f6cc0a1759de24e4b03e02ba8762daabd9b6bd8c59e3/ruff-0.14.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:16bc890fb4cc9781bb05beb5ab4cd51be9e7cb376bf1dd3580512b24eb3fda2b", size = 11315626, upload-time = "2026-01-22T22:30:36.848Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/aa/9f89c719c467dfaf8ad799b9bae0df494513fb21d31a6059cb5870e57e74/ruff-0.14.14-py3-none-win32.whl", hash = "sha256:b530c191970b143375b6a68e6f743800b2b786bbcf03a7965b06c4bf04568167", size = 10502442, upload-time = "2026-01-22T22:30:38.93Z" }, + { url = "https://files.pythonhosted.org/packages/87/44/90fa543014c45560cae1fffc63ea059fb3575ee6e1cb654562197e5d16fb/ruff-0.14.14-py3-none-win_amd64.whl", hash = "sha256:3dde1435e6b6fe5b66506c1dff67a421d0b7f6488d466f651c07f4cab3bf20fd", size = 11630486, upload-time = "2026-01-22T22:30:10.852Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6a/40fee331a52339926a92e17ae748827270b288a35ef4a15c9c8f2ec54715/ruff-0.14.14-py3-none-win_arm64.whl", hash = "sha256:56e6981a98b13a32236a72a8da421d7839221fa308b223b9283312312e5ac76c", size = 10920448, upload-time = "2026-01-22T22:30:15.417Z" }, ] [[package]] @@ -1828,15 +2168,15 @@ wheels = [ [[package]] name = "secretstorage" -version = "3.3.3" +version = "3.5.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, { name = "jeepney" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/53/a4/f48c9d79cb507ed1373477dbceaba7401fd8a23af63b837fa61f1dcd3691/SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77", size = 19739, upload-time = "2022-08-13T16:22:46.976Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/03/e834bcd866f2f8a49a85eaff47340affa3bfa391ee9912a952a1faa68c7b/secretstorage-3.5.0.tar.gz", hash = "sha256:f04b8e4689cbce351744d5537bf6b1329c6fc68f91fa666f60a380edddcd11be", size = 19884, upload-time = "2025-11-23T19:02:53.191Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/54/24/b4293291fa1dd830f353d2cb163295742fa87f179fcc8a20a306a81978b7/SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99", size = 15221, upload-time = 
"2022-08-13T16:22:44.457Z" }, + { url = "https://files.pythonhosted.org/packages/b7/46/f5af3402b579fd5e11573ce652019a67074317e18c1935cc0b4ba9b35552/secretstorage-3.5.0-py3-none-any.whl", hash = "sha256:0ce65888c0725fcb2c5bc0fdb8e5438eece02c523557ea40ce0703c266248137", size = 15554, upload-time = "2025-11-23T19:02:51.545Z" }, ] [[package]] @@ -1848,24 +2188,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, ] -[[package]] -name = "six" -version = "1.17.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", 
size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, -] - [[package]] name = "sortedcontainers" version = "2.4.0" @@ -1877,57 +2199,57 @@ wheels = [ [[package]] name = "sse-starlette" -version = "2.2.1" +version = "3.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "starlette" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/71/a4/80d2a11af59fe75b48230846989e93979c892d3a20016b42bb44edb9e398/sse_starlette-2.2.1.tar.gz", hash = "sha256:54470d5f19274aeed6b2d473430b08b4b379ea851d953b11d7f1c4a2c118b419", size = 17376, upload-time = "2024-12-25T09:09:30.616Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/00d280c03ffd39aaee0e86ec81e2d3b9253036a0f93f51d10503adef0e65/sse_starlette-3.2.0.tar.gz", hash = "sha256:8127594edfb51abe44eac9c49e59b0b01f1039d0c7461c6fd91d4e03b70da422", size = 27253, upload-time = "2026-01-17T13:11:05.62Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/e0/5b8bd393f27f4a62461c5cf2479c75a2cc2ffa330976f9f00f5f6e4f50eb/sse_starlette-2.2.1-py3-none-any.whl", hash = "sha256:6410a3d3ba0c89e7675d4c273a301d64649c03a5ef1ca101f10b47f895fd0e99", size = 10120, upload-time = "2024-12-25T09:09:26.761Z" }, + { url = "https://files.pythonhosted.org/packages/96/7f/832f015020844a8b8f7a9cbc103dd76ba8e3875004c41e08440ea3a2b41a/sse_starlette-3.2.0-py3-none-any.whl", hash = "sha256:5876954bd51920fc2cd51baee47a080eb88a37b5b784e615abb0b283f801cdbf", size = 12763, upload-time = "2026-01-17T13:11:03.775Z" }, ] [[package]] name = "starlette" -version = "0.46.1" +version = "0.52.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/1b/52b27f2e13ceedc79a908e29eac426a63465a1a01248e5f24aa36a62aeb3/starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230", 
size = 2580102, upload-time = "2025-03-08T10:55:34.504Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/4b/528ccf7a982216885a1ff4908e886b8fb5f19862d1962f56a3fce2435a70/starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227", size = 71995, upload-time = "2025-03-08T10:55:32.662Z" }, + { url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" }, ] [[package]] name = "ty" -version = "0.0.1a12" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/91/be7dfada3aec20ee06ed350d508f00a2fd47dfdcda61d76875e7cb80abfd/ty-0.0.1a12.tar.gz", hash = "sha256:41dfc8eac0b4fb735d5e101cde8c8734a3c13f670eeebc975760e6414882b702", size = 3127188, upload-time = "2025-06-25T11:50:06.358Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/66/93/b70c7f54f55de52c6a01cc16fbb0880e80b2bf4ce80f73722fe05d3db630/ty-0.0.1a12-py3-none-linux_armv6l.whl", hash = "sha256:acb0959ac54853e677a44a10bbb7b209389eac5ec4f3084705c8065625badfa3", size = 6718708, upload-time = "2025-06-25T11:49:33.281Z" }, - { url = "https://files.pythonhosted.org/packages/f6/79/5dff5e35e9c00a1ea632ef3d1844b989e0674a2871d30314cab147e51618/ty-0.0.1a12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:65da32147fac319ee4ca08af25e363ba8ebe461268e13dc3b09fcdd74974e338", size = 6837580, upload-time = "2025-06-25T11:49:35.287Z" }, - { url = 
"https://files.pythonhosted.org/packages/60/8c/800e21ee673a00a6360519946f45266791ffc5fd40ad3ff3ea36d1d689a6/ty-0.0.1a12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f8522efca591a621f19af89d639176f329b46d6db475510333fca92e4bc8279e", size = 6468202, upload-time = "2025-06-25T11:49:36.97Z" }, - { url = "https://files.pythonhosted.org/packages/a2/48/f3913803eb5f7a99fc449dd047b1e61735e917dc59294ff55b4637ca69d1/ty-0.0.1a12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d6627db2a8ebc12a28acf55d017f8a11e06a87f55dae4dee5677ea02dc72702", size = 6596703, upload-time = "2025-06-25T11:49:38.739Z" }, - { url = "https://files.pythonhosted.org/packages/17/bc/ad290112a7cbe4bfae9e33611971d3228314ca8bd5dc8faeb33dd015c427/ty-0.0.1a12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2876e6e947d5696511d8b185f3b45dc3f8a96c409e3fe1c05533cef0fcd9541b", size = 6577838, upload-time = "2025-06-25T11:49:40.713Z" }, - { url = "https://files.pythonhosted.org/packages/3e/f5/2c4c26b1ebc2e0ddea6d7309133f4f48d3530b2fda14021a73aa2d596357/ty-0.0.1a12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fc31000e0f0e054c8aba92db67f1fcd73c588dab598b020789699f23fd61eff", size = 7349544, upload-time = "2025-06-25T11:49:42.517Z" }, - { url = "https://files.pythonhosted.org/packages/d4/b5/5985ad9e2a17fdf7df950a824c3b00a04086f4b4cd42a4c982d7dd2b43da/ty-0.0.1a12-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e6d214ad154ab9c265b268257703f46c6d4a3a5901e0e9bcbc879760a6118041", size = 7791984, upload-time = "2025-06-25T11:49:44.32Z" }, - { url = "https://files.pythonhosted.org/packages/a7/e0/db803dbbbcefc0ff343e46330e68ec42b6963eb2dcf30bb4f00fcdd2aa1c/ty-0.0.1a12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d60a61fe04acefafc8fcd1ef2dc1383ec2cad53c4409d4223817f85a1cb3ef8a", size = 7448658, upload-time = "2025-06-25T11:49:46.114Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/45/cffd60dd22c8ec8a56a9de1a69ae9e7b0924f90994b6ef6d55a63656438e/ty-0.0.1a12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ad7dff29bb96bda0dec80dc494946e6cda3e377ebda755ff2d453db0211228e", size = 7323623, upload-time = "2025-06-25T11:49:48.229Z" }, - { url = "https://files.pythonhosted.org/packages/6a/d4/153cbdb64ee712872959b9421bc1a4d06636946edf0aa984011dc7601230/ty-0.0.1a12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdd258c97f076de6e289cb41b3b24812ab5a562d4d0e98573bf38c195d564d92", size = 7130031, upload-time = "2025-06-25T11:49:50.21Z" }, - { url = "https://files.pythonhosted.org/packages/7e/c6/4f20e75ec37782434b44d0b59be0011fa4ddcd2d0f2f91e7d53fb88e3e6c/ty-0.0.1a12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4b2fe76b8e7b4a066a962e839993f3422ce1391b2261afe0384b3560efce8f80", size = 6499534, upload-time = "2025-06-25T11:49:52.244Z" }, - { url = "https://files.pythonhosted.org/packages/52/ce/915232248ac9000f9122c477410b731d6c9c23e1da1c9002091f25270cd1/ty-0.0.1a12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b19ae81024646350a3bb1031c61608ed836e8cf05e8b6e1d3b6ab465abeeff80", size = 6608484, upload-time = "2025-06-25T11:49:54.305Z" }, - { url = "https://files.pythonhosted.org/packages/11/30/073cb624440173cfb40196fae9be87125e40b2d224fa9ced68df60a401cf/ty-0.0.1a12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:04cefeccc934a6389c21fd41426c271a95751e88544eb70f64953a8caa5306f8", size = 7012818, upload-time = "2025-06-25T11:49:56.228Z" }, - { url = "https://files.pythonhosted.org/packages/d3/13/cae962003ffd8c56cafcb1e466bb262692abef1a4a5ad95b602111ff410c/ty-0.0.1a12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1f2e07a927134f7287142ff264b8862025175d2329fa2293aedddb58ac59014d", size = 7191992, upload-time = "2025-06-25T11:49:58.411Z" }, - { url = 
"https://files.pythonhosted.org/packages/05/cb/fe2a5bdf0f3b013798042b568893bd5a61387f03a6b4e7986b75d4a4d7ac/ty-0.0.1a12-py3-none-win32.whl", hash = "sha256:8f1571a10b5ff16eeaa91ed240ec880b2c008d9fcd106426fc904bddfc126fbc", size = 6381651, upload-time = "2025-06-25T11:50:00.112Z" }, - { url = "https://files.pythonhosted.org/packages/62/22/f7037027c07335d3f89663d196490542ff6c58191326a2c330b34cd3bf28/ty-0.0.1a12-py3-none-win_amd64.whl", hash = "sha256:6b3d8f787ef8247f5564cd86fdb182157bc99c220677988ef7f66cc6502ae83a", size = 6957120, upload-time = "2025-06-25T11:50:01.899Z" }, - { url = "https://files.pythonhosted.org/packages/72/80/82c4aca7b4246f3805a8d62b3650d574b18a848cf3f696c1d4bbdb5e613c/ty-0.0.1a12-py3-none-win_arm64.whl", hash = "sha256:5983f745cc40d15c77434d188dbce7218e2baceba88f1b8f1108763cedad81b4", size = 6572157, upload-time = "2025-06-25T11:50:03.986Z" }, +version = "0.0.13" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/dc/b607f00916f5a7c52860b84a66dc17bc6988e8445e96b1d6e175a3837397/ty-0.0.13.tar.gz", hash = "sha256:7a1d135a400ca076407ea30012d1f75419634160ed3b9cad96607bf2956b23b3", size = 4999183, upload-time = "2026-01-21T13:21:16.133Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/df/3632f1918f4c0a33184f107efc5d436ab6da147fd3d3b94b3af6461efbf4/ty-0.0.13-py3-none-linux_armv6l.whl", hash = "sha256:1b2b8e02697c3a94c722957d712a0615bcc317c9b9497be116ef746615d892f2", size = 9993501, upload-time = "2026-01-21T13:21:26.628Z" }, + { url = "https://files.pythonhosted.org/packages/92/87/6a473ced5ac280c6ce5b1627c71a8a695c64481b99aabc798718376a441e/ty-0.0.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f15cdb8e233e2b5adfce673bb21f4c5e8eaf3334842f7eea3c70ac6fda8c1de5", size = 9860986, upload-time = "2026-01-21T13:21:24.425Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/9b/d89ae375cf0a7cd9360e1164ce017f8c753759be63b6a11ed4c944abe8c6/ty-0.0.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:0819e89ac9f0d8af7a062837ce197f0461fee2fc14fd07e2c368780d3a397b73", size = 9350748, upload-time = "2026-01-21T13:21:28.502Z" }, + { url = "https://files.pythonhosted.org/packages/a8/a6/9ad58518056fab344b20c0bb2c1911936ebe195318e8acc3bc45ac1c6b6b/ty-0.0.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de79f481084b7cc7a202ba0d7a75e10970d10ffa4f025b23f2e6b7324b74886", size = 9849884, upload-time = "2026-01-21T13:21:21.886Z" }, + { url = "https://files.pythonhosted.org/packages/b1/c3/8add69095fa179f523d9e9afcc15a00818af0a37f2b237a9b59bc0046c34/ty-0.0.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4fb2154cff7c6e95d46bfaba283c60642616f20d73e5f96d0c89c269f3e1bcec", size = 9822975, upload-time = "2026-01-21T13:21:14.292Z" }, + { url = "https://files.pythonhosted.org/packages/a4/05/4c0927c68a0a6d43fb02f3f0b6c19c64e3461dc8ed6c404dde0efb8058f7/ty-0.0.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00be58d89337c27968a20d58ca553458608c5b634170e2bec82824c2e4cf4d96", size = 10294045, upload-time = "2026-01-21T13:21:30.505Z" }, + { url = "https://files.pythonhosted.org/packages/b4/86/6dc190838aba967557fe0bfd494c595d00b5081315a98aaf60c0e632aaeb/ty-0.0.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72435eade1fa58c6218abb4340f43a6c3ff856ae2dc5722a247d3a6dd32e9737", size = 10916460, upload-time = "2026-01-21T13:21:07.788Z" }, + { url = "https://files.pythonhosted.org/packages/04/40/9ead96b7c122e1109dfcd11671184c3506996bf6a649306ec427e81d9544/ty-0.0.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77a548742ee8f621d718159e7027c3b555051d096a49bb580249a6c5fc86c271", size = 10597154, upload-time = "2026-01-21T13:21:18.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/7d/e832a2c081d2be845dc6972d0c7998914d168ccbc0b9c86794419ab7376e/ty-0.0.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da067c57c289b7cf914669704b552b6207c2cc7f50da4118c3e12388642e6b3f", size = 10410710, upload-time = "2026-01-21T13:21:12.388Z" }, + { url = "https://files.pythonhosted.org/packages/31/e3/898be3a96237a32f05c4c29b43594dc3b46e0eedfe8243058e46153b324f/ty-0.0.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d1b50a01fffa140417fca5a24b658fbe0734074a095d5b6f0552484724474343", size = 9826299, upload-time = "2026-01-21T13:21:00.845Z" }, + { url = "https://files.pythonhosted.org/packages/bb/eb/db2d852ce0ed742505ff18ee10d7d252f3acfd6fc60eca7e9c7a0288a6d8/ty-0.0.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0f33c46f52e5e9378378eca0d8059f026f3c8073ace02f7f2e8d079ddfe5207e", size = 9831610, upload-time = "2026-01-21T13:21:05.842Z" }, + { url = "https://files.pythonhosted.org/packages/9e/61/149f59c8abaddcbcbb0bd13b89c7741ae1c637823c5cf92ed2c644fcadef/ty-0.0.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:168eda24d9a0b202cf3758c2962cc295878842042b7eca9ed2965259f59ce9f2", size = 9978885, upload-time = "2026-01-21T13:21:10.306Z" }, + { url = "https://files.pythonhosted.org/packages/a0/cd/026d4e4af60a80918a8d73d2c42b8262dd43ab2fa7b28d9743004cb88d57/ty-0.0.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d4917678b95dc8cb399cc459fab568ba8d5f0f33b7a94bf840d9733043c43f29", size = 10506453, upload-time = "2026-01-21T13:20:56.633Z" }, + { url = "https://files.pythonhosted.org/packages/63/06/8932833a4eca2df49c997a29afb26721612de8078ae79074c8fe87e17516/ty-0.0.13-py3-none-win32.whl", hash = "sha256:c1f2ec40daa405508b053e5b8e440fbae5fdb85c69c9ab0ee078f8bc00eeec3d", size = 9433482, upload-time = "2026-01-21T13:20:58.717Z" }, + { url = "https://files.pythonhosted.org/packages/aa/fd/e8d972d1a69df25c2cecb20ea50e49ad5f27a06f55f1f5f399a563e71645/ty-0.0.13-py3-none-win_amd64.whl", hash = 
"sha256:8b7b1ab9f187affbceff89d51076038363b14113be29bda2ddfa17116de1d476", size = 10319156, upload-time = "2026-01-21T13:21:03.266Z" }, + { url = "https://files.pythonhosted.org/packages/2d/c2/05fdd64ac003a560d4fbd1faa7d9a31d75df8f901675e5bed1ee2ceeff87/ty-0.0.13-py3-none-win_arm64.whl", hash = "sha256:1c9630333497c77bb9bcabba42971b96ee1f36c601dd3dcac66b4134f9fa38f0", size = 9808316, upload-time = "2026-01-21T13:20:54.053Z" }, ] [[package]] name = "typer" -version = "0.15.2" +version = "0.21.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -1935,9 +2257,9 @@ dependencies = [ { name = "shellingham" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/6f/3991f0f1c7fcb2df31aef28e0594d8d54b05393a0e4e34c65e475c2a5d41/typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5", size = 100711, upload-time = "2025-02-27T19:17:34.807Z" } +sdist = { url = "https://files.pythonhosted.org/packages/36/bf/8825b5929afd84d0dabd606c67cd57b8388cb3ec385f7ef19c5cc2202069/typer-0.21.1.tar.gz", hash = "sha256:ea835607cd752343b6b2b7ce676893e5a0324082268b48f27aa058bdb7d2145d", size = 110371, upload-time = "2026-01-06T11:21:10.989Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/fc/5b29fea8cee020515ca82cc68e3b8e1e34bb19a3535ad854cac9257b414c/typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc", size = 45061, upload-time = "2025-02-27T19:17:32.111Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/d9257dd49ff2ca23ea5f132edf1281a0c4f9de8a762b9ae399b670a59235/typer-0.21.1-py3-none-any.whl", hash = "sha256:7985e89081c636b88d172c2ee0cfe33c253160994d47bdfdc302defd7d1f1d01", size = 47381, upload-time = "2026-01-06T11:21:09.824Z" }, ] [[package]] @@ -1963,11 +2285,11 @@ wheels = [ [[package]] name = "urllib3" -version = "2.6.2" +version = "2.6.3" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1e/24/a2a2ed9addd907787d7aa0355ba36a6cadf1768b934c652ea78acbd59dcd/urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797", size = 432930, upload-time = "2025-12-11T15:56:40.252Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size = 131182, upload-time = "2025-12-11T15:56:38.584Z" }, + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, ] [[package]] @@ -1985,56 +2307,70 @@ wheels = [ [[package]] name = "virtualenv" -version = "20.31.2" +version = "20.36.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/56/2c/444f465fb2c65f40c3a104fd0c495184c4f2336d65baf398e3c75d72ea94/virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af", size = 6076316, upload-time = "2025-05-08T17:58:23.811Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 
6032239, upload-time = "2026-01-09T18:21:01.296Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/40/b1c265d4b2b62b58576588510fc4d1fe60a86319c8de99fd8e9fec617d2c/virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11", size = 6057982, upload-time = "2025-05-08T17:58:21.15Z" }, + { url = "https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, upload-time = "2026-01-09T18:20:59.425Z" }, ] [[package]] name = "wcwidth" -version = "0.2.13" +version = "0.3.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/3a/c63d2afd6dc2cad55a44bea48c7db75edde859e320bdceb9351ba63fceb6/wcwidth-0.3.3.tar.gz", hash = "sha256:f8f7d42c8a067d909b80b425342d02c423c5edc546347475e1d402fe3d35bb63", size = 233784, upload-time = "2026-01-24T16:23:58.578Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, + { url = "https://files.pythonhosted.org/packages/4f/bc/ab575ebf0254577034d23908299b0d13ea5d7ceb35f43a5c08acf2252826/wcwidth-0.3.3-py3-none-any.whl", hash = "sha256:8e9056c446f21c7393514946d143a748c56aad72476844d3f215f7915276508f", size = 86509, upload-time = "2026-01-24T16:23:56.966Z" }, ] [[package]] name = "websockets" -version = 
"15.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, - { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, - { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, - { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, - { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = 
"2025-03-05T20:02:24.368Z" }, - { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, - { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, - { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, - { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, - { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, - { url = 
"https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, - { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, - { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, - { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, - { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, - { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = 
"2025-03-05T20:02:43.304Z" }, - { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, - { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" }, - { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" }, - { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, - { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, - { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, + { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, + { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, + { url = "https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915, upload-time = "2026-01-10T09:22:51.071Z" }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152, upload-time = "2026-01-10T09:22:52.224Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583, upload-time = "2026-01-10T09:22:53.443Z" }, + { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880, upload-time = "2026-01-10T09:22:55.033Z" }, + { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261, upload-time = "2026-01-10T09:22:56.251Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693, upload-time = "2026-01-10T09:22:57.478Z" }, + { url = "https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = "https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" }, + { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" }, + { url = "https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" }, + { url = "https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" }, + { url = "https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" }, + { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, upload-time = "2026-01-10T09:23:29.449Z" }, + { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" }, + { url = "https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, + { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, ] [[package]] @@ -2097,67 +2433,96 @@ wheels = [ [[package]] name = "yarl" -version = "1.20.1" +version = "1.22.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "multidict" }, { name = "propcache" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428, upload-time = "2025-06-10T00:46:09.923Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", size = 133667, upload-time = "2025-06-10T00:43:44.369Z" }, - { url = "https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", size = 91025, upload-time = "2025-06-10T00:43:46.295Z" }, - { url = "https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", size = 89709, upload-time = "2025-06-10T00:43:48.22Z" }, - { url = "https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", size = 352287, upload-time = "2025-06-10T00:43:49.924Z" }, - { url = "https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", size = 345429, upload-time = "2025-06-10T00:43:51.7Z" }, - { url = "https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", size = 365429, upload-time = "2025-06-10T00:43:53.494Z" }, - { url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", size = 363862, upload-time = "2025-06-10T00:43:55.766Z" }, - { url = "https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", size = 355616, upload-time = "2025-06-10T00:43:58.056Z" }, - { url = 
"https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", size = 339954, upload-time = "2025-06-10T00:43:59.773Z" }, - { url = "https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", size = 365575, upload-time = "2025-06-10T00:44:02.051Z" }, - { url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", size = 365061, upload-time = "2025-06-10T00:44:04.196Z" }, - { url = "https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", size = 364142, upload-time = "2025-06-10T00:44:06.527Z" }, - { url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", size = 381894, upload-time = "2025-06-10T00:44:08.379Z" }, - { url = "https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", size = 383378, upload-time = "2025-06-10T00:44:10.51Z" }, - { url = 
"https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", size = 374069, upload-time = "2025-06-10T00:44:12.834Z" }, - { url = "https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", size = 81249, upload-time = "2025-06-10T00:44:14.731Z" }, - { url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710, upload-time = "2025-06-10T00:44:16.716Z" }, - { url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811, upload-time = "2025-06-10T00:44:18.933Z" }, - { url = "https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078, upload-time = "2025-06-10T00:44:20.635Z" }, - { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748, upload-time = "2025-06-10T00:44:22.34Z" }, - { url = 
"https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595, upload-time = "2025-06-10T00:44:24.314Z" }, - { url = "https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616, upload-time = "2025-06-10T00:44:26.167Z" }, - { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324, upload-time = "2025-06-10T00:44:27.915Z" }, - { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676, upload-time = "2025-06-10T00:44:30.041Z" }, - { url = "https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614, upload-time = "2025-06-10T00:44:32.171Z" }, - { url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766, 
upload-time = "2025-06-10T00:44:34.494Z" }, - { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615, upload-time = "2025-06-10T00:44:36.856Z" }, - { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982, upload-time = "2025-06-10T00:44:39.141Z" }, - { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792, upload-time = "2025-06-10T00:44:40.934Z" }, - { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049, upload-time = "2025-06-10T00:44:42.854Z" }, - { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774, upload-time = "2025-06-10T00:44:45.275Z" }, - { url = "https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252, upload-time = "2025-06-10T00:44:47.31Z" }, - { url = 
"https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198, upload-time = "2025-06-10T00:44:49.164Z" }, - { url = "https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346, upload-time = "2025-06-10T00:44:51.182Z" }, - { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826, upload-time = "2025-06-10T00:44:52.883Z" }, - { url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217, upload-time = "2025-06-10T00:44:54.658Z" }, - { url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700, upload-time = "2025-06-10T00:44:56.784Z" }, - { url = "https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644, upload-time = "2025-06-10T00:44:59.071Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452, upload-time = "2025-06-10T00:45:01.605Z" }, - { url = "https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", size = 346378, upload-time = "2025-06-10T00:45:03.946Z" }, - { url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261, upload-time = "2025-06-10T00:45:05.992Z" }, - { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987, upload-time = "2025-06-10T00:45:08.227Z" }, - { url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361, upload-time = "2025-06-10T00:45:10.11Z" }, - { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460, upload-time = 
"2025-06-10T00:45:12.055Z" }, - { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486, upload-time = "2025-06-10T00:45:13.995Z" }, - { url = "https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219, upload-time = "2025-06-10T00:45:16.479Z" }, - { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693, upload-time = "2025-06-10T00:45:18.399Z" }, - { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803, upload-time = "2025-06-10T00:45:20.677Z" }, - { url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709, upload-time = "2025-06-10T00:45:23.221Z" }, - { url = "https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591, upload-time = "2025-06-10T00:45:25.793Z" }, - { url = 
"https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003, upload-time = "2025-06-10T00:45:27.752Z" }, - { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, + { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940, upload-time = "2025-10-06T14:09:50.089Z" }, + { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825, upload-time = "2025-10-06T14:09:52.142Z" }, + { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705, upload-time = "2025-10-06T14:09:54.128Z" }, + { url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518, upload-time = "2025-10-06T14:09:55.762Z" }, + { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267, upload-time = "2025-10-06T14:09:57.958Z" }, + { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797, upload-time = "2025-10-06T14:09:59.527Z" }, + { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535, upload-time = "2025-10-06T14:10:01.139Z" }, + { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" }, + { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" }, + { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" }, + { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980, upload-time = "2025-10-06T14:10:14.601Z" }, + { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424, upload-time = "2025-10-06T14:10:16.115Z" }, + { url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821, upload-time = "2025-10-06T14:10:17.993Z" }, + { url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243, upload-time = "2025-10-06T14:10:19.44Z" }, + { url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361, upload-time = "2025-10-06T14:10:21.124Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036, upload-time = "2025-10-06T14:10:22.902Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671, upload-time = "2025-10-06T14:10:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059, upload-time = "2025-10-06T14:10:26.406Z" }, + { url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356, upload-time = "2025-10-06T14:10:28.461Z" }, + { url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331, upload-time = "2025-10-06T14:10:30.541Z" }, + { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590, upload-time = "2025-10-06T14:10:33.352Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316, upload-time = "2025-10-06T14:10:35.034Z" }, + { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431, upload-time = "2025-10-06T14:10:37.76Z" }, + { url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555, upload-time = "2025-10-06T14:10:39.649Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965, upload-time = "2025-10-06T14:10:41.313Z" }, + { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205, upload-time = "2025-10-06T14:10:43.167Z" }, + { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209, upload-time = "2025-10-06T14:10:44.643Z" }, + { url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966, upload-time = "2025-10-06T14:10:46.554Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312, upload-time = "2025-10-06T14:10:48.007Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967, upload-time = "2025-10-06T14:10:49.997Z" }, + { url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949, upload-time = "2025-10-06T14:10:52.004Z" }, + { url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818, upload-time = "2025-10-06T14:10:54.078Z" }, + { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626, upload-time = "2025-10-06T14:10:55.767Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129, upload-time = "2025-10-06T14:10:57.985Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776, upload-time = "2025-10-06T14:10:59.633Z" }, + { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879, upload-time = "2025-10-06T14:11:01.454Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996, upload-time = "2025-10-06T14:11:03.452Z" }, + { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047, upload-time = "2025-10-06T14:11:05.115Z" }, + { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947, upload-time = "2025-10-06T14:11:08.137Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" }, + { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" }, + { url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" }, + { url = "https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4", size = 140520, upload-time = "2025-10-06T14:11:15.465Z" }, + { url = "https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683", size = 93504, upload-time = "2025-10-06T14:11:17.106Z" }, + { url = "https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b", size = 94282, upload-time = "2025-10-06T14:11:19.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/bc/315a56aca762d44a6aaaf7ad253f04d996cb6b27bad34410f82d76ea8038/yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e", size = 372080, upload-time = "2025-10-06T14:11:20.996Z" }, + { url = "https://files.pythonhosted.org/packages/3f/3f/08e9b826ec2e099ea6e7c69a61272f4f6da62cb5b1b63590bb80ca2e4a40/yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590", size = 338696, upload-time = "2025-10-06T14:11:22.847Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9f/90360108e3b32bd76789088e99538febfea24a102380ae73827f62073543/yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2", size = 387121, upload-time = "2025-10-06T14:11:24.889Z" }, + { url = "https://files.pythonhosted.org/packages/98/92/ab8d4657bd5b46a38094cfaea498f18bb70ce6b63508fd7e909bd1f93066/yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da", size = 394080, upload-time = "2025-10-06T14:11:27.307Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784", size = 372661, upload-time = "2025-10-06T14:11:29.387Z" }, + { url = "https://files.pythonhosted.org/packages/b6/2e/f4d26183c8db0bb82d491b072f3127fb8c381a6206a3a56332714b79b751/yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b", size = 364645, upload-time = "2025-10-06T14:11:31.423Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/428e5812e6b87cd00ee8e898328a62c95825bf37c7fa87f0b6bb2ad31304/yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694", size = 355361, upload-time = "2025-10-06T14:11:33.055Z" }, + { url = "https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d", size = 381451, upload-time = "2025-10-06T14:11:35.136Z" }, + { url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd", size = 383814, upload-time = "2025-10-06T14:11:37.094Z" }, + { url = "https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da", size = 370799, upload-time = "2025-10-06T14:11:38.83Z" }, + { url = "https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2", size = 82990, upload-time = "2025-10-06T14:11:40.624Z" }, + { url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79", size = 88292, upload-time = "2025-10-06T14:11:42.578Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33", size = 82888, upload-time = "2025-10-06T14:11:44.863Z" }, + { url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1", size = 146223, upload-time = "2025-10-06T14:11:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca", size = 95981, upload-time = "2025-10-06T14:11:48.845Z" }, + { url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53", size = 97303, upload-time = "2025-10-06T14:11:50.897Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ab/5b13d3e157505c43c3b43b5a776cbf7b24a02bc4cccc40314771197e3508/yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c", size = 361820, upload-time = "2025-10-06T14:11:52.549Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/242a5ef4677615cf95330cfc1b4610e78184400699bdda0acb897ef5e49a/yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf", size = 323203, upload-time = "2025-10-06T14:11:54.225Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/96/475509110d3f0153b43d06164cf4195c64d16999e0c7e2d8a099adcd6907/yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face", size = 363173, upload-time = "2025-10-06T14:11:56.069Z" }, + { url = "https://files.pythonhosted.org/packages/c9/66/59db471aecfbd559a1fd48aedd954435558cd98c7d0da8b03cc6c140a32c/yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b", size = 373562, upload-time = "2025-10-06T14:11:58.783Z" }, + { url = "https://files.pythonhosted.org/packages/03/1f/c5d94abc91557384719da10ff166b916107c1b45e4d0423a88457071dd88/yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486", size = 339828, upload-time = "2025-10-06T14:12:00.686Z" }, + { url = "https://files.pythonhosted.org/packages/5f/97/aa6a143d3afba17b6465733681c70cf175af89f76ec8d9286e08437a7454/yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138", size = 347551, upload-time = "2025-10-06T14:12:02.628Z" }, + { url = "https://files.pythonhosted.org/packages/43/3c/45a2b6d80195959239a7b2a8810506d4eea5487dce61c2a3393e7fc3c52e/yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a", size = 334512, upload-time = "2025-10-06T14:12:04.871Z" }, + { url = "https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529", size = 352400, upload-time = "2025-10-06T14:12:06.624Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093", size = 357140, upload-time = "2025-10-06T14:12:08.362Z" }, + { url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c", size = 341473, upload-time = "2025-10-06T14:12:10.994Z" }, + { url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056, upload-time = "2025-10-06T14:12:13.317Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292, upload-time = "2025-10-06T14:12:15.398Z" }, + { url = "https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171, upload-time = "2025-10-06T14:12:16.935Z" }, + { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, ] [[package]] From 492c1f32992c2eeec9842e823e1e55668707107b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 24 Jan 2026 20:50:49 +0100 Subject: [PATCH 332/565] fix: resolve type errors for ty 
0.0.13 compatibility - Add proper cast for log_level Literal type in loaders.py - Remove redundant cast() calls in browser.py - Remove unused type: ignore comments --- linkedin_mcp_server/cli.py | 2 +- linkedin_mcp_server/config/__init__.py | 2 +- linkedin_mcp_server/config/loaders.py | 5 ++++- linkedin_mcp_server/drivers/browser.py | 6 ++---- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/linkedin_mcp_server/cli.py b/linkedin_mcp_server/cli.py index 67eae7fa..daad2ecd 100644 --- a/linkedin_mcp_server/cli.py +++ b/linkedin_mcp_server/cli.py @@ -12,7 +12,7 @@ import subprocess from typing import Any, Dict, List -import pyperclip # type: ignore +import pyperclip logger = logging.getLogger(__name__) diff --git a/linkedin_mcp_server/config/__init__.py b/linkedin_mcp_server/config/__init__.py index 771e2acb..6dc34bb0 100644 --- a/linkedin_mcp_server/config/__init__.py +++ b/linkedin_mcp_server/config/__init__.py @@ -22,7 +22,7 @@ def get_config() -> AppConfig: if _config is None: _config = load_config() logger.debug("Configuration loaded") - return _config # type: ignore[return-value] + return _config def reset_config() -> None: diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index f7cbdd6b..b030e617 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -8,6 +8,7 @@ import logging import os import sys +from typing import Literal, cast from dotenv import load_dotenv @@ -67,7 +68,9 @@ def load_from_env(config: AppConfig) -> AppConfig: if log_level_env := os.environ.get(EnvironmentKeys.LOG_LEVEL): log_level_upper = log_level_env.upper() if log_level_upper in ("DEBUG", "INFO", "WARNING", "ERROR"): - config.server.log_level = log_level_upper + config.server.log_level = cast( + Literal["DEBUG", "INFO", "WARNING", "ERROR"], log_level_upper + ) # Headless mode if os.environ.get(EnvironmentKeys.HEADLESS) in FALSY_VALUES: diff --git a/linkedin_mcp_server/drivers/browser.py 
b/linkedin_mcp_server/drivers/browser.py index ce43ddaa..37799d48 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -8,7 +8,6 @@ import logging from pathlib import Path -from typing import cast from linkedin_scraper import ( AuthenticationError, @@ -64,7 +63,7 @@ async def get_or_create_browser( session_path = DEFAULT_SESSION_PATH if _browser is not None: - return cast(BrowserManager, _browser) + return _browser config = get_config() viewport = { @@ -123,9 +122,8 @@ async def close_browser() -> None: global _browser if _browser is not None: - browser = cast(BrowserManager, _browser) logger.info("Closing browser...") - await browser.close() + await _browser.close() _browser = None logger.info("Browser closed") From f8bf52812ad621b6061f8e3b5087a0bee0ee8927 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 24 Jan 2026 22:15:23 +0100 Subject: [PATCH 333/565] test: add minimal testing suite with CI integration Add fast, high-ROI tests covering config, utils, exceptions, error handling, authentication, and MCP tools (mocked). Includes pytest configuration, coverage reporting, and CI test job. 
- Add pytest.ini with async support and pytest-xdist for parallel tests - Add .coveragerc with 45% threshold and branch coverage - Add tests for config schema, loaders, and singleton pattern - Add tests for exception hierarchy and error handler - Add tests for authentication source detection - Add mocked tests for all 5 MCP tools - Add CI test job running Python 3.14 with coverage --- .coveragerc | 8 ++ .github/workflows/ci.yml | 18 ++++ pyproject.toml | 1 + pytest.ini | 5 ++ tests/conftest.py | 55 ++++++++++++ tests/test_authentication.py | 44 ++++++++++ tests/test_config.py | 157 +++++++++++++++++++++++++++++++++++ tests/test_error_handler.py | 44 ++++++++++ tests/test_exceptions.py | 32 +++++++ tests/test_tools.py | 154 ++++++++++++++++++++++++++++++++++ tests/test_utils.py | 12 +++ uv.lock | 24 ++++++ 12 files changed, 554 insertions(+) create mode 100644 .coveragerc create mode 100644 pytest.ini create mode 100644 tests/conftest.py create mode 100644 tests/test_authentication.py create mode 100644 tests/test_config.py create mode 100644 tests/test_error_handler.py create mode 100644 tests/test_exceptions.py create mode 100644 tests/test_tools.py create mode 100644 tests/test_utils.py diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 00000000..c8796346 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,8 @@ +[run] +source = linkedin_mcp_server +branch = true +omit = linkedin_mcp_server/__main__.py + +[report] +fail_under = 45 +show_missing = true diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3a87a177..6bf14324 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,3 +29,21 @@ jobs: - name: Optimize uv cache for CI run: uv cache prune --ci + + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + + - uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7 + with: + enable-cache: true + + - run: uv python install 3.14 + + - run: uv 
sync --group dev + + - name: Run tests + run: uv run pytest --cov --cov-report=term-missing -n auto -v -s + + - run: uv cache prune --ci diff --git a/pyproject.toml b/pyproject.toml index b957b16a..c8ec657a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ dev = [ "pytest>=8.3.5", "pytest-asyncio>=1.0.0", "pytest-cov>=6.1.1", + "pytest-xdist>=3.8.0", "ruff>=0.11.11", "ty>=0.0.1a12", ] diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 00000000..b19b464a --- /dev/null +++ b/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +testpaths = tests +asyncio_mode = auto +asyncio_default_fixture_loop_scope = function +addopts = -v --strict-markers -ra diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..0cd50c3e --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,55 @@ +import json + +import pytest + + +@pytest.fixture(autouse=True) +def reset_singletons(): + """Reset global state for test isolation.""" + from linkedin_mcp_server.config import reset_config + from linkedin_mcp_server.drivers.browser import reset_browser_for_testing + + reset_browser_for_testing() + reset_config() + yield + reset_browser_for_testing() + reset_config() + + +@pytest.fixture(autouse=True) +def isolate_session_path(tmp_path, monkeypatch): + """Redirect DEFAULT_SESSION_PATH to tmp_path.""" + fake_session = tmp_path / "session.json" + for module in [ + "linkedin_mcp_server.drivers.browser", + "linkedin_mcp_server.authentication", + "linkedin_mcp_server.cli_main", + "linkedin_mcp_server.setup", + ]: + try: + monkeypatch.setattr(f"{module}.DEFAULT_SESSION_PATH", fake_session) + except AttributeError: + pass # Module may not be imported yet + return fake_session + + +@pytest.fixture +def session_file(isolate_session_path): + """Create valid session file.""" + isolate_session_path.parent.mkdir(parents=True, exist_ok=True) + isolate_session_path.write_text( + json.dumps( + {"cookies": [{"name": "li_at", "value": "test", "domain": ".linkedin.com"}]} + ) + ) 
+ return isolate_session_path + + +@pytest.fixture +def mock_context(): + """Mock FastMCP Context.""" + from unittest.mock import AsyncMock, MagicMock + + ctx = MagicMock() + ctx.report_progress = AsyncMock() + return ctx diff --git a/tests/test_authentication.py b/tests/test_authentication.py new file mode 100644 index 00000000..aa03009a --- /dev/null +++ b/tests/test_authentication.py @@ -0,0 +1,44 @@ +import pytest + +from linkedin_mcp_server.authentication import clear_session, get_authentication_source +from linkedin_mcp_server.exceptions import CredentialsNotFoundError + + +def test_get_auth_source_session(session_file, monkeypatch): + monkeypatch.setattr( + "linkedin_mcp_server.authentication.session_exists", lambda: True + ) + assert get_authentication_source() == "session" + + +def test_get_auth_source_cookie(monkeypatch): + monkeypatch.setattr( + "linkedin_mcp_server.authentication.session_exists", lambda: False + ) + monkeypatch.setattr( + "linkedin_mcp_server.authentication.get_linkedin_cookie", lambda: "cookie" + ) + assert get_authentication_source() == "cookie" + + +def test_get_auth_source_none_raises(monkeypatch): + monkeypatch.setattr( + "linkedin_mcp_server.authentication.session_exists", lambda: False + ) + monkeypatch.setattr( + "linkedin_mcp_server.authentication.get_linkedin_cookie", lambda: None + ) + with pytest.raises(CredentialsNotFoundError): + get_authentication_source() + + +def test_clear_session_removes_file(session_file): + assert session_file.exists() + result = clear_session(session_file) + assert result is True + assert not session_file.exists() + + +def test_clear_session_no_file(isolate_session_path): + result = clear_session(isolate_session_path) + assert result is True # No error even if file doesn't exist diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 00000000..55f0b387 --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,157 @@ +import pytest + +from linkedin_mcp_server.config.schema 
import ( + AppConfig, + BrowserConfig, + ConfigurationError, + ServerConfig, +) + + +class TestBrowserConfig: + def test_defaults(self): + config = BrowserConfig() + assert config.headless is True + assert config.default_timeout == 5000 + + def test_validate_passes(self): + BrowserConfig().validate() # No error + + def test_validate_negative_timeout(self): + with pytest.raises(ConfigurationError): + BrowserConfig(default_timeout=-1).validate() + + def test_validate_negative_slow_mo(self): + with pytest.raises(ConfigurationError): + BrowserConfig(slow_mo=-1).validate() + + +class TestServerConfig: + def test_defaults(self): + config = ServerConfig() + assert config.transport == "stdio" + assert config.port == 8000 + + +class TestAppConfig: + def test_validate_invalid_port(self): + config = AppConfig() + config.server.port = 99999 + with pytest.raises(ConfigurationError): + config.validate() + + +class TestConfigSingleton: + def test_get_config_returns_same_instance(self, monkeypatch): + # Mock sys.argv to prevent argparse from parsing pytest's arguments + monkeypatch.setattr("sys.argv", ["linkedin-mcp-server"]) + from linkedin_mcp_server.config import get_config + + assert get_config() is get_config() + + def test_reset_config_clears_singleton(self, monkeypatch): + # Mock sys.argv to prevent argparse from parsing pytest's arguments + monkeypatch.setattr("sys.argv", ["linkedin-mcp-server"]) + from linkedin_mcp_server.config import get_config, reset_config + + first = get_config() + reset_config() + second = get_config() + assert first is not second + + +class TestLoaders: + def test_load_from_env_headless_false(self, monkeypatch): + monkeypatch.setenv("HEADLESS", "false") + from linkedin_mcp_server.config.loaders import load_from_env + + config = load_from_env(AppConfig()) + assert config.browser.headless is False + + def test_load_from_env_headless_true(self, monkeypatch): + monkeypatch.setenv("HEADLESS", "true") + from linkedin_mcp_server.config.loaders import 
load_from_env + + config = load_from_env(AppConfig()) + assert config.browser.headless is True + + def test_load_from_env_log_level(self, monkeypatch): + monkeypatch.setenv("LOG_LEVEL", "DEBUG") + from linkedin_mcp_server.config.loaders import load_from_env + + config = load_from_env(AppConfig()) + assert config.server.log_level == "DEBUG" + + def test_load_from_env_defaults(self, monkeypatch): + # Clear env vars + for var in ["HEADLESS", "LOG_LEVEL"]: + monkeypatch.delenv(var, raising=False) + from linkedin_mcp_server.config.loaders import load_from_env + + config = load_from_env(AppConfig()) + assert config.browser.headless is True # default + + def test_load_from_env_transport(self, monkeypatch): + monkeypatch.setenv("TRANSPORT", "streamable-http") + from linkedin_mcp_server.config.loaders import load_from_env + + config = load_from_env(AppConfig()) + assert config.server.transport == "streamable-http" + assert config.server.transport_explicitly_set is True + + def test_load_from_env_invalid_transport(self, monkeypatch): + monkeypatch.setenv("TRANSPORT", "invalid") + from linkedin_mcp_server.config.loaders import load_from_env + + with pytest.raises(ConfigurationError, match="Invalid TRANSPORT"): + load_from_env(AppConfig()) + + def test_load_from_env_timeout(self, monkeypatch): + monkeypatch.setenv("TIMEOUT", "10000") + from linkedin_mcp_server.config.loaders import load_from_env + + config = load_from_env(AppConfig()) + assert config.browser.default_timeout == 10000 + + def test_load_from_env_invalid_timeout(self, monkeypatch): + monkeypatch.setenv("TIMEOUT", "invalid") + from linkedin_mcp_server.config.loaders import load_from_env + + with pytest.raises(ConfigurationError, match="Invalid TIMEOUT"): + load_from_env(AppConfig()) + + def test_load_from_env_port(self, monkeypatch): + monkeypatch.setenv("PORT", "9000") + from linkedin_mcp_server.config.loaders import load_from_env + + config = load_from_env(AppConfig()) + assert config.server.port == 9000 + + def 
test_load_from_env_slow_mo(self, monkeypatch): + monkeypatch.setenv("SLOW_MO", "100") + from linkedin_mcp_server.config.loaders import load_from_env + + config = load_from_env(AppConfig()) + assert config.browser.slow_mo == 100 + + def test_load_from_env_viewport(self, monkeypatch): + monkeypatch.setenv("VIEWPORT", "1920x1080") + from linkedin_mcp_server.config.loaders import load_from_env + + config = load_from_env(AppConfig()) + assert config.browser.viewport_width == 1920 + assert config.browser.viewport_height == 1080 + + def test_load_from_env_invalid_viewport(self, monkeypatch): + monkeypatch.setenv("VIEWPORT", "invalid") + from linkedin_mcp_server.config.loaders import load_from_env + + with pytest.raises(ConfigurationError, match="Invalid VIEWPORT"): + load_from_env(AppConfig()) + + def test_load_from_env_linkedin_cookie(self, monkeypatch): + monkeypatch.setenv("LINKEDIN_COOKIE", "test_cookie_value") + from linkedin_mcp_server.config.loaders import load_from_env + + config = load_from_env(AppConfig()) + assert config.server.linkedin_cookie == "test_cookie_value" diff --git a/tests/test_error_handler.py b/tests/test_error_handler.py new file mode 100644 index 00000000..b229a7ff --- /dev/null +++ b/tests/test_error_handler.py @@ -0,0 +1,44 @@ +from linkedin_scraper.core.exceptions import RateLimitError + +from linkedin_mcp_server.error_handler import handle_tool_error +from linkedin_mcp_server.exceptions import ( + CredentialsNotFoundError, + SessionExpiredError, +) + + +def test_handles_session_expired(): + result = handle_tool_error(SessionExpiredError(), "test_tool") + assert result["error"] == "session_expired" + assert "message" in result + assert "resolution" in result + + +def test_handles_credentials_not_found(): + result = handle_tool_error(CredentialsNotFoundError("no creds"), "test_tool") + assert result["error"] == "authentication_not_found" + + +def test_handles_generic_exception(): + result = handle_tool_error(ValueError("oops"), "test_tool") + 
assert result["error"] == "unknown_error" + assert "oops" in result["message"] + + +def test_handles_rate_limit_with_suggested_wait(): + """Test RateLimitError with custom suggested_wait_time attribute.""" + error = RateLimitError("Rate limited") + error.suggested_wait_time = 600 # type: ignore[attr-defined] + result = handle_tool_error(error, "test_tool") + assert result["error"] == "rate_limit" + assert result["suggested_wait_seconds"] == 600 + assert "600" in result["resolution"] + + +def test_handles_rate_limit_default_wait(): + """Test RateLimitError without suggested_wait_time uses default 300.""" + error = RateLimitError("Rate limited") + result = handle_tool_error(error, "test_tool") + assert result["error"] == "rate_limit" + assert result["suggested_wait_seconds"] == 300 + assert "300" in result["resolution"] diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py new file mode 100644 index 00000000..657e7455 --- /dev/null +++ b/tests/test_exceptions.py @@ -0,0 +1,32 @@ +from linkedin_mcp_server.exceptions import ( + CookieAuthenticationError, + CredentialsNotFoundError, + LinkedInMCPError, + SessionExpiredError, +) + + +def test_base_exception(): + err = LinkedInMCPError("test") + assert str(err) == "test" + + +def test_session_expired_default_message(): + err = SessionExpiredError() + assert "expired" in str(err).lower() + + +def test_session_expired_custom_message(): + err = SessionExpiredError("custom") + assert str(err) == "custom" + + +def test_cookie_auth_default_message(): + err = CookieAuthenticationError() + assert "cookie" in str(err).lower() + + +def test_inheritance(): + assert issubclass(SessionExpiredError, LinkedInMCPError) + assert issubclass(CookieAuthenticationError, LinkedInMCPError) + assert issubclass(CredentialsNotFoundError, LinkedInMCPError) diff --git a/tests/test_tools.py b/tests/test_tools.py new file mode 100644 index 00000000..fe9a3166 --- /dev/null +++ b/tests/test_tools.py @@ -0,0 +1,154 @@ +from typing import Any, 
Callable, Coroutine +from unittest.mock import AsyncMock, MagicMock + +import pytest +from fastmcp import FastMCP + + +async def get_tool_fn( + mcp: FastMCP, name: str +) -> Callable[..., Coroutine[Any, Any, dict[str, Any]]]: + """Extract tool function from FastMCP by name using public API.""" + tool = await mcp.get_tool(name) + if tool is None: + raise ValueError(f"Tool '{name}' not found") + return tool.fn # type: ignore[attr-defined] + + +@pytest.fixture +def patch_tool_deps(monkeypatch): + """Patch ensure_authenticated and get_or_create_browser for all tools.""" + mock_browser = MagicMock() + mock_browser.page = MagicMock() + + for module in ["person", "company", "job"]: + monkeypatch.setattr( + f"linkedin_mcp_server.tools.{module}.ensure_authenticated", AsyncMock() + ) + monkeypatch.setattr( + f"linkedin_mcp_server.tools.{module}.get_or_create_browser", + AsyncMock(return_value=mock_browser), + ) + + return mock_browser + + +class TestPersonTool: + async def test_get_person_profile_success( + self, mock_context, patch_tool_deps, monkeypatch + ): + mock_person = MagicMock() + mock_person.to_dict.return_value = {"full_name": "Test User"} + mock_scraper = MagicMock() + mock_scraper.scrape = AsyncMock(return_value=mock_person) + monkeypatch.setattr( + "linkedin_mcp_server.tools.person.PersonScraper", + lambda *a, **kw: mock_scraper, + ) + + from linkedin_mcp_server.tools.person import register_person_tools + + mcp = FastMCP("test") + register_person_tools(mcp) + + tool_fn = await get_tool_fn(mcp, "get_person_profile") + result = await tool_fn("test-user", mock_context) + assert result["full_name"] == "Test User" + + async def test_get_person_profile_error(self, mock_context, monkeypatch): + from linkedin_mcp_server.exceptions import SessionExpiredError + + monkeypatch.setattr( + "linkedin_mcp_server.tools.person.ensure_authenticated", + AsyncMock(side_effect=SessionExpiredError()), + ) + + from linkedin_mcp_server.tools.person import register_person_tools + + mcp 
= FastMCP("test") + register_person_tools(mcp) + + tool_fn = await get_tool_fn(mcp, "get_person_profile") + result = await tool_fn("test-user", mock_context) + assert result["error"] == "session_expired" + + +class TestCompanyTools: + async def test_get_company_profile( + self, mock_context, patch_tool_deps, monkeypatch + ): + mock_company = MagicMock() + mock_company.to_dict.return_value = {"name": "Test Corp"} + mock_scraper = MagicMock() + mock_scraper.scrape = AsyncMock(return_value=mock_company) + monkeypatch.setattr( + "linkedin_mcp_server.tools.company.CompanyScraper", + lambda *a, **kw: mock_scraper, + ) + + from linkedin_mcp_server.tools.company import register_company_tools + + mcp = FastMCP("test") + register_company_tools(mcp) + + tool_fn = await get_tool_fn(mcp, "get_company_profile") + result = await tool_fn("testcorp", mock_context) + assert result["name"] == "Test Corp" + + async def test_get_company_posts(self, mock_context, patch_tool_deps, monkeypatch): + mock_post = MagicMock() + mock_post.to_dict.return_value = {"text": "Hello world"} + mock_scraper = MagicMock() + mock_scraper.scrape = AsyncMock(return_value=[mock_post]) + monkeypatch.setattr( + "linkedin_mcp_server.tools.company.CompanyPostsScraper", + lambda *a, **kw: mock_scraper, + ) + + from linkedin_mcp_server.tools.company import register_company_tools + + mcp = FastMCP("test") + register_company_tools(mcp) + + tool_fn = await get_tool_fn(mcp, "get_company_posts") + result = await tool_fn("testcorp", mock_context, limit=5) + assert result["count"] == 1 + assert result["posts"][0]["text"] == "Hello world" + + +class TestJobTools: + async def test_get_job_details(self, mock_context, patch_tool_deps, monkeypatch): + mock_job = MagicMock() + mock_job.to_dict.return_value = {"title": "Engineer"} + mock_scraper = MagicMock() + mock_scraper.scrape = AsyncMock(return_value=mock_job) + monkeypatch.setattr( + "linkedin_mcp_server.tools.job.JobScraper", lambda *a, **kw: mock_scraper + ) + + from 
linkedin_mcp_server.tools.job import register_job_tools + + mcp = FastMCP("test") + register_job_tools(mcp) + + tool_fn = await get_tool_fn(mcp, "get_job_details") + result = await tool_fn("12345", mock_context) + assert result["title"] == "Engineer" + + async def test_search_jobs(self, mock_context, patch_tool_deps, monkeypatch): + mock_scraper = MagicMock() + mock_scraper.search = AsyncMock(return_value=["url1", "url2"]) + monkeypatch.setattr( + "linkedin_mcp_server.tools.job.JobSearchScraper", + lambda *a, **kw: mock_scraper, + ) + + from linkedin_mcp_server.tools.job import register_job_tools + + mcp = FastMCP("test") + register_job_tools(mcp) + + tool_fn = await get_tool_fn(mcp, "search_jobs") + result = await tool_fn("python", mock_context, location="Remote", limit=10) + assert result["count"] == 2 + assert "url1" in result["job_urls"] diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 00000000..d660283c --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,12 @@ +def test_get_linkedin_cookie_present(monkeypatch): + monkeypatch.setenv("LINKEDIN_COOKIE", "test_cookie") + from linkedin_mcp_server.utils import get_linkedin_cookie + + assert get_linkedin_cookie() == "test_cookie" + + +def test_get_linkedin_cookie_missing(monkeypatch): + monkeypatch.delenv("LINKEDIN_COOKIE", raising=False) + from linkedin_mcp_server.utils import get_linkedin_cookie + + assert get_linkedin_cookie() is None diff --git a/uv.lock b/uv.lock index 5cdb7095..78dca9a8 100644 --- a/uv.lock +++ b/uv.lock @@ -591,6 +591,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, ] +[[package]] +name = "execnet" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/bf/89/780e11f9588d9e7128a3f87788354c7946a9cbb1401ad38a48c4db9a4f07/execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd", size = 166622, upload-time = "2025-11-12T09:56:37.75Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708, upload-time = "2025-11-12T09:56:36.333Z" }, +] + [[package]] name = "fakeredis" version = "2.33.0" @@ -1020,6 +1029,7 @@ dev = [ { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-cov" }, + { name = "pytest-xdist" }, { name = "ruff" }, { name = "ty" }, ] @@ -1041,6 +1051,7 @@ dev = [ { name = "pytest", specifier = ">=8.3.5" }, { name = "pytest-asyncio", specifier = ">=1.0.0" }, { name = "pytest-cov", specifier = ">=6.1.1" }, + { name = "pytest-xdist", specifier = ">=3.8.0" }, { name = "ruff", specifier = ">=0.11.11" }, { name = "ty", specifier = ">=0.0.1a12" }, ] @@ -1876,6 +1887,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, ] +[[package]] +name = "pytest-xdist" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, +] + [[package]] name = "python-dotenv" version = "1.2.1" From 381092b90b0875d9aa691be6721f55447c530178 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 27 Jan 2026 01:09:13 +0000 Subject: [PATCH 334/565] chore(deps): update ghcr.io/astral-sh/uv:latest docker digest to 143b40f --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 184c07eb..8253631a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,7 @@ FROM python:3.14-slim-bookworm@sha256:adb6bdfbcc7c744c3b1a05976136555e2d82b7df01ac3efe71737d7f95ef0f2d # Install uv package manager -COPY --from=ghcr.io/astral-sh/uv:latest@sha256:9a23023be68b2ed09750ae636228e903a54a05ea56ed03a934d00fe9fbeded4b /uv /uvx /bin/ +COPY --from=ghcr.io/astral-sh/uv:latest@sha256:143b40f4ab56a780f43377604702107b5a35f83a4453daf1e4be691358718a6a /uv /uvx /bin/ # Create non-root user first (matching original pwuser from Playwright image) RUN useradd -m -s /bin/bash pwuser From f48d360037856d492475b00a7678b6ce8c2c5555 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 27 Jan 2026 09:49:39 +0000 Subject: [PATCH 335/565] chore(deps): update anthropics/claude-code-action digest to 231bd75 --- .github/workflows/claude.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index e8c339f2..b1ce7e65 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -32,7 +32,7 @@ jobs: - name: Run Claude Code id: claude - uses: anthropics/claude-code-action@f64219702d7454cf29fe32a74104be6ed43dc637 # v1 + uses: 
anthropics/claude-code-action@231bd75b7196d48291c1498f1c6d277c2810d9a3 # v1 with: claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} From 8e29eba35f6eb8b019d2f60ca04e1efd3d498c2f Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 27 Jan 2026 11:27:16 +0100 Subject: [PATCH 336/565] fix(deps): update linkedin-scraper to 3.1.1 to fix --get-session hang Updates linkedin-scraper from >=3.1.0 to >=3.1.1 which includes a fix for authentication detection that was causing --get-session to hang indefinitely. The upstream fix (joeyism/linkedin_scraper@55f2305) improves is_logged_in() to handle LinkedIn's A/B tested DOM variants by: - Adding URL-based fallback detection - Checking multiple nav selector patterns - Failing fast on auth blocker URLs Resolves: #95 Related: joeyism/linkedin_scraper#269 --- pyproject.toml | 2 +- uv.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c8ec657a..998abf69 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ requires-python = ">=3.12" dependencies = [ "fastmcp>=2.14.0", "inquirer>=3.4.0", - "linkedin-scraper>=3.1.0", + "linkedin-scraper>=3.1.1", "playwright>=1.40.0", "pyperclip>=1.9.0", "python-dotenv>=1.1.1", diff --git a/uv.lock b/uv.lock index 78dca9a8..87b79086 100644 --- a/uv.lock +++ b/uv.lock @@ -1038,7 +1038,7 @@ dev = [ requires-dist = [ { name = "fastmcp", specifier = ">=2.14.0" }, { name = "inquirer", specifier = ">=3.4.0" }, - { name = "linkedin-scraper", specifier = ">=3.1.0" }, + { name = "linkedin-scraper", specifier = ">=3.1.1" }, { name = "playwright", specifier = ">=1.40.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, { name = "python-dotenv", specifier = ">=1.1.1" }, @@ -1058,7 +1058,7 @@ dev = [ [[package]] name = "linkedin-scraper" -version = "3.1.0" +version = "3.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiofiles" }, @@ -1068,9 +1068,9 @@ dependencies = [ { name = 
"python-dotenv" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d2/30/967d78a67bc974e65491582e23993ca078d47c7b634842af13c8422162b9/linkedin_scraper-3.1.0.tar.gz", hash = "sha256:830bd3a4c16aeb667f5a00c0eed7528c80e0b360016f4c8eecd9cebad0d8728e", size = 46636, upload-time = "2026-01-18T23:55:47.77Z" } +sdist = { url = "https://files.pythonhosted.org/packages/59/e0/389404ecd6d0efec2cc5be0d1c456d6b8f723865b03e0b6567945c713594/linkedin_scraper-3.1.1.tar.gz", hash = "sha256:3232a16a72053f8969464dc7386a7a0b2c262f9fe7c53f41734857ea9c908588", size = 47349, upload-time = "2026-01-27T06:05:53.871Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/a7/ce6de57a4bd75bfadaa23fb8f3eaa0b86de779335c13be08f8bbf3846438/linkedin_scraper-3.1.0-py3-none-any.whl", hash = "sha256:1e3ad52cd858d25034cab5f82261bfe35451941faec6003714aff2e745939212", size = 52372, upload-time = "2026-01-18T23:55:45.745Z" }, + { url = "https://files.pythonhosted.org/packages/3b/f6/a17d0be0cf6c9a85709e287f137c9f1227c44e208e737ef371f83dc5829b/linkedin_scraper-3.1.1-py3-none-any.whl", hash = "sha256:5977a28dcaa2b1d14caa0e3ea28f6d7c16e393f1140088b22588200a8498657c", size = 53017, upload-time = "2026-01-27T06:05:52.48Z" }, ] [[package]] From 7182ca9903bcf9ccf282a0f6ad2445be01004b8d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 27 Jan 2026 11:30:30 +0100 Subject: [PATCH 337/565] chore(release): bump version to 2.3.1 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 998abf69..dad5b601 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "2.3.0" +version = "2.3.1" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." 
readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 87b79086..5219dade 100644 --- a/uv.lock +++ b/uv.lock @@ -1011,7 +1011,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "2.3.0" +version = "2.3.1" source = { editable = "." } dependencies = [ { name = "fastmcp" }, From 94d14ee1535c9bf78cc819422dcf61e79172a92e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 27 Jan 2026 10:32:01 +0000 Subject: [PATCH 338/565] chore: update manifest.json and docker-compose.yml to v2.3.1 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 8900ae64..b6d7180c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:2.3.0 + image: stickerdaniel/linkedin-mcp-server:2.3.1 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index 5f6f651c..8b0ada44 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.3.0", + "version": "2.3.1", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. 
Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.0\n```\n\n### 2. Create LinkedIn Session\nUse one of these methods:\n- **Cookie**: Pass your `li_at` cookie via `LINKEDIN_COOKIE` environment variable\n- **Browser login**: Use the [uvx setup](https://github.com/stickerdaniel/linkedin-mcp-server#-uvx-setup-recommended---universal) to log in interactively\n\n## Requirements\n- Docker installed and running\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.1", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.3.0" + "stickerdaniel/linkedin-mcp-server:2.3.1" ] } }, From dda0d3ee3c83e5bea4de8496d84d7d70e90b632d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 27 Jan 2026 12:51:07 +0100 Subject: [PATCH 339/565] fix(docker): remove non-existent /opt/python from chmod The base image already provides Python, so uv doesn't install a separate Python version to /opt/python. Remove the reference to fix the Docker build failure. 
--- Dockerfile | 6 ++---- pyproject.toml | 2 +- uv.lock | 2 +- 3 files changed, 4 insertions(+), 6 deletions(-) diff --git a/Dockerfile b/Dockerfile index 8253631a..051185ff 100644 --- a/Dockerfile +++ b/Dockerfile @@ -15,15 +15,13 @@ RUN chown pwuser:pwuser /app # Copy project files with correct ownership COPY --chown=pwuser:pwuser . /app -# Set paths for Playwright browsers and uv Python installs to shared locations +# Set Playwright browser install location ENV PLAYWRIGHT_BROWSERS_PATH=/opt/playwright -ENV UV_PYTHON_INSTALL_DIR=/opt/python - # Install dependencies and Playwright with ONLY Chromium (not Firefox/WebKit) # --with-deps installs required system dependencies (fonts, libraries) via apt (needs root) RUN uv sync --frozen && \ uv run playwright install --with-deps chromium && \ - chmod -R 755 /opt/playwright /opt/python + chmod -R 755 /opt/playwright # Fix ownership of app directory (venv created by uv) RUN chown -R pwuser:pwuser /app diff --git a/pyproject.toml b/pyproject.toml index dad5b601..a7dcb4ca 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "2.3.1" +version = "2.3.2" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 5219dade..62057316 100644 --- a/uv.lock +++ b/uv.lock @@ -1011,7 +1011,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "2.3.1" +version = "2.3.2" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From a18478886f546a4132da1d6385489365145fff0e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 27 Jan 2026 11:52:09 +0000 Subject: [PATCH 340/565] chore: update manifest.json and docker-compose.yml to v2.3.2 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index b6d7180c..c2159fd1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:2.3.1 + image: stickerdaniel/linkedin-mcp-server:2.3.2 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index 8b0ada44..10230a73 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.3.1", + "version": "2.3.2", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.1", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.2", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.3.1" + "stickerdaniel/linkedin-mcp-server:2.3.2" ] } }, From 718815757367a591d18552fa1681eb85c60383f1 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 1 Feb 2026 20:25:46 +0100 Subject: [PATCH 341/565] chore(renovate): reduce PR noise with aggressive grouping - Add weekly schedule (Monday 6am) - Group all GitHub Actions and Docker images as 'CI dependencies' - Group all major updates together - Remove automerge (insufficient test coverage) --- renovate.json | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/renovate.json b/renovate.json index 8ba7f14d..5190dc4b 100644 --- a/renovate.json +++ b/renovate.json @@ -1,6 +1,7 @@ { "$schema": "https://docs.renovatebot.com/renovate-schema.json", "extends": ["config:best-practices", "group:allNonMajor"], + "schedule": ["before 6am on Monday"], "vulnerabilityAlerts": { "enabled": true, "labels": ["security"], @@ -8,20 +9,27 @@ }, "packageRules": [ { + "description": "Managed by release workflow, not Renovate", "matchPackageNames": ["stickerdaniel/linkedin-mcp-server"], "matchManagers": ["docker-compose"], - "enabled": false, - "description": 
"Managed by release workflow, not Renovate" + "enabled": false }, { - "matchPackageNames": ["fastmcp", "mcp"], - "matchUpdateTypes": ["minor", "patch"], - "groupName": "MCP ecosystem" + "description": "Group all CI dependencies (GitHub Actions + Docker)", + "matchManagers": ["github-actions", "dockerfile", "docker-compose"], + "matchPackageNames": ["!stickerdaniel/linkedin-mcp-server"], + "groupName": "CI dependencies" }, { - "matchPackageNames": ["pytest**", "ruff", "pre-commit", "ty"], + "description": "Group all major updates together", + "matchUpdateTypes": ["major"], + "groupName": "all major dependencies" + }, + { + "description": "Group MCP ecosystem packages", + "matchPackageNames": ["fastmcp", "mcp"], "matchUpdateTypes": ["minor", "patch"], - "automerge": true + "groupName": "MCP ecosystem" } ] } From 7d02c83df1b36da1416a8c21592e84d876fa8a4d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 1 Feb 2026 22:22:50 +0100 Subject: [PATCH 342/565] feat(ci): add automated PyPI publishing with Trusted Publishing - Change package name to linkedin-scraper-mcp for PyPI availability - Add PyPI metadata (authors, license, keywords, classifiers, URLs) - Add id-token permission and publish steps to release workflow - Update README with new uvx install commands and PyPI badge - Update AGENTS.md with uvx command - Create py.typed marker for type checker support Users can now install with: uvx linkedin-scraper-mcp --- .github/workflows/release.yml | 14 ++++++++++++++ AGENTS.md | 5 +++-- README.md | 34 +++++++++++++++------------------ linkedin_mcp_server/py.typed | 0 pyproject.toml | 36 ++++++++++++++++++++++++++++++++++- 5 files changed, 67 insertions(+), 22 deletions(-) create mode 100644 linkedin_mcp_server/py.typed diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 870d65ab..a3ca0a82 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -54,6 +54,7 @@ jobs: permissions: contents: write packages: write + 
id-token: write # Required for PyPI Trusted Publishing steps: - name: Checkout code @@ -161,8 +162,21 @@ jobs: name: "LinkedIn MCP Server v${{ env.VERSION }}" body_path: RELEASE_NOTES.md + - name: Build package distributions + run: | + uv build + echo "Built package distributions:" + ls -lh dist/ + + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + print-hash: true + verbose: true + - name: Summary run: | echo "Successfully released v$VERSION!" echo "Docker: stickerdaniel/linkedin-mcp-server:$VERSION" + echo "PyPI: https://pypi.org/project/linkedin-scraper-mcp/$VERSION/" echo "GitHub: https://github.com/${{ github.repository }}/releases/tag/v$VERSION" diff --git a/AGENTS.md b/AGENTS.md index 566d6c1d..bad2dd05 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -8,8 +8,9 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co - Use `uv` for dependency management: `uv sync` (installs all dependencies) - Development dependencies: `uv sync --group dev` -- Bump version: `uv version --bump minor` (or `major`, `patch`) - git tag is created automatically by release workflow. Once Docker image is published, manually file a PR in the MCP registry to update the version. +- Bump version: `uv version --bump minor` (or `major`, `patch`) - git tag is created automatically by release workflow. Once Docker image and PyPI package are published, manually file a PR in the MCP registry to update the version. 
- Run server locally: `uv run -m linkedin_mcp_server --no-headless` +- Run via uvx (PyPI): `uvx linkedin-scraper-mcp` - Run in Docker: `docker run -it --rm -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp stickerdaniel/linkedin-mcp-server:latest` **Code Quality:** @@ -23,7 +24,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co **Docker Commands:** - Build: `docker build -t linkedin-mcp-server .` -- Get session: Use uvx locally first: `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` +- Get session: Use uvx locally first: `uvx linkedin-scraper-mcp --get-session` ## Architecture Overview diff --git a/README.md b/README.md index dbfcaa9b..a394087a 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ # LinkedIn MCP Server

+ PyPI CI Status Release License @@ -61,7 +62,7 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company **Step 1: Create a session (first time only)** ```bash -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session +uvx linkedin-scraper-mcp --get-session ``` This opens a browser for you to log in manually (5 minute timeout for 2FA, captcha, etc.). The session is saved to `~/.linkedin-mcp/session.json`. @@ -69,11 +70,11 @@ This opens a browser for you to log in manually (5 minute timeout for 2FA, captc **Step 2: Run the server** ```bash -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server +uvx linkedin-scraper-mcp ``` > [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` again. +> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --get-session` again. 
### uvx Setup Help @@ -87,11 +88,7 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp "mcpServers": { "linkedin": { "command": "uvx", - "args": [ - "--from", - "git+https://github.com/stickerdaniel/linkedin-mcp-server", - "linkedin-mcp-server" - ] + "args": ["linkedin-scraper-mcp"] } } } @@ -119,17 +116,16 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp ```bash # Create a session interactively -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session +uvx linkedin-scraper-mcp --get-session # Run with debug logging -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --log-level DEBUG +uvx linkedin-scraper-mcp --log-level DEBUG ``` **HTTP Mode Example (for web-based MCP clients):** ```bash -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server \ - --transport streamable-http --host 127.0.0.1 --port 8080 --path /mcp +uvx linkedin-scraper-mcp --transport streamable-http --host 127.0.0.1 --port 8080 --path /mcp ``` **Test with mcp inspector:** @@ -159,7 +155,7 @@ uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp **Login issues:** - LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` -- You might get a captcha challenge if you logged in frequently. Run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` which opens a browser where you can solve it manually. +- You might get a captcha challenge if you logged in frequently. Run `uvx linkedin-scraper-mcp --get-session` which opens a browser where you can solve it manually. **Timeout issues:** @@ -216,7 +212,7 @@ Create a session file locally, then mount it into Docker. 
**Step 1: Create session using uvx (one-time setup)** ```bash -uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session +uvx linkedin-scraper-mcp --get-session ``` This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The session is saved to `~/.linkedin-mcp/session.json`. @@ -239,7 +235,7 @@ This opens a browser window where you log in manually (5 minute timeout for 2FA, ``` > [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` again locally, or use a fresh `li_at` cookie. +> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --get-session` again locally, or use a fresh `li_at` cookie. > [!NOTE] > **Why can't I run `--get-session` in Docker?** Docker containers don't have a display server. You have two options: @@ -303,7 +299,7 @@ docker run -it --rm \ - Make sure you have only one active LinkedIn session at a time - LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` -- You might get a captcha challenge if you logged in frequently. Run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` which opens a browser where you can solve captchas manually. See the [uvx setup](#-uvx-setup-recommended---universal) for prerequisites. +- You might get a captcha challenge if you logged in frequently. Run `uvx linkedin-scraper-mcp --get-session` which opens a browser where you can solve captchas manually. See the [uvx setup](#-uvx-setup-recommended---universal) for prerequisites. **Timeout issues:** @@ -329,10 +325,10 @@ docker run -it --rm \ 1. Download the [DXT extension](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest) 2. Double-click to install into Claude Desktop -3. 
Create a session: `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` +3. Create a session: `uvx linkedin-scraper-mcp --get-session` > [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` again. +> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --get-session` again. ### DXT Extension Setup Help @@ -358,7 +354,7 @@ docker run -it --rm \ - Make sure you have only one active LinkedIn session at a time - LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` -- You might get a captcha challenge if you logged in frequently. Run `uvx --from git+https://github.com/stickerdaniel/linkedin-mcp-server linkedin-mcp-server --get-session` which opens a browser where you can solve captchas manually. See the [uvx setup](#-uvx-setup-recommended---universal) for prerequisites. +- You might get a captcha challenge if you logged in frequently. Run `uvx linkedin-scraper-mcp --get-session` which opens a browser where you can solve captchas manually. See the [uvx setup](#-uvx-setup-recommended---universal) for prerequisites. **Timeout issues:** diff --git a/linkedin_mcp_server/py.typed b/linkedin_mcp_server/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/pyproject.toml b/pyproject.toml index a7dcb4ca..8a636a8f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,36 @@ [project] -name = "linkedin-mcp-server" +name = "linkedin-scraper-mcp" version = "2.3.2" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." 
readme = "README.md" requires-python = ">=3.12" +authors = [ + { name = "Daniel Sticker", email = "daniel@sticker.name" } +] +license = "Apache-2.0" +keywords = [ + "linkedin", + "mcp", + "model-context-protocol", + "scraper", + "ai", + "automation", + "llm", + "anthropic", + "claude", +] +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Internet :: WWW/HTTP :: Dynamic Content", + "Topic :: Scientific/Engineering :: Artificial Intelligence", + "Environment :: Console", + "Operating System :: OS Independent", +] dependencies = [ "fastmcp>=2.14.0", "inquirer>=3.4.0", @@ -13,6 +40,13 @@ dependencies = [ "python-dotenv>=1.1.1", ] +[project.urls] +Homepage = "https://github.com/stickerdaniel/linkedin-mcp-server" +Documentation = "https://github.com/stickerdaniel/linkedin-mcp-server#readme" +Repository = "https://github.com/stickerdaniel/linkedin-mcp-server" +Issues = "https://github.com/stickerdaniel/linkedin-mcp-server/issues" +Changelog = "https://github.com/stickerdaniel/linkedin-mcp-server/releases" + [project.scripts] linkedin-mcp-server = "linkedin_mcp_server.cli_main:main" From 1980ac04814b9502dc9c60cf4fe1fbe0a8ed7180 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 1 Feb 2026 22:34:09 +0100 Subject: [PATCH 343/565] chore: bump version to 2.3.3 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8a636a8f..12a1363f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "2.3.2" +version = "2.3.3" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." 
readme = "README.md" requires-python = ">=3.12" From cce68f17e56ff290eccd7ef8f96e7e7474604685 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 1 Feb 2026 21:34:39 +0000 Subject: [PATCH 344/565] chore: update manifest.json and docker-compose.yml to v2.3.3 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index c2159fd1..eec65122 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:2.3.2 + image: stickerdaniel/linkedin-mcp-server:2.3.3 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index 10230a73..d8660e01 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.3.2", + "version": "2.3.3", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.2", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.3", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.3.2" + "stickerdaniel/linkedin-mcp-server:2.3.3" ] } }, From d70711505a382eb93d1f9dec7d70f375a32ffb39 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 1 Feb 2026 23:35:50 +0100 Subject: [PATCH 345/565] fix: regenerate uv.lock for new package name --- uv.lock | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/uv.lock b/uv.lock index 62057316..acfeed9a 100644 --- a/uv.lock +++ b/uv.lock @@ -1010,8 +1010,25 @@ wheels = [ ] [[package]] -name = "linkedin-mcp-server" -version = "2.3.2" +name = "linkedin-scraper" +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiofiles" }, + { name = "lxml" }, + { name = "playwright" }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/59/e0/389404ecd6d0efec2cc5be0d1c456d6b8f723865b03e0b6567945c713594/linkedin_scraper-3.1.1.tar.gz", hash = "sha256:3232a16a72053f8969464dc7386a7a0b2c262f9fe7c53f41734857ea9c908588", size = 47349, upload-time = "2026-01-27T06:05:53.871Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/3b/f6/a17d0be0cf6c9a85709e287f137c9f1227c44e208e737ef371f83dc5829b/linkedin_scraper-3.1.1-py3-none-any.whl", hash = "sha256:5977a28dcaa2b1d14caa0e3ea28f6d7c16e393f1140088b22588200a8498657c", size = 53017, upload-time = "2026-01-27T06:05:52.48Z" }, +] + +[[package]] +name = "linkedin-scraper-mcp" +version = "2.3.3" source = { editable = "." } dependencies = [ { name = "fastmcp" }, @@ -1056,23 +1073,6 @@ dev = [ { name = "ty", specifier = ">=0.0.1a12" }, ] -[[package]] -name = "linkedin-scraper" -version = "3.1.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "aiofiles" }, - { name = "lxml" }, - { name = "playwright" }, - { name = "pydantic" }, - { name = "python-dotenv" }, - { name = "requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/59/e0/389404ecd6d0efec2cc5be0d1c456d6b8f723865b03e0b6567945c713594/linkedin_scraper-3.1.1.tar.gz", hash = "sha256:3232a16a72053f8969464dc7386a7a0b2c262f9fe7c53f41734857ea9c908588", size = 47349, upload-time = "2026-01-27T06:05:53.871Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/f6/a17d0be0cf6c9a85709e287f137c9f1227c44e208e737ef371f83dc5829b/linkedin_scraper-3.1.1-py3-none-any.whl", hash = "sha256:5977a28dcaa2b1d14caa0e3ea28f6d7c16e393f1140088b22588200a8498657c", size = 53017, upload-time = "2026-01-27T06:05:52.48Z" }, -] - [[package]] name = "lupa" version = "2.6" From f2f58dc8cf8b2b653a4051d7404d1d208f281dad Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 1 Feb 2026 23:37:03 +0100 Subject: [PATCH 346/565] chore: bump version to 2.3.4 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 12a1363f..f4a37b24 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "2.3.3" +version = "2.3.4" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI 
integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" From caca78698ddf8c93f31f43eb707c9d99952b105f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 1 Feb 2026 22:37:29 +0000 Subject: [PATCH 347/565] chore: update manifest.json and docker-compose.yml to v2.3.4 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index eec65122..14715337 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:2.3.3 + image: stickerdaniel/linkedin-mcp-server:2.3.4 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index d8660e01..2752221b 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.3.3", + "version": "2.3.4", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. 
Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.3", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.4", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.3.3" + "stickerdaniel/linkedin-mcp-server:2.3.4" ] } }, From 67fe1cf5f1aff301384a95908b5460269f10e11f Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 1 Feb 2026 23:47:05 +0100 Subject: [PATCH 348/565] fix: add linkedin-scraper-mcp CLI alias for cleaner uvx usage --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index f4a37b24..d3d45c35 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,6 +49,7 @@ Changelog = "https://github.com/stickerdaniel/linkedin-mcp-server/releases" [project.scripts] linkedin-mcp-server = "linkedin_mcp_server.cli_main:main" +linkedin-scraper-mcp = "linkedin_mcp_server.cli_main:main" [build-system] requires = ["setuptools>=68.0", "wheel"] From 59de989c40823af091bd49185c9020b85d06a274 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 1 Feb 2026 23:47:22 +0100 Subject: [PATCH 349/565] chore: bump version to 2.3.5 
--- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d3d45c35..cd900e66 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "2.3.4" +version = "2.3.5" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index acfeed9a..619e1079 100644 --- a/uv.lock +++ b/uv.lock @@ -1028,7 +1028,7 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "2.3.3" +version = "2.3.5" source = { editable = "." } dependencies = [ { name = "fastmcp" }, From a3daff5bc4e84e0721cb3135f3c80669ce74d1ce Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 1 Feb 2026 22:47:44 +0000 Subject: [PATCH 350/565] chore: update manifest.json and docker-compose.yml to v2.3.5 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 14715337..36305cf9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:2.3.4 + image: stickerdaniel/linkedin-mcp-server:2.3.5 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index 2752221b..f5fd3644 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.3.4", + "version": "2.3.5", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.4", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. 
Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.5", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.3.4" + "stickerdaniel/linkedin-mcp-server:2.3.5" ] } }, From 7ae5b94419fbc33c29558689df808001283dfbce Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 6 Feb 2026 05:50:22 +0000 Subject: [PATCH 351/565] chore(deps): update ci dependencies --- .github/workflows/ci.yml | 8 ++++---- .github/workflows/claude.yml | 4 ++-- .github/workflows/release.yml | 10 +++++----- Dockerfile | 4 ++-- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6bf14324..4d803e41 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,10 +12,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Set up uv - uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7 + uses: astral-sh/setup-uv@803947b9bd8e9f986429fa0c5a41c367cd732b41 # v7 with: enable-cache: true @@ -33,9 +33,9 @@ jobs: test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7 + - uses: astral-sh/setup-uv@803947b9bd8e9f986429fa0c5a41c367cd732b41 # v7 with: enable-cache: true diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index b1ce7e65..1057d08f 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ 
-26,13 +26,13 @@ jobs: actions: read # Required for Claude to read CI results on PRs steps: - name: Checkout repository - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: fetch-depth: 1 - name: Run Claude Code id: claude - uses: anthropics/claude-code-action@231bd75b7196d48291c1498f1c6d277c2810d9a3 # v1 + uses: anthropics/claude-code-action@b113f49a56229d8276e2bf05743ad6900121239c # v1 with: claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a3ca0a82..7456a119 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -13,12 +13,12 @@ jobs: should-release: ${{ steps.check.outputs.should-release }} new-version: ${{ steps.check.outputs.new-version }} steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: fetch-depth: 2 # Need to compare with previous commit - name: Set up uv - uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7 + uses: astral-sh/setup-uv@803947b9bd8e9f986429fa0c5a41c367cd732b41 # v7 with: enable-cache: true @@ -58,12 +58,12 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: fetch-depth: 0 - name: Set up uv - uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7 + uses: astral-sh/setup-uv@803947b9bd8e9f986429fa0c5a41c367cd732b41 # v7 with: enable-cache: true @@ -110,7 +110,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3 - name: Log in to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3 with: username: ${{ 
secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} diff --git a/Dockerfile b/Dockerfile index 051185ff..7bda0ac1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,9 +1,9 @@ # Use slim Python base instead of full Playwright image (saves ~300-400 MB) # Only Chromium is installed, not Firefox/WebKit -FROM python:3.14-slim-bookworm@sha256:adb6bdfbcc7c744c3b1a05976136555e2d82b7df01ac3efe71737d7f95ef0f2d +FROM python:3.14-slim-bookworm@sha256:f0540d0436a220db0a576ccfe75631ab072391e43a24b88972ef9833f699095f # Install uv package manager -COPY --from=ghcr.io/astral-sh/uv:latest@sha256:143b40f4ab56a780f43377604702107b5a35f83a4453daf1e4be691358718a6a /uv /uvx /bin/ +COPY --from=ghcr.io/astral-sh/uv:latest@sha256:78a7ff97cd27b7124a5f3c2aefe146170793c56a1e03321dd31a289f6d82a04f /uv /uvx /bin/ # Create non-root user first (matching original pwuser from Playwright image) RUN useradd -m -s /bin/bash pwuser From 96b932d2a1520891e861d5227af52335607bb901 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 9 Feb 2026 01:44:28 +0000 Subject: [PATCH 352/565] chore(deps): update ci dependencies --- .github/workflows/ci.yml | 4 ++-- .github/workflows/claude.yml | 2 +- .github/workflows/release.yml | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4d803e41..977d7b9d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,7 +15,7 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Set up uv - uses: astral-sh/setup-uv@803947b9bd8e9f986429fa0c5a41c367cd732b41 # v7 + uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7 with: enable-cache: true @@ -35,7 +35,7 @@ jobs: steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - uses: astral-sh/setup-uv@803947b9bd8e9f986429fa0c5a41c367cd732b41 # v7 + - uses: 
astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7 with: enable-cache: true diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index 1057d08f..752ccaaf 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -32,7 +32,7 @@ jobs: - name: Run Claude Code id: claude - uses: anthropics/claude-code-action@b113f49a56229d8276e2bf05743ad6900121239c # v1 + uses: anthropics/claude-code-action@6c61301d8e1ee91bef7b65172f93462bbb216394 # v1 with: claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 7456a119..a963d597 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -18,7 +18,7 @@ jobs: fetch-depth: 2 # Need to compare with previous commit - name: Set up uv - uses: astral-sh/setup-uv@803947b9bd8e9f986429fa0c5a41c367cd732b41 # v7 + uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7 with: enable-cache: true @@ -63,7 +63,7 @@ jobs: fetch-depth: 0 - name: Set up uv - uses: astral-sh/setup-uv@803947b9bd8e9f986429fa0c5a41c367cd732b41 # v7 + uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7 with: enable-cache: true From d5de8ef777f1e753573c04d0ab919d540bd2aa7e Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 12 Feb 2026 08:48:51 +0100 Subject: [PATCH 353/565] fix(deps): use linkedin_scraper fork with rate limit fix Point dependency at stickerdaniel/linkedin_scraper fork (fix/rate-limit-false-positive) to fix detect_rate_limit() false-firing on React RSC payloads. Also update docs with detailed release workflow notes and bump opencode agent models to gpt-5.3-codex. 
See also: joeyism/linkedin_scraper#278 --- .opencode/agents/code-reviewer.md | 6 +++--- .opencode/agents/code-simplifier.md | 6 +++--- .opencode/agents/comment-analyzer.md | 6 +++--- .opencode/agents/pr-test-analyzer.md | 6 +++--- .opencode/agents/silent-failure-hunter.md | 6 +++--- .opencode/agents/type-design-analyzer.md | 6 +++--- AGENTS.md | 2 +- pyproject.toml | 7 +++++-- uv.lock | 10 +++------- 9 files changed, 27 insertions(+), 28 deletions(-) diff --git a/.opencode/agents/code-reviewer.md b/.opencode/agents/code-reviewer.md index d0ee6897..3ab541a3 100644 --- a/.opencode/agents/code-reviewer.md +++ b/.opencode/agents/code-reviewer.md @@ -2,9 +2,9 @@ name: code-reviewer mode: subagent # https://models.dev/ -model: 'openai/gpt-5.2-codex' -variant: 'xhigh' -# model: 'github-copilot/gpt-5.2-codex' +model: 'openai/gpt-5.3-codex' +variant: 'high' +# model: 'github-copilot/gpt-5.3-codex' color: '#22c55e' description: | Use this agent when you need to review code for adherence to project guidelines, style guides, and best practices. This agent should be used proactively after writing or modifying code, especially before committing changes or creating pull requests. It will check for style violations, potential issues, and ensure code follows the established patterns in CLAUDE.md. Also the agent needs to know which files to focus on for the review. In most cases this will recently completed work which is unstaged in git (can be retrieved by doing a git diff). However there can be cases where this is different, make sure to specify this as the agent input when calling the agent. 
diff --git a/.opencode/agents/code-simplifier.md b/.opencode/agents/code-simplifier.md index 5124308d..325929c4 100644 --- a/.opencode/agents/code-simplifier.md +++ b/.opencode/agents/code-simplifier.md @@ -2,9 +2,9 @@ name: code-simplifier mode: subagent # https://models.dev/ -model: 'openai/gpt-5.2-codex' -variant: 'xhigh' -# model: 'github-copilot/gpt-5.2-codex' +model: 'openai/gpt-5.3-codex' +variant: 'high' +# model: 'github-copilot/gpt-5.3-codex' color: '#3b82f6' description: | Use this agent when code has been written or modified and needs to be simplified for clarity, consistency, and maintainability while preserving all functionality. This agent should be triggered automatically after completing a coding task or writing a logical chunk of code. It simplifies code by following project best practices while retaining all functionality. The agent focuses only on recently modified code unless instructed otherwise. diff --git a/.opencode/agents/comment-analyzer.md b/.opencode/agents/comment-analyzer.md index 52f02d72..d55b0b2c 100644 --- a/.opencode/agents/comment-analyzer.md +++ b/.opencode/agents/comment-analyzer.md @@ -2,9 +2,9 @@ name: comment-analyzer mode: subagent # https://models.dev/ -model: 'openai/gpt-5.2-codex' -variant: 'xhigh' -# model: 'github-copilot/gpt-5.2-codex' +model: 'openai/gpt-5.3-codex' +variant: 'high' +# model: 'github-copilot/gpt-5.3-codex' color: '#10b981' description: | Use this agent when you need to analyze code comments for accuracy, completeness, and long-term maintainability. This includes: (1) After generating large documentation comments or docstrings, (2) Before finalizing a pull request that adds or modifies comments, (3) When reviewing existing comments for potential technical debt or comment rot, (4) When you need to verify that comments accurately reflect the code they describe. 
diff --git a/.opencode/agents/pr-test-analyzer.md b/.opencode/agents/pr-test-analyzer.md index 3e45a90e..76a8c726 100644 --- a/.opencode/agents/pr-test-analyzer.md +++ b/.opencode/agents/pr-test-analyzer.md @@ -2,9 +2,9 @@ name: pr-test-analyzer mode: subagent # https://models.dev/ -model: 'openai/gpt-5.2-codex' -variant: 'xhigh' -# model: 'github-copilot/gpt-5.2-codex' +model: 'openai/gpt-5.3-codex' +variant: 'high' +# model: 'github-copilot/gpt-5.3-codex' color: '#06b6d4' description: | Use this agent when you need to review a pull request for test coverage quality and completeness. This agent should be invoked after a PR is created or updated to ensure tests adequately cover new functionality and edge cases. Examples: diff --git a/.opencode/agents/silent-failure-hunter.md b/.opencode/agents/silent-failure-hunter.md index 3b6e467f..7fafe9e9 100644 --- a/.opencode/agents/silent-failure-hunter.md +++ b/.opencode/agents/silent-failure-hunter.md @@ -2,9 +2,9 @@ name: silent-failure-hunter mode: subagent # https://models.dev/ -model: 'openai/gpt-5.2-codex' -variant: 'xhigh' -# model: 'github-copilot/gpt-5.2-codex' +model: 'openai/gpt-5.3-codex' +variant: 'high' +# model: 'github-copilot/gpt-5.3-codex' color: '#eab308' description: | Use this agent when reviewing code changes in a pull request to identify silent failures, inadequate error handling, and inappropriate fallback behavior. This agent should be invoked proactively after completing a logical chunk of work that involves error handling, catch blocks, fallback logic, or any code that could potentially suppress errors. 
Examples: diff --git a/.opencode/agents/type-design-analyzer.md b/.opencode/agents/type-design-analyzer.md index 2b9549f3..746fef41 100644 --- a/.opencode/agents/type-design-analyzer.md +++ b/.opencode/agents/type-design-analyzer.md @@ -2,9 +2,9 @@ name: type-design-analyzer mode: subagent # https://models.dev/ -model: 'openai/gpt-5.2-codex' -variant: 'xhigh' -# model: 'github-copilot/gpt-5.2-codex' +model: 'openai/gpt-5.3-codex' +variant: 'high' +# model: 'github-copilot/gpt-5.3-codex' color: '#ec4899' description: | Use this agent when you need expert analysis of type design in your codebase. Specifically use it: (1) when introducing a new type to ensure it follows best practices for encapsulation and invariant expression, (2) during pull request creation to review all types being added, (3) when refactoring existing types to improve their design quality. The agent will provide both qualitative feedback and quantitative ratings on encapsulation, invariant expression, usefulness, and enforcement. diff --git a/AGENTS.md b/AGENTS.md index bad2dd05..7829acfa 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -8,7 +8,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co - Use `uv` for dependency management: `uv sync` (installs all dependencies) - Development dependencies: `uv sync --group dev` -- Bump version: `uv version --bump minor` (or `major`, `patch`) - git tag is created automatically by release workflow. Once Docker image and PyPI package are published, manually file a PR in the MCP registry to update the version. +- Bump version: `uv version --bump minor` (or `major`, `patch`) - this is the **only manual step** for a release. The GitHub Actions release workflow (`.github/workflows/release.yml`) automatically handles: manifest.json/docker-compose.yml version updates, git tag, Docker build & push, DXT extension, GitHub release, and PyPI publish. After the workflow completes, manually file a PR in the MCP registry to update the version. 
- Run server locally: `uv run -m linkedin_mcp_server --no-headless` - Run via uvx (PyPI): `uvx linkedin-scraper-mcp` - Run in Docker: `docker run -it --rm -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp stickerdaniel/linkedin-mcp-server:latest` diff --git a/pyproject.toml b/pyproject.toml index cd900e66..f8eb562b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "2.3.5" +version = "2.3.6" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" @@ -34,7 +34,7 @@ classifiers = [ dependencies = [ "fastmcp>=2.14.0", "inquirer>=3.4.0", - "linkedin-scraper>=3.1.1", + "linkedin-scraper", "playwright>=1.40.0", "pyperclip>=1.9.0", "python-dotenv>=1.1.1", @@ -62,6 +62,9 @@ exclude = ["assets*", "docs*", "tests*"] [tool.setuptools.package-data] linkedin_mcp_server = ["py.typed"] +[tool.uv.sources] +linkedin-scraper = { git = "https://github.com/stickerdaniel/linkedin_scraper.git", rev = "fix/rate-limit-false-positive" } + [dependency-groups] dev = [ "aiohttp>=3.12.13", diff --git a/uv.lock b/uv.lock index 619e1079..85c89e1e 100644 --- a/uv.lock +++ b/uv.lock @@ -1012,7 +1012,7 @@ wheels = [ [[package]] name = "linkedin-scraper" version = "3.1.1" -source = { registry = "https://pypi.org/simple" } +source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git?rev=fix%2Frate-limit-false-positive#092aef732a1a276b61052e5cefccfcfea0c3695d" } dependencies = [ { name = "aiofiles" }, { name = "lxml" }, @@ -1021,14 +1021,10 @@ dependencies = [ { name = "python-dotenv" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/59/e0/389404ecd6d0efec2cc5be0d1c456d6b8f723865b03e0b6567945c713594/linkedin_scraper-3.1.1.tar.gz", hash = "sha256:3232a16a72053f8969464dc7386a7a0b2c262f9fe7c53f41734857ea9c908588", size = 
47349, upload-time = "2026-01-27T06:05:53.871Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/f6/a17d0be0cf6c9a85709e287f137c9f1227c44e208e737ef371f83dc5829b/linkedin_scraper-3.1.1-py3-none-any.whl", hash = "sha256:5977a28dcaa2b1d14caa0e3ea28f6d7c16e393f1140088b22588200a8498657c", size = 53017, upload-time = "2026-01-27T06:05:52.48Z" }, -] [[package]] name = "linkedin-scraper-mcp" -version = "2.3.5" +version = "2.3.6" source = { editable = "." } dependencies = [ { name = "fastmcp" }, @@ -1055,7 +1051,7 @@ dev = [ requires-dist = [ { name = "fastmcp", specifier = ">=2.14.0" }, { name = "inquirer", specifier = ">=3.4.0" }, - { name = "linkedin-scraper", specifier = ">=3.1.1" }, + { name = "linkedin-scraper", git = "https://github.com/stickerdaniel/linkedin_scraper.git?rev=fix%2Frate-limit-false-positive" }, { name = "playwright", specifier = ">=1.40.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, { name = "python-dotenv", specifier = ">=1.1.1" }, From 3a295915d68addf45f90e136174eb80f6a377413 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 12 Feb 2026 07:50:12 +0000 Subject: [PATCH 354/565] chore: update manifest.json and docker-compose.yml to v2.3.6 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 36305cf9..e44afc69 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:2.3.5 + image: stickerdaniel/linkedin-mcp-server:2.3.6 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index f5fd3644..830265e7 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.3.5", + "version": "2.3.6", 
"description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.5", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. 
Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.6", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.3.5" + "stickerdaniel/linkedin-mcp-server:2.3.6" ] } }, From f1331c78d6af7f3a534b8efeade1233717b668d8 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 12 Feb 2026 09:01:57 +0100 Subject: [PATCH 355/565] fix(docker): install git for git-based dependency resolution --- Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Dockerfile b/Dockerfile index 7bda0ac1..98e23460 100644 --- a/Dockerfile +++ b/Dockerfile @@ -15,6 +15,9 @@ RUN chown pwuser:pwuser /app # Copy project files with correct ownership COPY --chown=pwuser:pwuser . /app +# Install git (needed for git-based dependencies in pyproject.toml) +RUN apt-get update && apt-get install -y --no-install-recommends git && rm -rf /var/lib/apt/lists/* + # Set Playwright browser install location ENV PLAYWRIGHT_BROWSERS_PATH=/opt/playwright # Install dependencies and Playwright with ONLY Chromium (not Firefox/WebKit) From d9decd018c8c692a77a5917568fa6570527411d6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 12 Feb 2026 09:09:17 +0100 Subject: [PATCH 356/565] chore: bump version to 2.3.7 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f8eb562b..ebf9c79e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "2.3.6" +version = "2.3.7" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." 
readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 85c89e1e..ea44b0fd 100644 --- a/uv.lock +++ b/uv.lock @@ -1024,7 +1024,7 @@ dependencies = [ [[package]] name = "linkedin-scraper-mcp" -version = "2.3.6" +version = "2.3.7" source = { editable = "." } dependencies = [ { name = "fastmcp" }, From 4a8c1eea5975847a8679687b53b23a6097e09e7e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 12 Feb 2026 08:10:12 +0000 Subject: [PATCH 357/565] chore: update manifest.json and docker-compose.yml to v2.3.7 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index e44afc69..b8468a88 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:2.3.6 + image: stickerdaniel/linkedin-mcp-server:2.3.7 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index 830265e7..de7b57ae 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.3.6", + "version": "2.3.7", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. 
Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.6", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.7", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.3.6" + "stickerdaniel/linkedin-mcp-server:2.3.7" ] } }, From 6337d88c6879d5e07cb107f9b80f9aecb4d8baa6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 12 Feb 2026 10:46:06 +0100 Subject: [PATCH 358/565] feat!: Switch to patchright with persistent browser context Replace Playwright with Patchright (anti-detection fork) and use launch_persistent_context(user_data_dir=...) for full Chromium profile persistence. This fixes cross-platform session issues where sessions created on macOS failed in Docker (Linux, headless). BREAKING CHANGE: Old session.json files and LINKEDIN_COOKIE env var are no longer supported. Users must re-run --get-session to create a new persistent browser profile at ~/.linkedin-mcp/profile/. 
--- .env.example | 7 +-- AGENTS.md | 23 +++---- Dockerfile | 12 ++-- README.md | 81 +++++++++---------------- docs/docker-hub.md | 38 ++++-------- linkedin_mcp_server/authentication.py | 62 +++++++------------ linkedin_mcp_server/cli_main.py | 54 ++++++++--------- linkedin_mcp_server/config/loaders.py | 29 ++++----- linkedin_mcp_server/config/schema.py | 4 +- linkedin_mcp_server/drivers/__init__.py | 20 +++--- linkedin_mcp_server/drivers/browser.py | 67 +++++++------------- linkedin_mcp_server/error_handler.py | 10 +-- linkedin_mcp_server/exceptions.py | 16 +---- linkedin_mcp_server/setup.py | 48 +++++++-------- linkedin_mcp_server/utils/__init__.py | 7 --- pyproject.toml | 4 +- tests/conftest.py | 28 ++++----- tests/test_authentication.py | 37 ++++------- tests/test_config.py | 7 ++- tests/test_exceptions.py | 7 --- tests/test_utils.py | 12 ---- uv.lock | 48 +++++++-------- 22 files changed, 238 insertions(+), 383 deletions(-) delete mode 100644 tests/test_utils.py diff --git a/.env.example b/.env.example index 0eec0421..cb7141ea 100644 --- a/.env.example +++ b/.env.example @@ -1,10 +1,9 @@ # LinkedIn MCP Server Environment Variables # Copy this file to .env and fill in your values -# LinkedIn session cookie (optional) -# Either set this, or run with --get-session to log in with email/password. 
-# Extract from browser: DevTools > Application > Cookies > linkedin.com > li_at -LINKEDIN_COOKIE= +# Persistent browser profile directory (default: ~/.linkedin-mcp/profile) +# Run with --get-session to create a profile via browser login +USER_DATA_DIR=~/.linkedin-mcp/profile # Browser mode (default: true) # true = headless, false = visible window diff --git a/AGENTS.md b/AGENTS.md index 7829acfa..3d81c378 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -9,6 +9,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co - Use `uv` for dependency management: `uv sync` (installs all dependencies) - Development dependencies: `uv sync --group dev` - Bump version: `uv version --bump minor` (or `major`, `patch`) - this is the **only manual step** for a release. The GitHub Actions release workflow (`.github/workflows/release.yml`) automatically handles: manifest.json/docker-compose.yml version updates, git tag, Docker build & push, DXT extension, GitHub release, and PyPI publish. After the workflow completes, manually file a PR in the MCP registry to update the version. +- Install browser: `uv run patchright install chromium` - Run server locally: `uv run -m linkedin_mcp_server --no-headless` - Run via uvx (PyPI): `uvx linkedin-scraper-mcp` - Run in Docker: `docker run -it --rm -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp stickerdaniel/linkedin-mcp-server:latest` @@ -30,7 +31,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assistants to interact with LinkedIn through web scraping. The codebase follows a two-phase startup pattern: -1. **Authentication Phase** (`authentication.py`) - Validates LinkedIn session file exists +1. **Authentication Phase** (`authentication.py`) - Validates LinkedIn browser profile exists 2. 
**Server Runtime Phase** (`server.py`) - Runs FastMCP server with tool registration **Core Components:** @@ -38,9 +39,9 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis - `cli_main.py` - Entry point with CLI argument parsing and orchestration - `server.py` - FastMCP server setup and tool registration - `tools/` - LinkedIn scraping tools (person, company, job profiles) -- `drivers/browser.py` - Playwright browser management with session handling +- `drivers/browser.py` - Patchright browser management with persistent profile - `config/` - Configuration management (schema, loaders) -- `authentication.py` - LinkedIn session management +- `authentication.py` - LinkedIn profile-based authentication **Tool Categories:** @@ -61,8 +62,8 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis **Authentication Flow:** -- Uses session files stored at `~/.linkedin-mcp/session.json` -- Run with `--get-session` to create a session via browser login +- Uses persistent browser profile at `~/.linkedin-mcp/profile/` +- Run with `--get-session` to create a profile via browser login **Transport Modes:** @@ -73,20 +74,20 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis - **Python Version:** Requires Python 3.12+ - **Package Manager:** Uses `uv` for fast dependency resolution -- **Browser:** Uses Playwright with Chromium for browser automation +- **Browser:** Uses Patchright (anti-detection Playwright fork) with Chromium - **Logging:** Configurable levels, JSON format for non-interactive mode - **Error Handling:** Comprehensive exception handling for LinkedIn rate limits, captchas, etc. 
**Key Dependencies:** - `fastmcp` - MCP server framework -- `linkedin_scraper` - LinkedIn web scraping (v3 with Playwright) -- `playwright` - Browser automation +- `linkedin_scraper` - LinkedIn web scraping (v3 with Patchright) +- `patchright` - Anti-detection browser automation (Playwright fork) **Configuration:** - CLI arguments with comprehensive help (`--help`) -- Session stored at `~/.linkedin-mcp/session.json` +- Browser profile stored at `~/.linkedin-mcp/profile/` **Commit Message Format:** @@ -122,7 +123,7 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis When you need up-to-date information about technologies used in this project, use btca to query source repositories directly. -**Available resources**: fastmcp, linkedinScraper, playwright, pytest, ruff, ty, uv, inquirer, pythonDotenv, pyperclip, preCommit +**Available resources**: fastmcp, linkedinScraper, patchright, pytest, ruff, ty, uv, inquirer, pythonDotenv, pyperclip, preCommit ### Usage @@ -133,5 +134,5 @@ btca ask -r -q "" Use multiple `-r` flags to query multiple resources at once: ```bash -btca ask -r fastmcp -r playwright -q "How do I set up browser context with FastMCP tools?" +btca ask -r fastmcp -r patchright -q "How do I set up browser context with FastMCP tools?" ``` diff --git a/Dockerfile b/Dockerfile index 98e23460..f327018e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,13 +18,13 @@ COPY --chown=pwuser:pwuser . 
/app # Install git (needed for git-based dependencies in pyproject.toml) RUN apt-get update && apt-get install -y --no-install-recommends git && rm -rf /var/lib/apt/lists/* -# Set Playwright browser install location -ENV PLAYWRIGHT_BROWSERS_PATH=/opt/playwright -# Install dependencies and Playwright with ONLY Chromium (not Firefox/WebKit) -# --with-deps installs required system dependencies (fonts, libraries) via apt (needs root) +# Set Patchright browser install location +ENV PATCHRIGHT_BROWSERS_PATH=/opt/patchright +# Install dependencies, system libs for Chromium, and patched Chromium binary RUN uv sync --frozen && \ - uv run playwright install --with-deps chromium && \ - chmod -R 755 /opt/playwright + uv run playwright install-deps chromium && \ + uv run patchright install chromium && \ + chmod -R 755 /opt/patchright # Fix ownership of app directory (venv created by uv) RUN chown -R pwuser:pwuser /app diff --git a/README.md b/README.md index a394087a..b22b5f31 100644 --- a/README.md +++ b/README.md @@ -48,14 +48,17 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company | `close_session` | Close browser session and clean up resources | Working | > [!WARNING] -> The session file at `~/.linkedin-mcp/session.json` contains sensitive authentication data. Keep it secure and do not share it. +> The browser profile at `~/.linkedin-mcp/profile/` contains sensitive authentication data. Keep it secure and do not share it. + +> [!IMPORTANT] +> **Breaking change in v3.0:** This version uses [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python) with persistent browser profiles instead of Playwright with session files. Old `session.json` files and `LINKEDIN_COOKIE` env vars are no longer supported. Run `--get-session` again to create a new profile.

## ๐Ÿš€ uvx Setup (Recommended - Universal) -**Prerequisites:** Make sure you have [uv](https://docs.astral.sh/uv/) and Playwright `uvx playwright install chromium` installed. +**Prerequisites:** Make sure you have [uv](https://docs.astral.sh/uv/) and Patchright `uvx patchright install chromium` installed. ### Installation @@ -65,7 +68,7 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company uvx linkedin-scraper-mcp --get-session ``` -This opens a browser for you to log in manually (5 minute timeout for 2FA, captcha, etc.). The session is saved to `~/.linkedin-mcp/session.json`. +This opens a browser for you to log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile is saved to `~/.linkedin-mcp/profile/`. **Step 2: Run the server** @@ -101,15 +104,16 @@ uvx linkedin-scraper-mcp **CLI Options:** -- `--get-session [PATH]` - Open browser to log in and save session (default: ~/.linkedin-mcp/session.json) +- `--get-session` - Open browser to log in and save persistent profile - `--no-headless` - Show browser window (useful for debugging scraping issues) - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) - `--transport {stdio,streamable-http}` - Set transport mode - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) -- `--clear-session` - Clear stored LinkedIn session file +- `--clear-session` - Clear stored LinkedIn browser profile - `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) +- `--user-data-dir PATH` - Path to persistent browser profile directory (default: ~/.linkedin-mcp/profile) - `--chrome-path PATH` - Path to Chrome/Chromium executable (for custom browser installations) **Basic Usage Examples:** @@ -149,7 +153,7 @@ uvx linkedin-scraper-mcp --transport streamable-http --host 127.0.0.1 --port 808 **Session issues:** -- Session is stored at 
`~/.linkedin-mcp/session.json` +- Browser profile is stored at `~/.linkedin-mcp/profile/` - Make sure you have only one active LinkedIn session at a time **Login issues:** @@ -177,45 +181,17 @@ uvx linkedin-scraper-mcp --transport streamable-http --host 127.0.0.1 --port 808 **Prerequisites:** Make sure you have [Docker](https://www.docker.com/get-started/) installed and running. -### Authentication Options - -Docker runs headless (no browser window), so you need to authenticate using one of these methods: - -#### Option 1: Cookie Authentication (Simplest) - -Get your LinkedIn `li_at` cookie and pass it to Docker: - -```json -{ - "mcpServers": { - "linkedin": { - "command": "docker", - "args": ["run", "-i", "--rm", "-e", "LINKEDIN_COOKIE", "stickerdaniel/linkedin-mcp-server"], - "env": { - "LINKEDIN_COOKIE": "your_li_at_cookie_value" - } - } - } -} -``` - -**To get your `li_at` cookie:** - -1. Open LinkedIn in your browser in an **incognito** tab and log in -2. Open DevTools (F12) โ†’ Application โ†’ Cookies โ†’ linkedin.com -3. Copy the `li_at` cookie value - -#### Option 2: Session File (More Reliable) +### Authentication -Create a session file locally, then mount it into Docker. +Docker runs headless (no browser window), so you need to create a browser profile locally first and mount it into Docker. -**Step 1: Create session using uvx (one-time setup)** +**Step 1: Create profile using uvx (one-time setup)** ```bash uvx linkedin-scraper-mcp --get-session ``` -This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The session is saved to `~/.linkedin-mcp/session.json`. +This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile is saved to `~/.linkedin-mcp/profile/`. **Step 2: Configure Claude Desktop with Docker** @@ -235,12 +211,10 @@ This opens a browser window where you log in manually (5 minute timeout for 2FA, ``` > [!NOTE] -> Sessions may expire over time. 
If you encounter authentication issues, run `uvx linkedin-scraper-mcp --get-session` again locally, or use a fresh `li_at` cookie. +> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --get-session` again locally. > [!NOTE] -> **Why can't I run `--get-session` in Docker?** Docker containers don't have a display server. You have two options: -> 1. Create a session on your host using the [uvx setup](#-uvx-setup-recommended---universal) and mount it into Docker -> 2. Pass your `li_at` cookie via `LINKEDIN_COOKIE` (if you encounter auth challenges, use option 1 instead) +> **Why can't I run `--get-session` in Docker?** Docker containers don't have a display server. Create a profile on your host using the [uvx setup](#-uvx-setup-recommended---universal) and mount it into Docker. ### Docker Setup Help @@ -259,12 +233,13 @@ This opens a browser window where you log in manually (5 minute timeout for 2FA, - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) -- `--clear-session` - Clear stored LinkedIn session file +- `--clear-session` - Clear stored LinkedIn browser profile - `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) +- `--user-data-dir PATH` - Path to persistent browser profile directory (default: ~/.linkedin-mcp/profile) - `--chrome-path PATH` - Path to Chrome/Chromium executable (rarely needed in Docker) > [!NOTE] -> `--get-session` and `--no-headless` are not available in Docker (no display server). Use the [uvx setup](#-uvx-setup-recommended---universal) to create sessions. +> `--get-session` and `--no-headless` are not available in Docker (no display server). Use the [uvx setup](#-uvx-setup-recommended---universal) to create profiles. 
**HTTP Mode Example (for web-based MCP clients):** @@ -385,8 +360,8 @@ curl -LsSf https://astral.sh/uv/install.sh | sh uv sync uv sync --group dev -# 4. Install Playwright browser -uv run playwright install chromium +# 4. Install Patchright browser +uv run patchright install chromium # 5. Install pre-commit hooks uv run pre-commit install @@ -405,17 +380,17 @@ uv run -m linkedin_mcp_server **CLI Options:** -- `--get-session [PATH]` - Open browser to log in and save session (default: ~/.linkedin-mcp/session.json) +- `--get-session` - Open browser to log in and save persistent profile - `--no-headless` - Show browser window (useful for debugging scraping issues) - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) - `--transport {stdio,streamable-http}` - Set transport mode - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) -- `--clear-session` - Clear stored LinkedIn session file +- `--clear-session` - Clear stored LinkedIn browser profile - `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) - `--session-info` - Check if current session is valid and exit -- `--linkedin-cookie COOKIE` - LinkedIn session cookie (li_at) for authentication +- `--user-data-dir PATH` - Path to persistent browser profile directory (default: ~/.linkedin-mcp/profile) - `--slow-mo MS` - Delay between browser actions in milliseconds (default: 0, useful for debugging) - `--user-agent STRING` - Custom browser user agent - `--viewport WxH` - Browser viewport size (default: 1280x720) @@ -461,13 +436,13 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por **Session issues:** -- Session is stored in `~/.linkedin-mcp/session.json` -- Use `--clear-session` to clear the session and start fresh +- Browser profile is stored at `~/.linkedin-mcp/profile/` +- Use `--clear-session` to clear the profile and 
start fresh -**Python/Playwright issues:** +**Python/Patchright issues:** - Check Python version: `python --version` (should be 3.12+) -- Reinstall Playwright: `uv run playwright install chromium` +- Reinstall Patchright: `uv run patchright install chromium` - Reinstall dependencies: `uv sync --reinstall` **Timeout issues:** diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 16128902..6f9cea6c 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -12,29 +12,15 @@ A Model Context Protocol (MCP) server that connects AI assistants to LinkedIn. A ## Quick Start -### Option 1: Cookie Authentication (Simplest) +Create a browser profile locally, then mount it into Docker. -Pass your LinkedIn `li_at` cookie - session will be created and stored automatically. +**Step 1: Create profile using uvx (one-time setup)** -> **Note:** If you encounter authentication challenges, use Option 2 instead. - -```json -{ - "mcpServers": { - "linkedin": { - "command": "docker", - "args": ["run", "-i", "--rm", "-e", "LINKEDIN_COOKIE", "stickerdaniel/linkedin-mcp-server"], - "env": { - "LINKEDIN_COOKIE": "your_li_at_cookie_value" - } - } - } -} +```bash +uvx linkedin-scraper-mcp --get-session ``` -### Option 2: Browser Login via uvx - -Create a session using the [uvx setup](https://github.com/stickerdaniel/linkedin-mcp-server#-uvx-setup-recommended---universal), then mount it: +**Step 2: Configure Claude Desktop with Docker** ```json { @@ -51,13 +37,13 @@ Create a session using the [uvx setup](https://github.com/stickerdaniel/linkedin } ``` -> **Note:** Docker containers don't have a display server, so you can't use the `--get-session` command in Docker. +> **Note:** Docker containers don't have a display server, so you can't use the `--get-session` command in Docker. Create a profile on your host first. 
## Environment Variables | Variable | Default | Description | |----------|---------|-------------| -| `LINKEDIN_COOKIE` | - | LinkedIn `li_at` session cookie (required if no session file) | +| `USER_DATA_DIR` | `~/.linkedin-mcp/profile` | Path to persistent browser profile directory | | `LOG_LEVEL` | `WARNING` | Logging level: DEBUG, INFO, WARNING, ERROR | | `TIMEOUT` | `5000` | Browser timeout in milliseconds | | `USER_AGENT` | - | Custom browser user agent | @@ -76,10 +62,12 @@ Create a session using the [uvx setup](https://github.com/stickerdaniel/linkedin "mcpServers": { "linkedin": { "command": "docker", - "args": ["run", "-i", "--rm", "-e", "LINKEDIN_COOKIE", "-e", "TIMEOUT=10000", "stickerdaniel/linkedin-mcp-server"], - "env": { - "LINKEDIN_COOKIE": "your_li_at_cookie_value" - } + "args": [ + "run", "-i", "--rm", + "-v", "~/.linkedin-mcp:/home/pwuser/.linkedin-mcp", + "-e", "TIMEOUT=10000", + "stickerdaniel/linkedin-mcp-server" + ] } } } diff --git a/linkedin_mcp_server/authentication.py b/linkedin_mcp_server/authentication.py index 898e07a9..ec300036 100644 --- a/linkedin_mcp_server/authentication.py +++ b/linkedin_mcp_server/authentication.py @@ -1,82 +1,66 @@ """ Authentication logic for LinkedIn MCP Server. -Handles LinkedIn session management with file-based session persistence -and cookie-based authentication for Docker headless mode. +Handles LinkedIn session management with persistent browser profile. 
""" import logging +import shutil from pathlib import Path -from typing import Literal from linkedin_mcp_server.drivers.browser import ( - DEFAULT_SESSION_PATH, - session_exists, + DEFAULT_PROFILE_DIR, + profile_exists, ) from linkedin_mcp_server.exceptions import CredentialsNotFoundError -from linkedin_mcp_server.utils import get_linkedin_cookie logger = logging.getLogger(__name__) -AuthSource = Literal["session", "cookie"] - -def get_authentication_source() -> AuthSource: +def get_authentication_source() -> bool: """ - Check available authentication methods in priority order. - - Priority: - 1. Session file (most reliable) - 2. LINKEDIN_COOKIE env var (Docker headless) + Check if authentication is available via persistent profile. Returns: - String indicating auth source: "session" or "cookie" + True if profile exists Raises: CredentialsNotFoundError: If no authentication method available """ - # Priority 1: Session file - if session_exists(): - logger.info(f"Using session from {DEFAULT_SESSION_PATH}") - return "session" - - # Priority 2: Cookie from environment - if get_linkedin_cookie(): - logger.info("Using LINKEDIN_COOKIE from environment") - return "cookie" + if profile_exists(): + logger.info(f"Using persistent profile from {DEFAULT_PROFILE_DIR}") + return True raise CredentialsNotFoundError( "No LinkedIn authentication found.\n\n" "Options:\n" - " 1. Run with --get-session to create a session file (recommended)\n" - " 2. Set LINKEDIN_COOKIE environment variable with your li_at cookie\n" - " 3. Run with --no-headless to login interactively\n\n" + " 1. Run with --get-session to create a browser profile (recommended)\n" + " 2. 
Run with --no-headless to login interactively\n\n" "For Docker users:\n" - " Create session on host first: uvx linkedin-mcp-server --get-session\n" - " Then mount into Docker: -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp\n" - " Or set LINKEDIN_COOKIE environment variable: -e LINKEDIN_COOKIE=your_li_at" + " Create profile on host first: uvx linkedin-scraper-mcp --get-session\n" + " Then mount into Docker: -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp" ) -def clear_session(session_path: Path | None = None) -> bool: +def clear_profile(profile_dir: Path | None = None) -> bool: """ - Clear stored session file. + Clear stored browser profile directory. Args: - session_path: Path to session file + profile_dir: Path to profile directory Returns: True if clearing was successful """ - if session_path is None: - session_path = DEFAULT_SESSION_PATH + if profile_dir is None: + profile_dir = DEFAULT_PROFILE_DIR - if session_path.exists(): + if profile_dir.exists(): try: - session_path.unlink() - logger.info(f"Session cleared from {session_path}") + shutil.rmtree(profile_dir) + logger.info(f"Profile cleared from {profile_dir}") return True except OSError as e: - logger.warning(f"Could not clear session: {e}") + logger.warning(f"Could not clear profile: {e}") return False return True diff --git a/linkedin_mcp_server/cli_main.py b/linkedin_mcp_server/cli_main.py index 17d17fb0..e294a68a 100644 --- a/linkedin_mcp_server/cli_main.py +++ b/linkedin_mcp_server/cli_main.py @@ -2,7 +2,7 @@ LinkedIn MCP Server - Main CLI application entry point. Implements a simplified two-phase startup: -1. Authentication Check - Verify session file is available +1. Authentication Check - Verify browser profile is available 2. 
Server Runtime - MCP server startup with transport selection """ @@ -18,16 +18,16 @@ from linkedin_scraper.core.exceptions import AuthenticationError, RateLimitError from linkedin_mcp_server.authentication import ( - clear_session, + clear_profile, get_authentication_source, ) from linkedin_mcp_server.cli import print_claude_config from linkedin_mcp_server.config import get_config from linkedin_mcp_server.drivers.browser import ( - DEFAULT_SESSION_PATH, + DEFAULT_PROFILE_DIR, close_browser, get_or_create_browser, - session_exists, + profile_exists, set_headless, ) from linkedin_mcp_server.exceptions import CredentialsNotFoundError @@ -35,8 +35,6 @@ from linkedin_mcp_server.server import create_mcp_server from linkedin_mcp_server.setup import run_interactive_setup, run_session_creation -sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8") - logger = logging.getLogger(__name__) @@ -62,7 +60,7 @@ def choose_transport_interactive() -> Literal["stdio", "streamable-http"]: def clear_session_and_exit() -> None: - """Clear LinkedIn session and exit.""" + """Clear LinkedIn browser profile and exit.""" config = get_config() configure_logging( @@ -71,18 +69,18 @@ def clear_session_and_exit() -> None: ) version = get_version() - logger.info(f"LinkedIn MCP Server v{version} - Session Clear mode") + logger.info(f"LinkedIn MCP Server v{version} - Profile Clear mode") - if not session_exists(): - print("โ„น๏ธ No session file found") + if not profile_exists(): + print("โ„น๏ธ No browser profile found") print("Nothing to clear.") sys.exit(0) - print(f"๐Ÿ”‘ Clear LinkedIn session from {DEFAULT_SESSION_PATH}?") + print(f"๐Ÿ”‘ Clear LinkedIn browser profile from {DEFAULT_PROFILE_DIR}?") try: confirmation = ( - input("Are you sure you want to clear the session? (y/N): ").strip().lower() + input("Are you sure you want to clear the profile? 
(y/N): ").strip().lower() ) if confirmation not in ("y", "yes"): print("โŒ Operation cancelled") @@ -91,10 +89,10 @@ def clear_session_and_exit() -> None: print("\nโŒ Operation cancelled") sys.exit(0) - if clear_session(): - print("โœ… LinkedIn session cleared successfully!") + if clear_profile(): + print("โœ… LinkedIn browser profile cleared successfully!") else: - print("โŒ Failed to clear session") + print("โŒ Failed to clear profile") sys.exit(1) sys.exit(0) @@ -112,8 +110,8 @@ def get_session_and_exit() -> None: version = get_version() logger.info(f"LinkedIn MCP Server v{version} - Session Creation mode") - output_path = config.server.session_output_path - success = run_session_creation(output_path) + user_data_dir = config.browser.user_data_dir + success = run_session_creation(user_data_dir) sys.exit(0 if success else 1) @@ -130,10 +128,10 @@ def session_info_and_exit() -> None: version = get_version() logger.info(f"LinkedIn MCP Server v{version} - Session Info mode") - # Check if session file exists first - if not session_exists(): - print(f"โŒ No session file found at {DEFAULT_SESSION_PATH}") - print(" Run with --get-session to create a session") + # Check if profile directory exists first + if not profile_exists(): + print(f"โŒ No browser profile found at {DEFAULT_PROFILE_DIR}") + print(" Run with --get-session to create a profile") sys.exit(1) # Check if session is valid by testing login status @@ -151,10 +149,10 @@ async def check_session() -> bool: valid = asyncio.run(check_session()) if valid: - print(f"โœ… Session is valid: {DEFAULT_SESSION_PATH}") + print(f"โœ… Session is valid (profile: {DEFAULT_PROFILE_DIR})") sys.exit(0) else: - print(f"โŒ Session expired or invalid: {DEFAULT_SESSION_PATH}") + print(f"โŒ Session expired or invalid (profile: {DEFAULT_PROFILE_DIR})") print(" Run with --get-session to re-authenticate") sys.exit(1) @@ -163,7 +161,7 @@ def ensure_authentication_ready() -> None: """ Phase 1: Ensure authentication is ready. 
- Checks for existing session file. + Checks for existing browser profile. If not found, runs interactive setup in interactive mode. Raises: @@ -171,7 +169,7 @@ def ensure_authentication_ready() -> None: """ config = get_config() - # Check for existing session + # Check for existing profile try: get_authentication_source() return @@ -182,9 +180,9 @@ def ensure_authentication_ready() -> None: # No authentication found - try interactive setup if possible if not config.is_interactive: raise CredentialsNotFoundError( - "No LinkedIn session found.\n" + "No LinkedIn profile found.\n" "Options:\n" - " 1. Run with --get-session to create a session\n" + " 1. Run with --get-session to create a profile\n" " 2. Run with --no-headless to login interactively" ) @@ -214,6 +212,8 @@ def get_version() -> str: def main() -> None: """Main application entry point.""" + sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8") + config = get_config() # Configure logging diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index b01df598..bf53ada0 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -38,7 +38,6 @@ class EnvironmentKeys: HEADLESS = "HEADLESS" LOG_LEVEL = "LOG_LEVEL" TRANSPORT = "TRANSPORT" - LINKEDIN_COOKIE = "LINKEDIN_COOKIE" TIMEOUT = "TIMEOUT" USER_AGENT = "USER_AGENT" HOST = "HOST" @@ -47,6 +46,7 @@ class EnvironmentKeys: SLOW_MO = "SLOW_MO" VIEWPORT = "VIEWPORT" CHROME_PATH = "CHROME_PATH" + USER_DATA_DIR = "USER_DATA_DIR" def is_interactive_environment() -> bool: @@ -91,9 +91,9 @@ def load_from_env(config: AppConfig) -> AppConfig: f"Invalid TRANSPORT: '{transport_env}'. Must be 'stdio' or 'streamable-http'." 
) - # LinkedIn cookie for headless auth - if cookie := os.environ.get(EnvironmentKeys.LINKEDIN_COOKIE): - config.server.linkedin_cookie = cookie + # Persistent browser profile directory + if user_data_dir := os.environ.get(EnvironmentKeys.USER_DATA_DIR): + config.browser.user_data_dir = user_data_dir # Timeout for page operations (semantic validation in BrowserConfig.__post_init__) if timeout_env := os.environ.get(EnvironmentKeys.TIMEOUT): @@ -239,11 +239,8 @@ def load_from_args(config: AppConfig) -> AppConfig: # Session management parser.add_argument( "--get-session", - nargs="?", - const="~/.linkedin-mcp/session.json", - default=None, - metavar="PATH", - help="Login interactively and save session (default: ~/.linkedin-mcp/session.json)", + action="store_true", + help="Login interactively via browser and save persistent profile", ) parser.add_argument( @@ -255,14 +252,15 @@ def load_from_args(config: AppConfig) -> AppConfig: parser.add_argument( "--clear-session", action="store_true", - help="Clear stored LinkedIn session file", + help="Clear stored LinkedIn browser profile", ) parser.add_argument( - "--linkedin-cookie", + "--user-data-dir", type=str, default=None, - help="LinkedIn session cookie (li_at) for authentication", + metavar="PATH", + help="Path to persistent browser profile directory (default: ~/.linkedin-mcp/profile)", ) args = parser.parse_args() @@ -312,9 +310,8 @@ def load_from_args(config: AppConfig) -> AppConfig: config.browser.chrome_path = args.chrome_path # Session management - if args.get_session is not None: + if args.get_session: config.server.get_session = True - config.server.session_output_path = args.get_session if args.session_info: config.server.session_info = True @@ -322,8 +319,8 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.clear_session: config.server.clear_session = True - if args.linkedin_cookie: - config.server.linkedin_cookie = args.linkedin_cookie + if args.user_data_dir: + config.browser.user_data_dir = 
args.user_data_dir return config diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index 6afaf9fd..89490fe4 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -27,6 +27,7 @@ class BrowserConfig: viewport_height: int = 720 default_timeout: int = 5000 # Milliseconds for page operations chrome_path: str | None = None # Path to Chrome/Chromium executable + user_data_dir: str = "~/.linkedin-mcp/profile" # Persistent browser profile def validate(self) -> None: """Validate browser configuration values.""" @@ -62,15 +63,12 @@ class ServerConfig: transport_explicitly_set: bool = False log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR"] = "WARNING" get_session: bool = False - session_output_path: str | None = None session_info: bool = False # Check session validity and exit clear_session: bool = False # HTTP transport configuration host: str = "127.0.0.1" port: int = 8000 path: str = "/mcp" - # Cookie authentication - linkedin_cookie: str | None = None @dataclass diff --git a/linkedin_mcp_server/drivers/__init__.py b/linkedin_mcp_server/drivers/__init__.py index 288f89f9..98b2b888 100644 --- a/linkedin_mcp_server/drivers/__init__.py +++ b/linkedin_mcp_server/drivers/__init__.py @@ -1,38 +1,38 @@ """ Browser management package for LinkedIn scraping. -This package provides Playwright browser management using linkedin_scraper v3's -BrowserManager. It implements a singleton pattern for browser instances -to ensure session persistence across multiple tool calls while handling -authentication, session management, and proper resource cleanup. +This package provides Patchright browser management using linkedin_scraper v3's +BrowserManager with persistent context. It implements a singleton pattern for +browser instances to ensure profile persistence across multiple tool calls +while handling authentication and proper resource cleanup. 
Key Components: -- Playwright browser initialization via BrowserManager -- LinkedIn authentication with session persistence +- Patchright browser initialization via BrowserManager with persistent profile +- LinkedIn authentication with automatic profile persistence - Singleton pattern for browser reuse across tools - Automatic cleanup and resource management """ from linkedin_mcp_server.drivers.browser import ( - DEFAULT_SESSION_PATH, + DEFAULT_PROFILE_DIR, check_rate_limit, close_browser, ensure_authenticated, get_or_create_browser, + profile_exists, reset_browser_for_testing, - session_exists, set_headless, validate_session, ) __all__ = [ - "DEFAULT_SESSION_PATH", + "DEFAULT_PROFILE_DIR", "check_rate_limit", "close_browser", "ensure_authenticated", "get_or_create_browser", + "profile_exists", "reset_browser_for_testing", - "session_exists", "set_headless", "validate_session", ] diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 6423c257..57bcbfd8 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -1,9 +1,9 @@ """ -Playwright browser management for LinkedIn scraping. +Patchright browser management for LinkedIn scraping. This module provides async browser lifecycle management using linkedin_scraper v3's -BrowserManager. Implements a singleton pattern for browser reuse across tool calls -with session persistence via JSON files. +BrowserManager with persistent context. Implements a singleton pattern for browser +reuse across tool calls with automatic profile persistence. 
""" import logging @@ -13,18 +13,16 @@ AuthenticationError, BrowserManager, is_logged_in, - login_with_cookie, ) from linkedin_scraper.core import detect_rate_limit from linkedin_mcp_server.config import get_config -from linkedin_mcp_server.utils import get_linkedin_cookie logger = logging.getLogger(__name__) -# Default session file location -DEFAULT_SESSION_PATH = Path.home() / ".linkedin-mcp" / "session.json" +# Default persistent profile directory +DEFAULT_PROFILE_DIR = Path.home() / ".linkedin-mcp" / "profile" # Global browser instance (singleton) _browser: BrowserManager | None = None @@ -39,33 +37,32 @@ def _apply_browser_settings(browser: BrowserManager) -> None: async def get_or_create_browser( headless: bool | None = None, - session_path: Path | None = None, ) -> BrowserManager: """ Get existing browser or create and initialize a new one. Uses a singleton pattern to reuse the browser across tool calls. - Loads session from file if available. + Uses persistent context for automatic profile persistence. Args: headless: Run browser in headless mode. Defaults to config value. - session_path: Path to session file. 
Defaults to ~/.linkedin-mcp/session.json Returns: Initialized BrowserManager instance + + Raises: + AuthenticationError: If no valid authentication found """ global _browser, _headless if headless is not None: _headless = headless - if session_path is None: - session_path = DEFAULT_SESSION_PATH - if _browser is not None: return _browser config = get_config() + user_data_dir = Path(config.browser.user_data_dir).expanduser() viewport = { "width": config.browser.viewport_width, "height": config.browser.viewport_height, @@ -78,13 +75,15 @@ async def get_or_create_browser( logger.info("Using custom Chrome path: %s", config.browser.chrome_path) logger.info( - "Creating new browser (headless=%s, slow_mo=%sms, viewport=%sx%s)", + "Creating new browser (headless=%s, slow_mo=%sms, viewport=%sx%s, profile=%s)", _headless, config.browser.slow_mo, viewport["width"], viewport["height"], + user_data_dir, ) _browser = BrowserManager( + user_data_dir=user_data_dir, headless=_headless, slow_mo=config.browser.slow_mo, user_agent=config.browser.user_agent, @@ -93,31 +92,11 @@ async def get_or_create_browser( ) await _browser.start() - # Priority 1: Load session file if available - if session_path.exists(): - try: - await _browser.load_session(str(session_path)) - logger.info(f"Loaded session from {session_path}") - # Navigate to LinkedIn to validate session - await _browser.page.goto("https://www.linkedin.com/feed/") - if await is_logged_in(_browser.page): - _apply_browser_settings(_browser) - return _browser - logger.warning( - "Session loaded but expired, trying to create session from cookie" - ) - except Exception as e: - logger.warning(f"Failed to load session: {e}") - - # Priority 2: Use cookie from environment - if cookie := get_linkedin_cookie(): - try: - await login_with_cookie(_browser.page, cookie) - logger.info("Authenticated using LINKEDIN_COOKIE") - _apply_browser_settings(_browser) - return _browser - except Exception as e: - logger.warning(f"Cookie authentication 
failed: {e}") + # Navigate to LinkedIn to check authentication + await _browser.page.goto("https://www.linkedin.com/feed/") + if await is_logged_in(_browser.page): + _apply_browser_settings(_browser) + return _browser # No auth available - fail fast with clear error raise AuthenticationError( @@ -136,11 +115,11 @@ async def close_browser() -> None: logger.info("Browser closed") -def session_exists(session_path: Path | None = None) -> bool: - """Check if a session file exists.""" - if session_path is None: - session_path = DEFAULT_SESSION_PATH - return session_path.exists() +def profile_exists(profile_dir: Path | None = None) -> bool: + """Check if a persistent browser profile exists and is non-empty.""" + if profile_dir is None: + profile_dir = DEFAULT_PROFILE_DIR + return profile_dir.exists() and any(profile_dir.iterdir()) def set_headless(headless: bool) -> None: diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index 31965e74..a3eb1cdb 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -19,7 +19,6 @@ ) from linkedin_mcp_server.exceptions import ( - CookieAuthenticationError, CredentialsNotFoundError, LinkedInMCPError, SessionExpiredError, @@ -69,18 +68,11 @@ def convert_exception_to_response( "resolution": "Run with --get-session to create a new session", } - elif isinstance(exception, CookieAuthenticationError): - return { - "error": "cookie_auth_failed", - "message": str(exception), - "resolution": "Check your LINKEDIN_COOKIE value or create a session file", - } - elif isinstance(exception, AuthenticationError): return { "error": "authentication_failed", "message": str(exception), - "resolution": "Run with --get-session to re-authenticate (opens visible browser, not available in Docker), or set LINKEDIN_COOKIE environment variable.", + "resolution": "Run with --get-session to re-authenticate.", } elif isinstance(exception, RateLimitError): diff --git 
a/linkedin_mcp_server/exceptions.py b/linkedin_mcp_server/exceptions.py index 54dffe79..25c38ca2 100644 --- a/linkedin_mcp_server/exceptions.py +++ b/linkedin_mcp_server/exceptions.py @@ -26,20 +26,6 @@ def __init__(self, message: str | None = None): default_msg = ( "LinkedIn session has expired.\n\n" "To fix this:\n" - " 1. Run with --get-session to create a new session\n" - " 2. Or set a fresh LINKEDIN_COOKIE environment variable" - ) - super().__init__(message or default_msg) - - -class CookieAuthenticationError(LinkedInMCPError): - """Cookie-based authentication failed.""" - - def __init__(self, message: str | None = None): - default_msg = ( - "Cookie authentication failed. The cookie may be:\n" - " - Expired (cookies typically last 1-7 days)\n" - " - Invalid (check the format)\n" - " - From a different account" + " Run with --get-session to create a new session" ) super().__init__(message or default_msg) diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index b90c6fa2..df82f631 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -1,8 +1,8 @@ """ Interactive setup flows for LinkedIn MCP Server authentication. -Handles session creation through interactive browser login using Playwright. -Uses linkedin_scraper v3's wait_for_manual_login for authentication. +Handles session creation through interactive browser login using Patchright +with persistent context. Profile state auto-persists to user_data_dir. 
""" import asyncio @@ -12,38 +12,39 @@ from linkedin_scraper import BrowserManager, wait_for_manual_login from linkedin_scraper.core import warm_up_browser -from linkedin_mcp_server.drivers.browser import DEFAULT_SESSION_PATH +from linkedin_mcp_server.drivers.browser import DEFAULT_PROFILE_DIR logger = logging.getLogger(__name__) -async def interactive_login_and_save( - session_path: Path | None = None, warm_up: bool = True +async def interactive_login( + user_data_dir: Path | None = None, warm_up: bool = True ) -> bool: """ - Open browser for manual LinkedIn login and save session. + Open browser for manual LinkedIn login with persistent profile. Opens a non-headless browser, navigates to LinkedIn login page, and waits for user to complete authentication (including 2FA, captcha, etc.). + Profile state auto-persists to user_data_dir. Args: - session_path: Path to save session. Defaults to ~/.linkedin-mcp/session.json + user_data_dir: Path to browser profile. Defaults to ~/.linkedin-mcp/profile warm_up: Visit normal sites first to appear more human-like (default: True) Returns: - True if login was successful and session was saved + True if login was successful Raises: Exception: If login fails or times out """ - if session_path is None: - session_path = DEFAULT_SESSION_PATH + if user_data_dir is None: + user_data_dir = DEFAULT_PROFILE_DIR print("Opening browser for LinkedIn login...") print(" Please log in manually. 
You have 5 minutes to complete authentication.") print(" (This handles 2FA, captcha, and any security challenges)") - async with BrowserManager(headless=False) as browser: + async with BrowserManager(user_data_dir=user_data_dir, headless=False) as browser: # Warm up browser to appear more human-like and avoid security checkpoints if warm_up: print(" Warming up browser (visiting normal sites first)...") @@ -56,35 +57,30 @@ async def interactive_login_and_save( # 5 minute timeout (300000ms) allows time for 2FA, captcha, security challenges await wait_for_manual_login(browser.page, timeout=300000) - # Save session for future use - session_path.parent.mkdir(parents=True, exist_ok=True) - await browser.save_session(str(session_path)) - - print(f"Session saved to {session_path}") + print(f"Profile saved to {user_data_dir}") return True -def run_session_creation(output_path: str | None = None) -> bool: +def run_session_creation(user_data_dir: str | None = None) -> bool: """ - Create session via interactive login and save to file. + Create session via interactive login with persistent profile. Args: - output_path: Path to save session file. Defaults to ~/.linkedin-mcp/session.json + user_data_dir: Path to profile directory. 
Defaults to ~/.linkedin-mcp/profile Returns: True if session was created successfully """ - # Expand ~ in path - if output_path: - session_path = Path(output_path).expanduser() + if user_data_dir: + profile_dir = Path(user_data_dir).expanduser() else: - session_path = DEFAULT_SESSION_PATH + profile_dir = DEFAULT_PROFILE_DIR print("LinkedIn MCP Server - Session Creation") - print(f" Session will be saved to: {session_path}") + print(f" Profile will be saved to: {profile_dir}") try: - success = asyncio.run(interactive_login_and_save(session_path)) + success = asyncio.run(interactive_login(profile_dir)) return success except Exception as e: print(f"Session creation failed: {e}") @@ -102,7 +98,7 @@ def run_interactive_setup() -> bool: print(" Opening browser for manual login...") try: - return asyncio.run(interactive_login_and_save()) + return asyncio.run(interactive_login()) except Exception as e: print(f"Login failed: {e}") return False diff --git a/linkedin_mcp_server/utils/__init__.py b/linkedin_mcp_server/utils/__init__.py index 6ec0b995..2785acad 100644 --- a/linkedin_mcp_server/utils/__init__.py +++ b/linkedin_mcp_server/utils/__init__.py @@ -1,8 +1 @@ """Utility functions for LinkedIn MCP Server.""" - -import os - - -def get_linkedin_cookie() -> str | None: - """Get LinkedIn cookie from environment variable.""" - return os.environ.get("LINKEDIN_COOKIE") diff --git a/pyproject.toml b/pyproject.toml index ebf9c79e..8c399233 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ dependencies = [ "fastmcp>=2.14.0", "inquirer>=3.4.0", "linkedin-scraper", - "playwright>=1.40.0", + "patchright>=1.40.0", "pyperclip>=1.9.0", "python-dotenv>=1.1.1", ] @@ -63,7 +63,7 @@ exclude = ["assets*", "docs*", "tests*"] linkedin_mcp_server = ["py.typed"] [tool.uv.sources] -linkedin-scraper = { git = "https://github.com/stickerdaniel/linkedin_scraper.git", rev = "fix/rate-limit-false-positive" } +linkedin-scraper = { git = 
"https://github.com/stickerdaniel/linkedin_scraper.git", rev = "feat/patchright-persistent-context" } [dependency-groups] dev = [ diff --git a/tests/conftest.py b/tests/conftest.py index 0cd50c3e..14b1b520 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,3 @@ -import json - import pytest @@ -17,9 +15,9 @@ def reset_singletons(): @pytest.fixture(autouse=True) -def isolate_session_path(tmp_path, monkeypatch): - """Redirect DEFAULT_SESSION_PATH to tmp_path.""" - fake_session = tmp_path / "session.json" +def isolate_profile_dir(tmp_path, monkeypatch): + """Redirect DEFAULT_PROFILE_DIR to tmp_path.""" + fake_profile = tmp_path / "profile" for module in [ "linkedin_mcp_server.drivers.browser", "linkedin_mcp_server.authentication", @@ -27,22 +25,22 @@ def isolate_session_path(tmp_path, monkeypatch): "linkedin_mcp_server.setup", ]: try: - monkeypatch.setattr(f"{module}.DEFAULT_SESSION_PATH", fake_session) + monkeypatch.setattr(f"{module}.DEFAULT_PROFILE_DIR", fake_profile) except AttributeError: pass # Module may not be imported yet - return fake_session + return fake_profile @pytest.fixture -def session_file(isolate_session_path): - """Create valid session file.""" - isolate_session_path.parent.mkdir(parents=True, exist_ok=True) - isolate_session_path.write_text( - json.dumps( - {"cookies": [{"name": "li_at", "value": "test", "domain": ".linkedin.com"}]} - ) +def profile_dir(isolate_profile_dir): + """Create a non-empty profile directory.""" + isolate_profile_dir.mkdir(parents=True, exist_ok=True) + # Create a marker file so profile_exists() returns True + (isolate_profile_dir / "Default" / "Cookies").parent.mkdir( + parents=True, exist_ok=True ) - return isolate_session_path + (isolate_profile_dir / "Default" / "Cookies").write_text("placeholder") + return isolate_profile_dir @pytest.fixture diff --git a/tests/test_authentication.py b/tests/test_authentication.py index aa03009a..e02e9f2b 100644 --- a/tests/test_authentication.py +++ 
b/tests/test_authentication.py @@ -1,44 +1,31 @@ import pytest -from linkedin_mcp_server.authentication import clear_session, get_authentication_source +from linkedin_mcp_server.authentication import clear_profile, get_authentication_source from linkedin_mcp_server.exceptions import CredentialsNotFoundError -def test_get_auth_source_session(session_file, monkeypatch): +def test_get_auth_source_profile(profile_dir, monkeypatch): monkeypatch.setattr( - "linkedin_mcp_server.authentication.session_exists", lambda: True + "linkedin_mcp_server.authentication.profile_exists", lambda: True ) - assert get_authentication_source() == "session" - - -def test_get_auth_source_cookie(monkeypatch): - monkeypatch.setattr( - "linkedin_mcp_server.authentication.session_exists", lambda: False - ) - monkeypatch.setattr( - "linkedin_mcp_server.authentication.get_linkedin_cookie", lambda: "cookie" - ) - assert get_authentication_source() == "cookie" + assert get_authentication_source() is True def test_get_auth_source_none_raises(monkeypatch): monkeypatch.setattr( - "linkedin_mcp_server.authentication.session_exists", lambda: False - ) - monkeypatch.setattr( - "linkedin_mcp_server.authentication.get_linkedin_cookie", lambda: None + "linkedin_mcp_server.authentication.profile_exists", lambda: False ) with pytest.raises(CredentialsNotFoundError): get_authentication_source() -def test_clear_session_removes_file(session_file): - assert session_file.exists() - result = clear_session(session_file) +def test_clear_profile_removes_dir(profile_dir): + assert profile_dir.exists() + result = clear_profile(profile_dir) assert result is True - assert not session_file.exists() + assert not profile_dir.exists() -def test_clear_session_no_file(isolate_session_path): - result = clear_session(isolate_session_path) - assert result is True # No error even if file doesn't exist +def test_clear_profile_no_dir(isolate_profile_dir): + result = clear_profile(isolate_profile_dir) + assert result is True # No 
error even if dir doesn't exist diff --git a/tests/test_config.py b/tests/test_config.py index 55f0b387..48b78fd4 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -13,6 +13,7 @@ def test_defaults(self): config = BrowserConfig() assert config.headless is True assert config.default_timeout == 5000 + assert config.user_data_dir == "~/.linkedin-mcp/profile" def test_validate_passes(self): BrowserConfig().validate() # No error @@ -149,9 +150,9 @@ def test_load_from_env_invalid_viewport(self, monkeypatch): with pytest.raises(ConfigurationError, match="Invalid VIEWPORT"): load_from_env(AppConfig()) - def test_load_from_env_linkedin_cookie(self, monkeypatch): - monkeypatch.setenv("LINKEDIN_COOKIE", "test_cookie_value") + def test_load_from_env_user_data_dir(self, monkeypatch): + monkeypatch.setenv("USER_DATA_DIR", "/custom/profile") from linkedin_mcp_server.config.loaders import load_from_env config = load_from_env(AppConfig()) - assert config.server.linkedin_cookie == "test_cookie_value" + assert config.browser.user_data_dir == "/custom/profile" diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 657e7455..bcee0a9c 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -1,5 +1,4 @@ from linkedin_mcp_server.exceptions import ( - CookieAuthenticationError, CredentialsNotFoundError, LinkedInMCPError, SessionExpiredError, @@ -21,12 +20,6 @@ def test_session_expired_custom_message(): assert str(err) == "custom" -def test_cookie_auth_default_message(): - err = CookieAuthenticationError() - assert "cookie" in str(err).lower() - - def test_inheritance(): assert issubclass(SessionExpiredError, LinkedInMCPError) - assert issubclass(CookieAuthenticationError, LinkedInMCPError) assert issubclass(CredentialsNotFoundError, LinkedInMCPError) diff --git a/tests/test_utils.py b/tests/test_utils.py deleted file mode 100644 index d660283c..00000000 --- a/tests/test_utils.py +++ /dev/null @@ -1,12 +0,0 @@ -def 
test_get_linkedin_cookie_present(monkeypatch): - monkeypatch.setenv("LINKEDIN_COOKIE", "test_cookie") - from linkedin_mcp_server.utils import get_linkedin_cookie - - assert get_linkedin_cookie() == "test_cookie" - - -def test_get_linkedin_cookie_missing(monkeypatch): - monkeypatch.delenv("LINKEDIN_COOKIE", raising=False) - from linkedin_mcp_server.utils import get_linkedin_cookie - - assert get_linkedin_cookie() is None diff --git a/uv.lock b/uv.lock index ea44b0fd..9ee81afa 100644 --- a/uv.lock +++ b/uv.lock @@ -1012,11 +1012,11 @@ wheels = [ [[package]] name = "linkedin-scraper" version = "3.1.1" -source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git?rev=fix%2Frate-limit-false-positive#092aef732a1a276b61052e5cefccfcfea0c3695d" } +source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git?rev=feat%2Fpatchright-persistent-context#5505ad45bf772fa33a945f651ceed8c3675517db" } dependencies = [ { name = "aiofiles" }, { name = "lxml" }, - { name = "playwright" }, + { name = "patchright" }, { name = "pydantic" }, { name = "python-dotenv" }, { name = "requests" }, @@ -1030,7 +1030,7 @@ dependencies = [ { name = "fastmcp" }, { name = "inquirer" }, { name = "linkedin-scraper" }, - { name = "playwright" }, + { name = "patchright" }, { name = "pyperclip" }, { name = "python-dotenv" }, ] @@ -1051,8 +1051,8 @@ dev = [ requires-dist = [ { name = "fastmcp", specifier = ">=2.14.0" }, { name = "inquirer", specifier = ">=3.4.0" }, - { name = "linkedin-scraper", git = "https://github.com/stickerdaniel/linkedin_scraper.git?rev=fix%2Frate-limit-false-positive" }, - { name = "playwright", specifier = ">=1.40.0" }, + { name = "linkedin-scraper", git = "https://github.com/stickerdaniel/linkedin_scraper.git?rev=feat%2Fpatchright-persistent-context" }, + { name = "patchright", specifier = ">=1.40.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, { name = "python-dotenv", specifier = ">=1.1.1" }, ] @@ -1454,6 +1454,25 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, ] +[[package]] +name = "patchright" +version = "1.58.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet" }, + { name = "pyee" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/c6/b1d685ccce237e280d8549454a8b5760e58ab5ee88af9ef875fad2282845/patchright-1.58.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:caadeec5b4812f12db5e245e78b7c1bdd9c6b38d2c15a59fa3047b04e33a3e60", size = 42229561, upload-time = "2026-01-30T15:26:54.532Z" }, + { url = "https://files.pythonhosted.org/packages/61/13/e5726d38be9ecf9ed714346433f2536eb6423748836f4a22a6701b992ba0/patchright-1.58.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:af567d94d2d735be8fa88c6ff9418e46361d823f7b28c10c2823e51942739507", size = 41018089, upload-time = "2026-01-30T15:26:58.097Z" }, + { url = "https://files.pythonhosted.org/packages/6c/33/db35661268edc03381bbf61dcb3119f427591562ce45dce90d17e116ffb5/patchright-1.58.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:ccf8116a97dcef6e3865c9823f51965db069c931346afe5253e25d9486160a92", size = 42229561, upload-time = "2026-01-30T15:27:02.073Z" }, + { url = "https://files.pythonhosted.org/packages/ea/86/98d8f42d5186b6864144fb25e21da8aa7cffa5b9d1d76752276610b9ea58/patchright-1.58.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:832bee2fe48cf9dc07bb3b0f0d05eee923203f348cd98b14c2c515eece326734", size = 46213732, upload-time = "2026-01-30T15:27:06.187Z" }, + { url = "https://files.pythonhosted.org/packages/b9/b1/7094545c805a31235ef69316ccc910aa5ff5e940c41e85df588ca660f00d/patchright-1.58.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:431b1df8967b4919d326a3121445c47f15769bc6a10dcebaa699073eb7d125f9", size = 45942677, upload-time = "2026-01-30T15:27:09.981Z" }, + { url = "https://files.pythonhosted.org/packages/4a/11/e21a51c42969473237c92a47d5433b2c58db1ec2bbd3b340ddeb33ac718f/patchright-1.58.0-py3-none-win32.whl", hash = "sha256:5529f66d296e2894789c309a13750b1a20f468daeb7de511f91bbf54cac95d95", size = 36794461, upload-time = "2026-01-30T15:27:13.409Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a1/b7dff0669ce8814c690c67eee1b44b3cdb422593efbbbbc4bfe3bf10f9fa/patchright-1.58.0-py3-none-win_amd64.whl", hash = "sha256:e37109834056feb8e4e4918fb259d497dbfc37e03f9391c0d3cf1532f5fa9b7f", size = 36794467, upload-time = "2026-01-30T15:27:16.613Z" }, + { url = "https://files.pythonhosted.org/packages/91/2a/81ef2b079bbc925a935f2fd73dc1285c46c7eb35c5032a0d63b48d753c4a/patchright-1.58.0-py3-none-win_arm64.whl", hash = "sha256:b044efea1774beac8ee033583eac7181b86ea450da3a36d3039d7a1a428ac098", size = 33064382, upload-time = "2026-01-30T15:27:19.725Z" }, +] + [[package]] name = "pathable" version = "0.4.4" @@ -1481,25 +1500,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, ] -[[package]] -name = "playwright" -version = "1.57.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "greenlet" }, - { name = "pyee" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/b6/e17543cea8290ae4dced10be21d5a43c360096aa2cce0aa7039e60c50df3/playwright-1.57.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:9351c1ac3dfd9b3820fe7fc4340d96c0d3736bb68097b9b7a69bd45d25e9370c", size = 41985039, upload-time = "2025-12-09T08:06:18.408Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/04/ef95b67e1ff59c080b2effd1a9a96984d6953f667c91dfe9d77c838fc956/playwright-1.57.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a4a9d65027bce48eeba842408bcc1421502dfd7e41e28d207e94260fa93ca67e", size = 40775575, upload-time = "2025-12-09T08:06:22.105Z" }, - { url = "https://files.pythonhosted.org/packages/60/bd/5563850322a663956c927eefcf1457d12917e8f118c214410e815f2147d1/playwright-1.57.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:99104771abc4eafee48f47dac2369e0015516dc1ce8c409807d2dd440828b9a4", size = 41985042, upload-time = "2025-12-09T08:06:25.357Z" }, - { url = "https://files.pythonhosted.org/packages/56/61/3a803cb5ae0321715bfd5247ea871d25b32c8f372aeb70550a90c5f586df/playwright-1.57.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:284ed5a706b7c389a06caa431b2f0ba9ac4130113c3a779767dda758c2497bb1", size = 45975252, upload-time = "2025-12-09T08:06:29.186Z" }, - { url = "https://files.pythonhosted.org/packages/83/d7/b72eb59dfbea0013a7f9731878df8c670f5f35318cedb010c8a30292c118/playwright-1.57.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a1bae6c0a07839cdeaddbc0756b3b2b85e476c07945f64ece08f1f956a86f1", size = 45706917, upload-time = "2025-12-09T08:06:32.549Z" }, - { url = "https://files.pythonhosted.org/packages/e4/09/3fc9ebd7c95ee54ba6a68d5c0bc23e449f7235f4603fc60534a364934c16/playwright-1.57.0-py3-none-win32.whl", hash = "sha256:1dd93b265688da46e91ecb0606d36f777f8eadcf7fbef12f6426b20bf0c9137c", size = 36553860, upload-time = "2025-12-09T08:06:35.864Z" }, - { url = "https://files.pythonhosted.org/packages/58/d4/dcdfd2a33096aeda6ca0d15584800443dd2be64becca8f315634044b135b/playwright-1.57.0-py3-none-win_amd64.whl", hash = "sha256:6caefb08ed2c6f29d33b8088d05d09376946e49a73be19271c8cd5384b82b14c", size = 36553864, upload-time = "2025-12-09T08:06:38.915Z" }, - { url = 
"https://files.pythonhosted.org/packages/6a/60/fe31d7e6b8907789dcb0584f88be741ba388413e4fbce35f1eba4e3073de/playwright-1.57.0-py3-none-win_arm64.whl", hash = "sha256:5f065f5a133dbc15e6e7c71e7bc04f258195755b1c32a432b792e28338c8335e", size = 32837940, upload-time = "2025-12-09T08:06:42.268Z" }, -] - [[package]] name = "pluggy" version = "1.6.0" From 48d9547dc53867d2adf4bf7097ed2962c731c561 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 12 Feb 2026 14:53:46 +0100 Subject: [PATCH 359/565] polish the implementation --- Dockerfile | 6 +- README.md | 2 +- linkedin_mcp_server/authentication.py | 9 +- linkedin_mcp_server/cli_main.py | 55 ++++++----- linkedin_mcp_server/config/loaders.py | 10 +- linkedin_mcp_server/config/schema.py | 2 - linkedin_mcp_server/drivers/__init__.py | 2 + linkedin_mcp_server/drivers/browser.py | 26 ++++-- linkedin_mcp_server/error_handler.py | 4 +- linkedin_mcp_server/setup.py | 23 ++--- tests/conftest.py | 17 +++- tests/test_authentication.py | 38 +++++++- tests/test_browser_driver.py | 117 ++++++++++++++++++++++++ 13 files changed, 247 insertions(+), 64 deletions(-) create mode 100644 tests/test_browser_driver.py diff --git a/Dockerfile b/Dockerfile index f327018e..323e0ef9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,11 +18,11 @@ COPY --chown=pwuser:pwuser . 
/app # Install git (needed for git-based dependencies in pyproject.toml) RUN apt-get update && apt-get install -y --no-install-recommends git && rm -rf /var/lib/apt/lists/* -# Set Patchright browser install location -ENV PATCHRIGHT_BROWSERS_PATH=/opt/patchright +# Set browser install location (Patchright reads PLAYWRIGHT_BROWSERS_PATH internally) +ENV PLAYWRIGHT_BROWSERS_PATH=/opt/patchright # Install dependencies, system libs for Chromium, and patched Chromium binary RUN uv sync --frozen && \ - uv run playwright install-deps chromium && \ + uv run patchright install-deps chromium && \ uv run patchright install chromium && \ chmod -R 755 /opt/patchright diff --git a/README.md b/README.md index b22b5f31..557107cd 100644 --- a/README.md +++ b/README.md @@ -51,7 +51,7 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company > The browser profile at `~/.linkedin-mcp/profile/` contains sensitive authentication data. Keep it secure and do not share it. > [!IMPORTANT] -> **Breaking change in v3.0:** This version uses [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python) with persistent browser profiles instead of Playwright with session files. Old `session.json` files and `LINKEDIN_COOKIE` env vars are no longer supported. Run `--get-session` again to create a new profile. +> **Breaking change:** This version uses [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python) with persistent browser profiles instead of Playwright with session files. Old `session.json` files and `LINKEDIN_COOKIE` env vars are no longer supported. Run `--get-session` again to create a new profile.

diff --git a/linkedin_mcp_server/authentication.py b/linkedin_mcp_server/authentication.py index ec300036..0f79c80c 100644 --- a/linkedin_mcp_server/authentication.py +++ b/linkedin_mcp_server/authentication.py @@ -9,7 +9,7 @@ from pathlib import Path from linkedin_mcp_server.drivers.browser import ( - DEFAULT_PROFILE_DIR, + get_profile_dir, profile_exists, ) from linkedin_mcp_server.exceptions import CredentialsNotFoundError @@ -27,8 +27,9 @@ def get_authentication_source() -> bool: Raises: CredentialsNotFoundError: If no authentication method available """ - if profile_exists(): - logger.info(f"Using persistent profile from {DEFAULT_PROFILE_DIR}") + profile_dir = get_profile_dir() + if profile_exists(profile_dir): + logger.info(f"Using persistent profile from {profile_dir}") return True raise CredentialsNotFoundError( @@ -53,7 +54,7 @@ def clear_profile(profile_dir: Path | None = None) -> bool: True if clearing was successful """ if profile_dir is None: - profile_dir = DEFAULT_PROFILE_DIR + profile_dir = get_profile_dir() if profile_dir.exists(): try: diff --git a/linkedin_mcp_server/cli_main.py b/linkedin_mcp_server/cli_main.py index e294a68a..8de0be90 100644 --- a/linkedin_mcp_server/cli_main.py +++ b/linkedin_mcp_server/cli_main.py @@ -24,16 +24,16 @@ from linkedin_mcp_server.cli import print_claude_config from linkedin_mcp_server.config import get_config from linkedin_mcp_server.drivers.browser import ( - DEFAULT_PROFILE_DIR, close_browser, get_or_create_browser, + get_profile_dir, profile_exists, set_headless, ) from linkedin_mcp_server.exceptions import CredentialsNotFoundError from linkedin_mcp_server.logging_config import configure_logging from linkedin_mcp_server.server import create_mcp_server -from linkedin_mcp_server.setup import run_interactive_setup, run_session_creation +from linkedin_mcp_server.setup import run_interactive_setup, run_profile_creation logger = logging.getLogger(__name__) @@ -59,7 +59,7 @@ def choose_transport_interactive() -> 
Literal["stdio", "streamable-http"]: return answers["transport"] -def clear_session_and_exit() -> None: +def clear_profile_and_exit() -> None: """Clear LinkedIn browser profile and exit.""" config = get_config() @@ -71,12 +71,14 @@ def clear_session_and_exit() -> None: version = get_version() logger.info(f"LinkedIn MCP Server v{version} - Profile Clear mode") - if not profile_exists(): + profile_dir = get_profile_dir() + + if not profile_exists(profile_dir): print("โ„น๏ธ No browser profile found") print("Nothing to clear.") sys.exit(0) - print(f"๐Ÿ”‘ Clear LinkedIn browser profile from {DEFAULT_PROFILE_DIR}?") + print(f"๐Ÿ”‘ Clear LinkedIn browser profile from {profile_dir}?") try: confirmation = ( @@ -89,7 +91,7 @@ def clear_session_and_exit() -> None: print("\nโŒ Operation cancelled") sys.exit(0) - if clear_profile(): + if clear_profile(profile_dir): print("โœ… LinkedIn browser profile cleared successfully!") else: print("โŒ Failed to clear profile") @@ -98,8 +100,8 @@ def clear_session_and_exit() -> None: sys.exit(0) -def get_session_and_exit() -> None: - """Create session interactively and exit.""" +def get_profile_and_exit() -> None: + """Create profile interactively and exit.""" config = get_config() configure_logging( @@ -111,13 +113,13 @@ def get_session_and_exit() -> None: logger.info(f"LinkedIn MCP Server v{version} - Session Creation mode") user_data_dir = config.browser.user_data_dir - success = run_session_creation(user_data_dir) + success = run_profile_creation(user_data_dir) sys.exit(0 if success else 1) -def session_info_and_exit() -> None: - """Check session validity and display info, then exit.""" +def profile_info_and_exit() -> None: + """Check profile validity and display info, then exit.""" config = get_config() configure_logging( @@ -129,8 +131,9 @@ def session_info_and_exit() -> None: logger.info(f"LinkedIn MCP Server v{version} - Session Info mode") # Check if profile directory exists first - if not profile_exists(): - print(f"โŒ No 
browser profile found at {DEFAULT_PROFILE_DIR}") + profile_dir = get_profile_dir() + if not profile_exists(profile_dir): + print(f"โŒ No browser profile found at {profile_dir}") print(" Run with --get-session to create a profile") sys.exit(1) @@ -140,19 +143,27 @@ async def check_session() -> bool: set_headless(True) # Always check headless browser = await get_or_create_browser() valid = await is_logged_in(browser.page) - await close_browser() return valid - except Exception as e: - logger.error(f"Error checking session: {e}") + except AuthenticationError: return False + except Exception as e: + logger.exception(f"Unexpected error checking session: {e}") + raise + finally: + await close_browser() - valid = asyncio.run(check_session()) + try: + valid = asyncio.run(check_session()) + except Exception as e: + print(f"โŒ Could not validate session: {e}") + print(" Check logs and browser configuration.") + sys.exit(1) if valid: - print(f"โœ… Session is valid (profile: {DEFAULT_PROFILE_DIR})") + print(f"โœ… Session is valid (profile: {profile_dir})") sys.exit(0) else: - print(f"โŒ Session expired or invalid (profile: {DEFAULT_PROFILE_DIR})") + print(f"โŒ Session expired or invalid (profile: {profile_dir})") print(" Run with --get-session to re-authenticate") sys.exit(1) @@ -236,15 +247,15 @@ def main() -> None: # Handle --clear-session flag if config.server.clear_session: - clear_session_and_exit() + clear_profile_and_exit() # Handle --get-session flag if config.server.get_session: - get_session_and_exit() + get_profile_and_exit() # Handle --session-info flag if config.server.session_info: - session_info_and_exit() + profile_info_and_exit() logger.debug(f"Server configuration: {config}") diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index bf53ada0..2680a59d 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -95,7 +95,7 @@ def load_from_env(config: AppConfig) -> AppConfig: if 
user_data_dir := os.environ.get(EnvironmentKeys.USER_DATA_DIR): config.browser.user_data_dir = user_data_dir - # Timeout for page operations (semantic validation in BrowserConfig.__post_init__) + # Timeout for page operations (validated in BrowserConfig.validate()) if timeout_env := os.environ.get(EnvironmentKeys.TIMEOUT): try: config.browser.default_timeout = int(timeout_env) @@ -112,7 +112,7 @@ def load_from_env(config: AppConfig) -> AppConfig: if host_env := os.environ.get(EnvironmentKeys.HOST): config.server.host = host_env - # HTTP server port (range validation in AppConfig.__post_init__) + # HTTP server port (validated in AppConfig.validate()) if port_env := os.environ.get(EnvironmentKeys.PORT): try: config.server.port = int(port_env) @@ -123,7 +123,7 @@ def load_from_env(config: AppConfig) -> AppConfig: if path_env := os.environ.get(EnvironmentKeys.HTTP_PATH): config.server.path = path_env - # Slow motion delay for debugging (semantic validation in BrowserConfig.__post_init__) + # Slow motion delay for debugging (validated in BrowserConfig.validate()) if slow_mo_env := os.environ.get(EnvironmentKeys.SLOW_MO): try: config.browser.slow_mo = int(slow_mo_env) @@ -132,7 +132,7 @@ def load_from_env(config: AppConfig) -> AppConfig: f"Invalid SLOW_MO: '{slow_mo_env}'. Must be an integer." 
) - # Browser viewport (dimension validation in BrowserConfig.__post_init__) + # Browser viewport (validated in BrowserConfig.validate()) if viewport_env := os.environ.get(EnvironmentKeys.VIEWPORT): try: width, height = viewport_env.lower().split("x") @@ -292,7 +292,7 @@ def load_from_args(config: AppConfig) -> AppConfig: if args.user_agent: config.browser.user_agent = args.user_agent - # Viewport (dimension validation in BrowserConfig.__post_init__) + # Viewport (validated in BrowserConfig.validate()) if args.viewport: try: width, height = args.viewport.lower().split("x") diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index 89490fe4..d7d2c5f2 100644 --- a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -13,8 +13,6 @@ class ConfigurationError(Exception): """Raised when configuration validation fails.""" - pass - @dataclass class BrowserConfig: diff --git a/linkedin_mcp_server/drivers/__init__.py b/linkedin_mcp_server/drivers/__init__.py index 98b2b888..7b287cc2 100644 --- a/linkedin_mcp_server/drivers/__init__.py +++ b/linkedin_mcp_server/drivers/__init__.py @@ -19,6 +19,7 @@ close_browser, ensure_authenticated, get_or_create_browser, + get_profile_dir, profile_exists, reset_browser_for_testing, set_headless, @@ -31,6 +32,7 @@ "close_browser", "ensure_authenticated", "get_or_create_browser", + "get_profile_dir", "profile_exists", "reset_browser_for_testing", "set_headless", diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 57bcbfd8..39601b1c 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -82,7 +82,7 @@ async def get_or_create_browser( viewport["height"], user_data_dir, ) - _browser = BrowserManager( + browser = BrowserManager( user_data_dir=user_data_dir, headless=_headless, slow_mo=config.browser.slow_mo, @@ -90,17 +90,19 @@ async def get_or_create_browser( viewport=viewport, 
**launch_options, ) - await _browser.start() + await browser.start() # Navigate to LinkedIn to check authentication - await _browser.page.goto("https://www.linkedin.com/feed/") - if await is_logged_in(_browser.page): - _apply_browser_settings(_browser) + await browser.page.goto("https://www.linkedin.com/feed/") + if await is_logged_in(browser.page): + _apply_browser_settings(browser) + _browser = browser # Assign only after auth succeeds return _browser - # No auth available - fail fast with clear error + # Auth failed โ€” clean up and fail fast + await browser.close() raise AuthenticationError( - "No authentication found. Run with --get-session to create a session." + "No authentication found. Run with --get-session to create a profile." ) @@ -115,11 +117,17 @@ async def close_browser() -> None: logger.info("Browser closed") +def get_profile_dir() -> Path: + """Get the resolved profile directory from config.""" + config = get_config() + return Path(config.browser.user_data_dir).expanduser() + + def profile_exists(profile_dir: Path | None = None) -> bool: """Check if a persistent browser profile exists and is non-empty.""" if profile_dir is None: - profile_dir = DEFAULT_PROFILE_DIR - return profile_dir.exists() and any(profile_dir.iterdir()) + profile_dir = get_profile_dir() + return profile_dir.is_dir() and any(profile_dir.iterdir()) def set_headless(headless: bool) -> None: diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index a3eb1cdb..73561eab 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -58,14 +58,14 @@ def convert_exception_to_response( return { "error": "authentication_not_found", "message": str(exception), - "resolution": "Run with --get-session to create a session file", + "resolution": "Run with --get-session to create a browser profile.", } elif isinstance(exception, SessionExpiredError): return { "error": "session_expired", "message": str(exception), - 
"resolution": "Run with --get-session to create a new session", + "resolution": "Run with --get-session to create a new browser profile.", } elif isinstance(exception, AuthenticationError): diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index df82f631..a9b9c273 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -6,15 +6,12 @@ """ import asyncio -import logging from pathlib import Path from linkedin_scraper import BrowserManager, wait_for_manual_login from linkedin_scraper.core import warm_up_browser -from linkedin_mcp_server.drivers.browser import DEFAULT_PROFILE_DIR - -logger = logging.getLogger(__name__) +from linkedin_mcp_server.drivers.browser import get_profile_dir async def interactive_login( @@ -28,7 +25,7 @@ async def interactive_login( Profile state auto-persists to user_data_dir. Args: - user_data_dir: Path to browser profile. Defaults to ~/.linkedin-mcp/profile + user_data_dir: Path to browser profile. Defaults to config's user_data_dir. warm_up: Visit normal sites first to appear more human-like (default: True) Returns: @@ -38,7 +35,7 @@ async def interactive_login( Exception: If login fails or times out """ if user_data_dir is None: - user_data_dir = DEFAULT_PROFILE_DIR + user_data_dir = get_profile_dir() print("Opening browser for LinkedIn login...") print(" Please log in manually. You have 5 minutes to complete authentication.") @@ -61,29 +58,29 @@ async def interactive_login( return True -def run_session_creation(user_data_dir: str | None = None) -> bool: +def run_profile_creation(user_data_dir: str | None = None) -> bool: """ - Create session via interactive login with persistent profile. + Create profile via interactive login with persistent context. Args: - user_data_dir: Path to profile directory. Defaults to ~/.linkedin-mcp/profile + user_data_dir: Path to profile directory. Defaults to config's user_data_dir. 
Returns: - True if session was created successfully + True if profile was created successfully """ if user_data_dir: profile_dir = Path(user_data_dir).expanduser() else: - profile_dir = DEFAULT_PROFILE_DIR + profile_dir = get_profile_dir() - print("LinkedIn MCP Server - Session Creation") + print("LinkedIn MCP Server - Profile Creation") print(f" Profile will be saved to: {profile_dir}") try: success = asyncio.run(interactive_login(profile_dir)) return success except Exception as e: - print(f"Session creation failed: {e}") + print(f"Profile creation failed: {e}") return False diff --git a/tests/conftest.py b/tests/conftest.py index 14b1b520..cddf48e9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -16,8 +16,10 @@ def reset_singletons(): @pytest.fixture(autouse=True) def isolate_profile_dir(tmp_path, monkeypatch): - """Redirect DEFAULT_PROFILE_DIR to tmp_path.""" + """Redirect profile directory to tmp_path via config and DEFAULT_PROFILE_DIR.""" fake_profile = tmp_path / "profile" + + # Patch DEFAULT_PROFILE_DIR for any code still referencing the constant for module in [ "linkedin_mcp_server.drivers.browser", "linkedin_mcp_server.authentication", @@ -28,6 +30,19 @@ def isolate_profile_dir(tmp_path, monkeypatch): monkeypatch.setattr(f"{module}.DEFAULT_PROFILE_DIR", fake_profile) except AttributeError: pass # Module may not be imported yet + + # Patch get_profile_dir() in all modules that import it + for gp_module in [ + "linkedin_mcp_server.drivers.browser", + "linkedin_mcp_server.authentication", + "linkedin_mcp_server.cli_main", + "linkedin_mcp_server.setup", + ]: + try: + monkeypatch.setattr(f"{gp_module}.get_profile_dir", lambda: fake_profile) + except AttributeError: + pass + return fake_profile diff --git a/tests/test_authentication.py b/tests/test_authentication.py index e02e9f2b..de1c146e 100644 --- a/tests/test_authentication.py +++ b/tests/test_authentication.py @@ -1,24 +1,58 @@ import pytest from linkedin_mcp_server.authentication import 
clear_profile, get_authentication_source +from linkedin_mcp_server.drivers.browser import profile_exists from linkedin_mcp_server.exceptions import CredentialsNotFoundError +# --- profile_exists() tests --- + + +def test_profile_exists_missing_dir(tmp_path): + """Missing directory returns False.""" + assert profile_exists(tmp_path / "nonexistent") is False + + +def test_profile_exists_empty_dir(tmp_path): + """Empty directory returns False.""" + empty = tmp_path / "empty" + empty.mkdir() + assert profile_exists(empty) is False + + +def test_profile_exists_non_empty_dir(profile_dir): + """Non-empty directory returns True.""" + assert profile_exists(profile_dir) is True + + +def test_profile_exists_file_path(tmp_path): + """A file (not directory) returns False.""" + f = tmp_path / "not_a_dir" + f.write_text("data") + assert profile_exists(f) is False + + +# --- get_authentication_source() tests --- + + def test_get_auth_source_profile(profile_dir, monkeypatch): monkeypatch.setattr( - "linkedin_mcp_server.authentication.profile_exists", lambda: True + "linkedin_mcp_server.authentication.profile_exists", lambda _dir=None: True ) assert get_authentication_source() is True def test_get_auth_source_none_raises(monkeypatch): monkeypatch.setattr( - "linkedin_mcp_server.authentication.profile_exists", lambda: False + "linkedin_mcp_server.authentication.profile_exists", lambda _dir=None: False ) with pytest.raises(CredentialsNotFoundError): get_authentication_source() +# --- clear_profile() tests --- + + def test_clear_profile_removes_dir(profile_dir): assert profile_dir.exists() result = clear_profile(profile_dir) diff --git a/tests/test_browser_driver.py b/tests/test_browser_driver.py new file mode 100644 index 00000000..8184d5d1 --- /dev/null +++ b/tests/test_browser_driver.py @@ -0,0 +1,117 @@ +"""Tests for linkedin_mcp_server.drivers.browser singleton lifecycle.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from 
linkedin_mcp_server.config.schema import AppConfig +from linkedin_mcp_server.drivers.browser import ( + get_or_create_browser, + reset_browser_for_testing, +) + + +@pytest.fixture(autouse=True) +def _reset_browser(): + """Ensure clean singleton state for each test.""" + reset_browser_for_testing() + yield + reset_browser_for_testing() + + +@pytest.fixture(autouse=True) +def _mock_config(monkeypatch, tmp_path): + """Provide a test config so get_config() never triggers argparse.""" + config = AppConfig() + config.browser.user_data_dir = str(tmp_path / "profile") + monkeypatch.setattr( + "linkedin_mcp_server.drivers.browser.get_config", lambda: config + ) + + +def _make_mock_browser(*, logged_in: bool = True) -> MagicMock: + """Create a mock BrowserManager with controllable login state.""" + browser = MagicMock() + browser.start = AsyncMock() + browser.close = AsyncMock() + browser.page = MagicMock() + browser.page.goto = AsyncMock() + browser.page.set_default_timeout = MagicMock() + return browser + + +@pytest.mark.asyncio +async def test_get_or_create_browser_auth_success(monkeypatch): + """Successful auth assigns singleton and returns browser.""" + mock_browser = _make_mock_browser() + + with ( + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + return_value=mock_browser, + ), + patch( + "linkedin_mcp_server.drivers.browser.is_logged_in", + new_callable=AsyncMock, + return_value=True, + ), + ): + result = await get_or_create_browser() + + assert result is mock_browser + mock_browser.start.assert_awaited_once() + mock_browser.page.goto.assert_awaited_once() + + +@pytest.mark.asyncio +async def test_get_or_create_browser_auth_failure_cleans_up(monkeypatch): + """Failed auth closes browser and does NOT assign singleton.""" + from linkedin_scraper import AuthenticationError + + mock_browser = _make_mock_browser() + + with ( + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + return_value=mock_browser, + ), + patch( + 
"linkedin_mcp_server.drivers.browser.is_logged_in", + new_callable=AsyncMock, + return_value=False, + ), + pytest.raises(AuthenticationError), + ): + await get_or_create_browser() + + # Browser must be closed on failure + mock_browser.close.assert_awaited_once() + + # Singleton must NOT be set โ€” next call should create fresh browser + from linkedin_mcp_server.drivers.browser import _browser + + assert _browser is None + + +@pytest.mark.asyncio +async def test_singleton_returns_existing_browser(monkeypatch): + """Second call returns the same browser instance (singleton).""" + mock_browser = _make_mock_browser() + + with ( + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + return_value=mock_browser, + ) as ctor, + patch( + "linkedin_mcp_server.drivers.browser.is_logged_in", + new_callable=AsyncMock, + return_value=True, + ), + ): + first = await get_or_create_browser() + second = await get_or_create_browser() + + assert first is second + # Constructor should only be called once + ctor.assert_called_once() From 03f0e44282d32107222ea3f33a1c446ab5b948e4 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Feb 2026 00:30:59 +0100 Subject: [PATCH 360/565] chore: bump version to 3.0.0 --- pyproject.toml | 2 +- uv.lock | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8c399233..6182cdf5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "2.3.7" +version = "3.0.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." 
readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 9ee81afa..79bc414e 100644 --- a/uv.lock +++ b/uv.lock @@ -1012,7 +1012,7 @@ wheels = [ [[package]] name = "linkedin-scraper" version = "3.1.1" -source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git?rev=feat%2Fpatchright-persistent-context#5505ad45bf772fa33a945f651ceed8c3675517db" } +source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git?rev=feat%2Fpatchright-persistent-context#6df8f338cf68a1b7afd52f752b0666c5d03c1cb5" } dependencies = [ { name = "aiofiles" }, { name = "lxml" }, @@ -1024,7 +1024,7 @@ dependencies = [ [[package]] name = "linkedin-scraper-mcp" -version = "2.3.7" +version = "3.0.0" source = { editable = "." } dependencies = [ { name = "fastmcp" }, From e6a55404ee41621bbd132e392e432c2261f6ca52 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 12 Feb 2026 23:33:07 +0000 Subject: [PATCH 361/565] chore: update manifest.json and docker-compose.yml to v3.0.0 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index b8468a88..4b6527ce 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:2.3.7 + image: stickerdaniel/linkedin-mcp-server:3.0.0 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index de7b57ae..c48b54d6 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "2.3.7", + "version": "3.0.0", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:2.3.7", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. 
Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:3.0.0", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:2.3.7" + "stickerdaniel/linkedin-mcp-server:3.0.0" ] } }, From 994f9836724c8b66d66a1596036189eff07f273c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Feb 2026 00:57:36 +0100 Subject: [PATCH 362/565] fix(deps): use person scraper fix --- pyproject.toml | 2 +- uv.lock | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 6182cdf5..bca61302 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,7 +63,7 @@ exclude = ["assets*", "docs*", "tests*"] linkedin_mcp_server = ["py.typed"] [tool.uv.sources] -linkedin-scraper = { git = "https://github.com/stickerdaniel/linkedin_scraper.git", rev = "feat/patchright-persistent-context" } +linkedin-scraper = { git = "https://github.com/stickerdaniel/linkedin_scraper.git", rev = "02-13-fix_person_quick_fix_for_person_scraper_further_testing_required" } [dependency-groups] dev = [ diff --git a/uv.lock b/uv.lock index 79bc414e..6a2a0cdf 100644 --- a/uv.lock +++ b/uv.lock @@ -1012,7 +1012,7 @@ wheels = [ [[package]] name = "linkedin-scraper" version = "3.1.1" -source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git?rev=feat%2Fpatchright-persistent-context#6df8f338cf68a1b7afd52f752b0666c5d03c1cb5" } +source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git?rev=02-13-fix_person_quick_fix_for_person_scraper_further_testing_required#69550fdead51ee6edd148b76bc728bdcabb792cf" } dependencies = [ { name = "aiofiles" }, { name = "lxml" }, @@ -1051,7 +1051,7 @@ dev = [ requires-dist = [ { name = "fastmcp", specifier = ">=2.14.0" }, { name = "inquirer", specifier = ">=3.4.0" }, 
- { name = "linkedin-scraper", git = "https://github.com/stickerdaniel/linkedin_scraper.git?rev=feat%2Fpatchright-persistent-context" }, + { name = "linkedin-scraper", git = "https://github.com/stickerdaniel/linkedin_scraper.git?rev=02-13-fix_person_quick_fix_for_person_scraper_further_testing_required" }, { name = "patchright", specifier = ">=1.40.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, { name = "python-dotenv", specifier = ">=1.1.1" }, From 8f9cf974e6f23e176efa50237b328f7a743df5c7 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Feb 2026 01:07:15 +0100 Subject: [PATCH 363/565] chore: bump version to 3.0.1 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index bca61302..ac7408fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "3.0.0" +version = "3.0.1" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 6a2a0cdf..843a392e 100644 --- a/uv.lock +++ b/uv.lock @@ -1024,7 +1024,7 @@ dependencies = [ [[package]] name = "linkedin-scraper-mcp" -version = "3.0.0" +version = "3.0.1" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From 030c1bbf41d56e7a1817acc4c43dc42ceb0f4ca8 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 13 Feb 2026 00:10:54 +0000 Subject: [PATCH 364/565] chore: update manifest.json and docker-compose.yml to v3.0.1 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 4b6527ce..bb278960 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:3.0.0 + image: stickerdaniel/linkedin-mcp-server:3.0.1 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index c48b54d6..f130583c 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "3.0.0", + "version": "3.0.1", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:3.0.0", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:3.0.1", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:3.0.0" + "stickerdaniel/linkedin-mcp-server:3.0.1" ] } }, From e3ffbe98866fa8fd7156c3d1a8498f9432d61aed Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Feb 2026 14:12:46 +0100 Subject: [PATCH 365/565] fix(deps)!: switch to linkedin-scraper-patchright from PyPI --- pyproject.toml | 7 ++----- uv.lock | 36 ++++++++++++++++++++---------------- 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index ac7408fe..e89e4f70 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "3.0.1" +version = "3.0.2" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." 
readme = "README.md" requires-python = ">=3.12" @@ -34,7 +34,7 @@ classifiers = [ dependencies = [ "fastmcp>=2.14.0", "inquirer>=3.4.0", - "linkedin-scraper", + "linkedin-scraper-patchright>=3.1.1", "patchright>=1.40.0", "pyperclip>=1.9.0", "python-dotenv>=1.1.1", @@ -62,9 +62,6 @@ exclude = ["assets*", "docs*", "tests*"] [tool.setuptools.package-data] linkedin_mcp_server = ["py.typed"] -[tool.uv.sources] -linkedin-scraper = { git = "https://github.com/stickerdaniel/linkedin_scraper.git", rev = "02-13-fix_person_quick_fix_for_person_scraper_further_testing_required" } - [dependency-groups] dev = [ "aiohttp>=3.12.13", diff --git a/uv.lock b/uv.lock index 843a392e..64897126 100644 --- a/uv.lock +++ b/uv.lock @@ -1009,27 +1009,14 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/81/db/e655086b7f3a705df045bf0933bdd9c2f79bb3c97bfef1384598bb79a217/keyring-25.7.0-py3-none-any.whl", hash = "sha256:be4a0b195f149690c166e850609a477c532ddbfbaed96a404d4e43f8d5e2689f", size = 39160, upload-time = "2025-11-16T16:26:08.402Z" }, ] -[[package]] -name = "linkedin-scraper" -version = "3.1.1" -source = { git = "https://github.com/stickerdaniel/linkedin_scraper.git?rev=02-13-fix_person_quick_fix_for_person_scraper_further_testing_required#69550fdead51ee6edd148b76bc728bdcabb792cf" } -dependencies = [ - { name = "aiofiles" }, - { name = "lxml" }, - { name = "patchright" }, - { name = "pydantic" }, - { name = "python-dotenv" }, - { name = "requests" }, -] - [[package]] name = "linkedin-scraper-mcp" -version = "3.0.1" +version = "3.0.2" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, { name = "inquirer" }, - { name = "linkedin-scraper" }, + { name = "linkedin-scraper-patchright" }, { name = "patchright" }, { name = "pyperclip" }, { name = "python-dotenv" }, @@ -1051,7 +1038,7 @@ dev = [ requires-dist = [ { name = "fastmcp", specifier = ">=2.14.0" }, { name = "inquirer", specifier = ">=3.4.0" }, - { name = "linkedin-scraper", git = "https://github.com/stickerdaniel/linkedin_scraper.git?rev=02-13-fix_person_quick_fix_for_person_scraper_further_testing_required" }, + { name = "linkedin-scraper-patchright", specifier = ">=3.1.1" }, { name = "patchright", specifier = ">=1.40.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, { name = "python-dotenv", specifier = ">=1.1.1" }, @@ -1069,6 +1056,23 @@ dev = [ { name = "ty", specifier = ">=0.0.1a12" }, ] +[[package]] +name = "linkedin-scraper-patchright" +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiofiles" }, + { name = "lxml" }, + { name = "patchright" }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/fe/35b02ca3ef02140c39b79e2effe7d27679311106c16640e8bc0b17056f06/linkedin_scraper_patchright-3.1.1.tar.gz", hash = "sha256:5919678907a2ea262adabff342a9b3f6b0b173af44a3dc6b6cbadaa430ac60d6", size = 47996, upload-time = "2026-02-13T13:12:09.405Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/08/0009c54e3cc6504cd5eb40af7dad20826331aef6e1993d575c12b1a6e316/linkedin_scraper_patchright-3.1.1-py3-none-any.whl", hash = "sha256:a9ae51bae3d39e633848def98e6bffd7bb13a8f1c650b1f5ec3cef98182b3b3e", size = 54616, upload-time = "2026-02-13T13:12:07.024Z" }, +] + [[package]] name = "lupa" version = "2.6" From 0122904339a89954e4bd5ea2d90723df4eb40268 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 13 Feb 2026 13:13:41 +0000 Subject: [PATCH 
366/565] chore: update manifest.json and docker-compose.yml to v3.0.2 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index bb278960..af84519f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:3.0.1 + image: stickerdaniel/linkedin-mcp-server:3.0.2 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index f130583c..6fbfab1d 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "3.0.1", + "version": "3.0.2", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:3.0.1", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:3.0.2", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:3.0.1" + "stickerdaniel/linkedin-mcp-server:3.0.2" ] } }, From 0b7eb46fb0c39fd6255568a766aec43dcfe94e35 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Feb 2026 15:26:01 +0100 Subject: [PATCH 367/565] feat: add cookie bridge for cross-platform Docker portability --- .gitignore | 3 +++ linkedin_mcp_server/drivers/browser.py | 15 +++++++++++++++ linkedin_mcp_server/setup.py | 15 +++++++++++++++ pyproject.toml | 2 +- tests/test_browser_driver.py | 2 ++ uv.lock | 8 ++++---- 6 files changed, 40 insertions(+), 5 deletions(-) diff --git a/.gitignore b/.gitignore index 99ef49d5..7559fe2e 100644 --- a/.gitignore +++ b/.gitignore @@ -199,3 +199,6 @@ cython_debug/ # opencode .opencode/plans + +# Portable cookie file (contains session data) +cookies.json diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 39601b1c..ffbcb0eb 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -99,6 +99,16 @@ async def get_or_create_browser( _browser = browser # Assign only after auth succeeds return _browser + # Auth failed โ€” try importing portable cookies 
(cross-platform support) + logger.info("Native auth failed, attempting portable cookie import...") + if await browser.import_cookies(): + await browser.page.goto("https://www.linkedin.com/feed/") + if await is_logged_in(browser.page): + logger.info("Authentication recovered via portable cookies") + _apply_browser_settings(browser) + _browser = browser + return _browser + # Auth failed โ€” clean up and fail fast await browser.close() raise AuthenticationError( @@ -112,6 +122,11 @@ async def close_browser() -> None: if _browser is not None: logger.info("Closing browser...") + # Export cookies before closing to keep portable file fresh + try: + await _browser.export_cookies() + except Exception: + logger.debug("Cookie export on close skipped", exc_info=True) await _browser.close() _browser = None logger.info("Browser closed") diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index a9b9c273..afd5b8f1 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -54,6 +54,21 @@ async def interactive_login( # 5 minute timeout (300000ms) allows time for 2FA, captcha, security challenges await wait_for_manual_login(browser.page, timeout=300000) + # Wait for persistent context to flush cookies to disk + await asyncio.sleep(2) + + # Verify session cookie was persisted + cookies = await browser.context.cookies() + li_at = [c for c in cookies if c["name"] == "li_at"] + if not li_at: + print(" Warning: Session cookie not found. 
Login may not have persisted.") + print(" Waiting longer for cookie propagation...") + await asyncio.sleep(5) + + # Export cookies for cross-platform portability (macOS -> Docker) + if await browser.export_cookies(): + print(" Cookies exported for Docker portability") + print(f"Profile saved to {user_data_dir}") return True diff --git a/pyproject.toml b/pyproject.toml index e89e4f70..5dfd3477 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,7 +34,7 @@ classifiers = [ dependencies = [ "fastmcp>=2.14.0", "inquirer>=3.4.0", - "linkedin-scraper-patchright>=3.1.1", + "linkedin-scraper-patchright>=3.1.2", "patchright>=1.40.0", "pyperclip>=1.9.0", "python-dotenv>=1.1.1", diff --git a/tests/test_browser_driver.py b/tests/test_browser_driver.py index 8184d5d1..9ddca981 100644 --- a/tests/test_browser_driver.py +++ b/tests/test_browser_driver.py @@ -37,6 +37,8 @@ def _make_mock_browser(*, logged_in: bool = True) -> MagicMock: browser.page = MagicMock() browser.page.goto = AsyncMock() browser.page.set_default_timeout = MagicMock() + browser.import_cookies = AsyncMock(return_value=False) + browser.export_cookies = AsyncMock(return_value=False) return browser diff --git a/uv.lock b/uv.lock index 64897126..bc702012 100644 --- a/uv.lock +++ b/uv.lock @@ -1038,7 +1038,7 @@ dev = [ requires-dist = [ { name = "fastmcp", specifier = ">=2.14.0" }, { name = "inquirer", specifier = ">=3.4.0" }, - { name = "linkedin-scraper-patchright", specifier = ">=3.1.1" }, + { name = "linkedin-scraper-patchright", specifier = ">=3.1.2" }, { name = "patchright", specifier = ">=1.40.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, { name = "python-dotenv", specifier = ">=1.1.1" }, @@ -1058,7 +1058,7 @@ dev = [ [[package]] name = "linkedin-scraper-patchright" -version = "3.1.1" +version = "3.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiofiles" }, @@ -1068,9 +1068,9 @@ dependencies = [ { name = "python-dotenv" }, { name = "requests" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/d8/fe/35b02ca3ef02140c39b79e2effe7d27679311106c16640e8bc0b17056f06/linkedin_scraper_patchright-3.1.1.tar.gz", hash = "sha256:5919678907a2ea262adabff342a9b3f6b0b173af44a3dc6b6cbadaa430ac60d6", size = 47996, upload-time = "2026-02-13T13:12:09.405Z" } +sdist = { url = "https://files.pythonhosted.org/packages/97/c5/ef6ff0ddadf8b30e5047e1d6f8035bc3f6032c538031ad78ffd49e5f4719/linkedin_scraper_patchright-3.1.2.tar.gz", hash = "sha256:2bcab90f31fe0140541988b18bcfbb5c31abe0331ea77dfb00a60c0e3e675248", size = 47059, upload-time = "2026-02-13T14:24:21.636Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/58/08/0009c54e3cc6504cd5eb40af7dad20826331aef6e1993d575c12b1a6e316/linkedin_scraper_patchright-3.1.1-py3-none-any.whl", hash = "sha256:a9ae51bae3d39e633848def98e6bffd7bb13a8f1c650b1f5ec3cef98182b3b3e", size = 54616, upload-time = "2026-02-13T13:12:07.024Z" }, + { url = "https://files.pythonhosted.org/packages/2f/38/f03a4cf5e28d964b1d5febf2071014c678fad4d0fdc4a74ddb7a57c6e80b/linkedin_scraper_patchright-3.1.2-py3-none-any.whl", hash = "sha256:02f9b0d4fbd9c5e6f00fbb8e9db752571bf8fb29684f920c93bed3d30b5eee25", size = 53511, upload-time = "2026-02-13T14:24:19.99Z" }, ] [[package]] From 595e4a5fc27a76c24ab051e896b6a0f48a67dedc Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Feb 2026 15:40:25 +0100 Subject: [PATCH 368/565] chore: bump version to 3.0.3 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5dfd3477..7431b515 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "3.0.2" +version = "3.0.3" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." 
readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index bc702012..b25bf4f7 100644 --- a/uv.lock +++ b/uv.lock @@ -1011,7 +1011,7 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "3.0.2" +version = "3.0.3" source = { editable = "." } dependencies = [ { name = "fastmcp" }, From 3854933c23a50583311e1e4a43fa60faa19f14f5 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Feb 2026 15:56:31 +0100 Subject: [PATCH 369/565] chore: bump linkedin-scraper-patchright to 3.1.3 Includes person scraper fix for new LinkedIn layout --- pyproject.toml | 2 +- uv.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 7431b515..1d3d4656 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,7 +34,7 @@ classifiers = [ dependencies = [ "fastmcp>=2.14.0", "inquirer>=3.4.0", - "linkedin-scraper-patchright>=3.1.2", + "linkedin-scraper-patchright>=3.1.3", "patchright>=1.40.0", "pyperclip>=1.9.0", "python-dotenv>=1.1.1", diff --git a/uv.lock b/uv.lock index b25bf4f7..08ad22cf 100644 --- a/uv.lock +++ b/uv.lock @@ -1038,7 +1038,7 @@ dev = [ requires-dist = [ { name = "fastmcp", specifier = ">=2.14.0" }, { name = "inquirer", specifier = ">=3.4.0" }, - { name = "linkedin-scraper-patchright", specifier = ">=3.1.2" }, + { name = "linkedin-scraper-patchright", specifier = ">=3.1.3" }, { name = "patchright", specifier = ">=1.40.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, { name = "python-dotenv", specifier = ">=1.1.1" }, @@ -1058,7 +1058,7 @@ dev = [ [[package]] name = "linkedin-scraper-patchright" -version = "3.1.2" +version = "3.1.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiofiles" }, @@ -1068,9 +1068,9 @@ dependencies = [ { name = "python-dotenv" }, { name = "requests" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/97/c5/ef6ff0ddadf8b30e5047e1d6f8035bc3f6032c538031ad78ffd49e5f4719/linkedin_scraper_patchright-3.1.2.tar.gz", hash = "sha256:2bcab90f31fe0140541988b18bcfbb5c31abe0331ea77dfb00a60c0e3e675248", size = 47059, upload-time = "2026-02-13T14:24:21.636Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/b5/f294b785b1291df745dfefe3afc3326938a6449be81ef08dd7a5902542fa/linkedin_scraper_patchright-3.1.3.tar.gz", hash = "sha256:99038565f2e9f78fc6c2f4fc9db886e6f2ae34427da7b0ce2517563c7fe5caa2", size = 48632, upload-time = "2026-02-13T14:55:35.81Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/38/f03a4cf5e28d964b1d5febf2071014c678fad4d0fdc4a74ddb7a57c6e80b/linkedin_scraper_patchright-3.1.2-py3-none-any.whl", hash = "sha256:02f9b0d4fbd9c5e6f00fbb8e9db752571bf8fb29684f920c93bed3d30b5eee25", size = 53511, upload-time = "2026-02-13T14:24:19.99Z" }, + { url = "https://files.pythonhosted.org/packages/2b/ad/05d08ac2995ce002c8acc1ac708a95c101d0402effb740f6bbf412cf09fc/linkedin_scraper_patchright-3.1.3-py3-none-any.whl", hash = "sha256:b27f4f89e6d15ed4fead2fbf2a54703cba28cc7e5eb55b6f343925940233779f", size = 55245, upload-time = "2026-02-13T14:55:34.144Z" }, ] [[package]] From 05efe7a476ca09ab68fe6444c09960a6ee86ed11 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 13 Feb 2026 14:58:01 +0000 Subject: [PATCH 370/565] chore: update manifest.json and docker-compose.yml to v3.0.3 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index af84519f..d7a5f283 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:3.0.2 + image: stickerdaniel/linkedin-mcp-server:3.0.3 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git 
a/manifest.json b/manifest.json index 6fbfab1d..b49af3ab 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "3.0.2", + "version": "3.0.3", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:3.0.2", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. 
Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:3.0.3", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:3.0.2" + "stickerdaniel/linkedin-mcp-server:3.0.3" ] } }, From 16c5795fa1e9bd678628a401e471aba20db96ccb Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Feb 2026 17:45:48 +0100 Subject: [PATCH 371/565] fix(docker): copy profile to temp dir for cross-platform cookie bridge When native auth fails and a portable cookie file exists, copy the browser profile to a temp directory before restarting with imported cookies. This prevents Docker's Linux Chromium from corrupting the macOS profile by writing Linux-encrypted data back to the mounted dir. Also removes the encrypted Cookies DB from the copy since it can't be decrypted cross-platform, and bumps linkedin-scraper-patchright to 3.1.4 which fixes cookie domain normalization and auth-only import. 
--- linkedin_mcp_server/drivers/browser.py | 48 +++++++++++++++++++++----- pyproject.toml | 2 +- uv.lock | 8 ++--- 3 files changed, 45 insertions(+), 13 deletions(-) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index ffbcb0eb..e30efb01 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -7,6 +7,8 @@ """ import logging +import shutil +import tempfile from pathlib import Path from linkedin_scraper import ( @@ -99,15 +101,45 @@ async def get_or_create_browser( _browser = browser # Assign only after auth succeeds return _browser - # Auth failed โ€” try importing portable cookies (cross-platform support) - logger.info("Native auth failed, attempting portable cookie import...") - if await browser.import_cookies(): + # Native auth failed โ€” try the cross-platform cookie bridge. + # On macOSโ†’Linux, Chromium can't decrypt macOS-encrypted cookies in the + # persistent profile. We copy the profile to a temp dir (so the original + # isn't corrupted by Linux Chromium writing back), remove the undecryptable + # Cookies DB, and inject auth cookies from the portable JSON file. 
+ cookie_path = user_data_dir.parent / "cookies.json" + if cookie_path.exists(): + logger.info("Native auth failed, attempting cross-platform cookie bridge...") + await browser.close() + + # Copy profile to temp dir โ€” protects the macOS original + temp_dir = Path(tempfile.mkdtemp(prefix="linkedin-mcp-")) + temp_profile = temp_dir / "profile" + shutil.copytree(user_data_dir, temp_profile) + + # Remove encrypted Cookies DB (can't be decrypted cross-platform) + (temp_profile / "Default" / "Cookies").unlink(missing_ok=True) + (temp_profile / "Default" / "Cookies-journal").unlink(missing_ok=True) + + browser = BrowserManager( + user_data_dir=temp_profile, + headless=_headless, + slow_mo=config.browser.slow_mo, + user_agent=config.browser.user_agent, + viewport=viewport, + **launch_options, + ) + await browser.start() + + # First nav establishes session cookies (bcookie, JSESSIONID, etc.) await browser.page.goto("https://www.linkedin.com/feed/") - if await is_logged_in(browser.page): - logger.info("Authentication recovered via portable cookies") - _apply_browser_settings(browser) - _browser = browser - return _browser + # Import auth cookies (li_at, li_rm) from the portable file + if await browser.import_cookies(cookie_path): + await browser.page.goto("https://www.linkedin.com/feed/") + if await is_logged_in(browser.page): + logger.info("Authentication recovered via portable cookies") + _apply_browser_settings(browser) + _browser = browser + return _browser # Auth failed โ€” clean up and fail fast await browser.close() diff --git a/pyproject.toml b/pyproject.toml index 1d3d4656..89a8f36d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,7 +34,7 @@ classifiers = [ dependencies = [ "fastmcp>=2.14.0", "inquirer>=3.4.0", - "linkedin-scraper-patchright>=3.1.3", + "linkedin-scraper-patchright>=3.1.4", "patchright>=1.40.0", "pyperclip>=1.9.0", "python-dotenv>=1.1.1", diff --git a/uv.lock b/uv.lock index 08ad22cf..9d630e00 100644 --- a/uv.lock +++ b/uv.lock @@ -1038,7 
+1038,7 @@ dev = [ requires-dist = [ { name = "fastmcp", specifier = ">=2.14.0" }, { name = "inquirer", specifier = ">=3.4.0" }, - { name = "linkedin-scraper-patchright", specifier = ">=3.1.3" }, + { name = "linkedin-scraper-patchright", specifier = ">=3.1.4" }, { name = "patchright", specifier = ">=1.40.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, { name = "python-dotenv", specifier = ">=1.1.1" }, @@ -1058,7 +1058,7 @@ dev = [ [[package]] name = "linkedin-scraper-patchright" -version = "3.1.3" +version = "3.1.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiofiles" }, @@ -1068,9 +1068,9 @@ dependencies = [ { name = "python-dotenv" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e9/b5/f294b785b1291df745dfefe3afc3326938a6449be81ef08dd7a5902542fa/linkedin_scraper_patchright-3.1.3.tar.gz", hash = "sha256:99038565f2e9f78fc6c2f4fc9db886e6f2ae34427da7b0ce2517563c7fe5caa2", size = 48632, upload-time = "2026-02-13T14:55:35.81Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/39/9455b68ee039bbbeafeddc4f48d95c337d29a92d620891efcf661a294744/linkedin_scraper_patchright-3.1.4.tar.gz", hash = "sha256:e460fe79db266fd4d1dae66c42cc0fa7b13400672c9ee996f45aa624232d0fa0", size = 47736, upload-time = "2026-02-13T16:44:39.564Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/ad/05d08ac2995ce002c8acc1ac708a95c101d0402effb740f6bbf412cf09fc/linkedin_scraper_patchright-3.1.3-py3-none-any.whl", hash = "sha256:b27f4f89e6d15ed4fead2fbf2a54703cba28cc7e5eb55b6f343925940233779f", size = 55245, upload-time = "2026-02-13T14:55:34.144Z" }, + { url = "https://files.pythonhosted.org/packages/a4/f8/61d90fdb7aaf8b63557e5bbf907b4b274ed28f646925933798f49294877e/linkedin_scraper_patchright-3.1.4-py3-none-any.whl", hash = "sha256:c5b9e76ea6b2fb01d21e2c2ad840fd4ce447a09ead552c8a5be2ab54b9ad7149", size = 54228, upload-time = "2026-02-13T16:44:37.447Z" }, ] [[package]] From 
8598ab6896cc218449540a616a4d06940022f65c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Feb 2026 18:15:41 +0100 Subject: [PATCH 372/565] chore: bump version to 3.0.4 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 89a8f36d..b3d49b46 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "3.0.3" +version = "3.0.4" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 9d630e00..caa3c38c 100644 --- a/uv.lock +++ b/uv.lock @@ -1011,7 +1011,7 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "3.0.3" +version = "3.0.4" source = { editable = "." } dependencies = [ { name = "fastmcp" }, From 3e4f59fc59e2815e6cc12e044ccd7d735914f140 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 13 Feb 2026 17:16:55 +0000 Subject: [PATCH 373/565] chore: update manifest.json and docker-compose.yml to v3.0.4 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index d7a5f283..5d48b5d5 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:3.0.3 + image: stickerdaniel/linkedin-mcp-server:3.0.4 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index b49af3ab..34723b09 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "3.0.3", + "version": "3.0.4", "description": 
"Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:3.0.3", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. 
Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:3.0.4", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:3.0.3" + "stickerdaniel/linkedin-mcp-server:3.0.4" ] } }, From 8480a892ea4a27f55c90a752e42772a3d0191bd0 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Sun, 15 Feb 2026 21:01:05 +0100 Subject: [PATCH 374/565] docs(README): Update status of get_company_profile to 'Issues' --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 557107cd..55a4ccc8 100644 --- a/README.md +++ b/README.md @@ -41,7 +41,7 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company | Tool | Description | Status | |------|-------------|--------| | `get_person_profile` | Get detailed profile info including work history, education, contacts, interests | Working | -| `get_company_profile` | Extract company information including employees, affiliated companies | Working | +| `get_company_profile` | Extract company information including employees, affiliated companies | Issues | | `get_company_posts` | Get recent posts from a company's LinkedIn feed | Working | | `search_jobs` | Search for jobs with keywords and location filters | Working | | `get_job_details` | Get detailed information about a specific job posting | Working | From 91c71c8c62554ebfc9b31d649a886f41fd6a7f4f Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Sun, 15 Feb 2026 21:06:07 +0100 Subject: [PATCH 375/565] docs(README): Revise breaking change notice in README Updated breaking change notice to include information about LinkedIn's recent changes and the new cookie file requirement 
for Docker. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 55a4ccc8..5ade6f0e 100644 --- a/README.md +++ b/README.md @@ -51,7 +51,7 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company > The browser profile at `~/.linkedin-mcp/profile/` contains sensitive authentication data. Keep it secure and do not share it. > [!IMPORTANT] -> **Breaking change:** This version uses [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python) with persistent browser profiles instead of Playwright with session files. Old `session.json` files and `LINKEDIN_COOKIE` env vars are no longer supported. Run `--get-session` again to create a new profile. +> **Breaking change:** LinkedIn recently made some changes to prevent scraping. The newest version uses [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python) with persistent browser profiles instead of Playwright with session files. Old `session.json` files and `LINKEDIN_COOKIE` env vars are no longer supported. Run `--get-session` again to create a new profile + cookie file that can be mounted in docker. 02/2026

From e9c3032fb57b2a69611f5e0b905d7cd324128fe6 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 16 Feb 2026 00:40:15 +0000 Subject: [PATCH 376/565] chore(deps): lock file maintenance --- uv.lock | 790 ++++++++++++++++++++++++++------------------------------ 1 file changed, 371 insertions(+), 419 deletions(-) diff --git a/uv.lock b/uv.lock index caa3c38c..32a91e52 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.12" [[package]] @@ -118,6 +118,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, ] +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, +] + [[package]] name = "annotated-types" version = "0.7.0" @@ -160,14 +169,14 @@ wheels = [ [[package]] name = "authlib" -version = "1.6.6" +version = "1.6.8" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bb/9b/b1661026ff24bc641b76b78c5222d614776b0c085bcfdac9bd15a1cb4b35/authlib-1.6.6.tar.gz", hash = 
"sha256:45770e8e056d0f283451d9996fbb59b70d45722b45d854d58f32878d0a40c38e", size = 164894, upload-time = "2025-12-12T08:01:41.464Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/6c/c88eac87468c607f88bc24df1f3b31445ee6fc9ba123b09e666adf687cd9/authlib-1.6.8.tar.gz", hash = "sha256:41ae180a17cf672bc784e4a518e5c82687f1fe1e98b0cafaeda80c8e4ab2d1cb", size = 165074, upload-time = "2026-02-14T04:02:17.941Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/54/51/321e821856452f7386c4e9df866f196720b1ad0c5ea1623ea7399969ae3b/authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd", size = 244005, upload-time = "2025-12-12T08:01:40.209Z" }, + { url = "https://files.pythonhosted.org/packages/9b/73/f7084bf12755113cd535ae586782ff3a6e710bfbe6a0d13d1c2f81ffbbfa/authlib-1.6.8-py2.py3-none-any.whl", hash = "sha256:97286fd7a15e6cfefc32771c8ef9c54f0ed58028f1322de6a2a7c969c3817888", size = 244116, upload-time = "2026-02-14T04:02:15.579Z" }, ] [[package]] @@ -181,24 +190,24 @@ wheels = [ [[package]] name = "blessed" -version = "1.27.0" +version = "1.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jinxed", marker = "sys_platform == 'win32'" }, { name = "wcwidth" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f1/3c/783f2a400e5dac56ad073997aa6aa47150c3b06a5ce8ad2f537f3691eaaa/blessed-1.27.0.tar.gz", hash = "sha256:e3064559388bd532ab6460d9b6c7d6dd699c4e0cf54d28ed6e2cab12feda13bb", size = 6761573, upload-time = "2026-01-20T04:16:56.233Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/19/e926a0dbbf93c7aeb15d4dfff0d0e3de02653b3ba540b687307d0819c1ff/blessed-1.30.0.tar.gz", hash = "sha256:4d547019d7b40fc5420ea2ba2bc180fdccc31d6715298e2b49ffa7b020d44667", size = 13948932, upload-time = "2026-02-06T19:40:23.541Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/30/d9/11d745a88e9000729fc4d9e813789a95327beda325e04ec311e9ae23a30e/blessed-1.27.0-py3-none-any.whl", hash = "sha256:1c599969acc993bb5842bf3f638b0691e335277a9d9058cd079463a346988714", size = 101305, upload-time = "2026-01-20T04:16:54.095Z" }, + { url = "https://files.pythonhosted.org/packages/64/b0/8d87c7c8015ce8d4b2c5ee7a82a1d955f10138322c4f0cb387d7d2c1b2e7/blessed-1.30.0-py3-none-any.whl", hash = "sha256:4061a9f10dd22798716c2548ba36385af6a29d856c897f367c6ccc927e0b3a5a", size = 98399, upload-time = "2026-02-06T19:40:20.815Z" }, ] [[package]] name = "cachetools" -version = "6.2.4" +version = "7.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bc/1d/ede8680603f6016887c062a2cf4fc8fdba905866a3ab8831aa8aa651320c/cachetools-6.2.4.tar.gz", hash = "sha256:82c5c05585e70b6ba2d3ae09ea60b79548872185d2f24ae1f2709d37299fd607", size = 31731, upload-time = "2025-12-15T18:24:53.744Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/07/56595285564e90777d758ebd383d6b0b971b87729bbe2184a849932a3736/cachetools-7.0.1.tar.gz", hash = "sha256:e31e579d2c5b6e2944177a0397150d312888ddf4e16e12f1016068f0c03b8341", size = 36126, upload-time = "2026-02-10T22:24:05.03Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/fc/1d7b80d0eb7b714984ce40efc78859c022cd930e402f599d8ca9e39c78a4/cachetools-6.2.4-py3-none-any.whl", hash = "sha256:69a7a52634fed8b8bf6e24a050fb60bff1c9bd8f6d24572b99c32d4e71e62a51", size = 11551, upload-time = "2025-12-15T18:24:52.332Z" }, + { url = "https://files.pythonhosted.org/packages/ed/9e/5faefbf9db1db466d633735faceda1f94aa99ce506ac450d232536266b32/cachetools-7.0.1-py3-none-any.whl", hash = "sha256:8f086515c254d5664ae2146d14fc7f65c9a4bce75152eb247e5a9c5e6d7b2ecf", size = 13484, upload-time = "2026-02-10T22:24:03.741Z" }, ] [[package]] @@ -365,137 +374,157 @@ wheels = [ [[package]] name = "coverage" -version = "7.13.1" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/f9/e92df5e07f3fc8d4c7f9a0f146ef75446bf870351cd37b788cf5897f8079/coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd", size = 825862, upload-time = "2025-12-28T15:42:56.969Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/8a/87af46cccdfa78f53db747b09f5f9a21d5fc38d796834adac09b30a8ce74/coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3", size = 218927, upload-time = "2025-12-28T15:40:52.814Z" }, - { url = "https://files.pythonhosted.org/packages/82/a8/6e22fdc67242a4a5a153f9438d05944553121c8f4ba70cb072af4c41362e/coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e", size = 219288, upload-time = "2025-12-28T15:40:54.262Z" }, - { url = "https://files.pythonhosted.org/packages/d0/0a/853a76e03b0f7c4375e2ca025df45c918beb367f3e20a0a8e91967f6e96c/coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c", size = 250786, upload-time = "2025-12-28T15:40:56.059Z" }, - { url = "https://files.pythonhosted.org/packages/ea/b4/694159c15c52b9f7ec7adf49d50e5f8ee71d3e9ef38adb4445d13dd56c20/coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62", size = 253543, upload-time = "2025-12-28T15:40:57.585Z" }, - { url = "https://files.pythonhosted.org/packages/96/b2/7f1f0437a5c855f87e17cf5d0dc35920b6440ff2b58b1ba9788c059c26c8/coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968", size = 254635, upload-time = 
"2025-12-28T15:40:59.443Z" }, - { url = "https://files.pythonhosted.org/packages/e9/d1/73c3fdb8d7d3bddd9473c9c6a2e0682f09fc3dfbcb9c3f36412a7368bcab/coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e", size = 251202, upload-time = "2025-12-28T15:41:01.328Z" }, - { url = "https://files.pythonhosted.org/packages/66/3c/f0edf75dcc152f145d5598329e864bbbe04ab78660fe3e8e395f9fff010f/coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f", size = 252566, upload-time = "2025-12-28T15:41:03.319Z" }, - { url = "https://files.pythonhosted.org/packages/17/b3/e64206d3c5f7dcbceafd14941345a754d3dbc78a823a6ed526e23b9cdaab/coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee", size = 250711, upload-time = "2025-12-28T15:41:06.411Z" }, - { url = "https://files.pythonhosted.org/packages/dc/ad/28a3eb970a8ef5b479ee7f0c484a19c34e277479a5b70269dc652b730733/coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf", size = 250278, upload-time = "2025-12-28T15:41:08.285Z" }, - { url = "https://files.pythonhosted.org/packages/54/e3/c8f0f1a93133e3e1291ca76cbb63565bd4b5c5df63b141f539d747fff348/coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c", size = 252154, upload-time = "2025-12-28T15:41:09.969Z" }, - { url = "https://files.pythonhosted.org/packages/d0/bf/9939c5d6859c380e405b19e736321f1c7d402728792f4c752ad1adcce005/coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7", size = 221487, upload-time = "2025-12-28T15:41:11.468Z" }, - { url = 
"https://files.pythonhosted.org/packages/fa/dc/7282856a407c621c2aad74021680a01b23010bb8ebf427cf5eacda2e876f/coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6", size = 222299, upload-time = "2025-12-28T15:41:13.386Z" }, - { url = "https://files.pythonhosted.org/packages/10/79/176a11203412c350b3e9578620013af35bcdb79b651eb976f4a4b32044fa/coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c", size = 220941, upload-time = "2025-12-28T15:41:14.975Z" }, - { url = "https://files.pythonhosted.org/packages/a3/a4/e98e689347a1ff1a7f67932ab535cef82eb5e78f32a9e4132e114bbb3a0a/coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78", size = 218951, upload-time = "2025-12-28T15:41:16.653Z" }, - { url = "https://files.pythonhosted.org/packages/32/33/7cbfe2bdc6e2f03d6b240d23dc45fdaf3fd270aaf2d640be77b7f16989ab/coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b", size = 219325, upload-time = "2025-12-28T15:41:18.609Z" }, - { url = "https://files.pythonhosted.org/packages/59/f6/efdabdb4929487baeb7cb2a9f7dac457d9356f6ad1b255be283d58b16316/coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd", size = 250309, upload-time = "2025-12-28T15:41:20.629Z" }, - { url = "https://files.pythonhosted.org/packages/12/da/91a52516e9d5aea87d32d1523f9cdcf7a35a3b298e6be05d6509ba3cfab2/coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992", size = 252907, upload-time = "2025-12-28T15:41:22.257Z" }, - { url = 
"https://files.pythonhosted.org/packages/75/38/f1ea837e3dc1231e086db1638947e00d264e7e8c41aa8ecacf6e1e0c05f4/coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4", size = 254148, upload-time = "2025-12-28T15:41:23.87Z" }, - { url = "https://files.pythonhosted.org/packages/7f/43/f4f16b881aaa34954ba446318dea6b9ed5405dd725dd8daac2358eda869a/coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a", size = 250515, upload-time = "2025-12-28T15:41:25.437Z" }, - { url = "https://files.pythonhosted.org/packages/84/34/8cba7f00078bd468ea914134e0144263194ce849ec3baad187ffb6203d1c/coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766", size = 252292, upload-time = "2025-12-28T15:41:28.459Z" }, - { url = "https://files.pythonhosted.org/packages/8c/a4/cffac66c7652d84ee4ac52d3ccb94c015687d3b513f9db04bfcac2ac800d/coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4", size = 250242, upload-time = "2025-12-28T15:41:30.02Z" }, - { url = "https://files.pythonhosted.org/packages/f4/78/9a64d462263dde416f3c0067efade7b52b52796f489b1037a95b0dc389c9/coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398", size = 250068, upload-time = "2025-12-28T15:41:32.007Z" }, - { url = "https://files.pythonhosted.org/packages/69/c8/a8994f5fece06db7c4a97c8fc1973684e178599b42e66280dded0524ef00/coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784", size = 251846, upload-time = "2025-12-28T15:41:33.946Z" }, - { url = 
"https://files.pythonhosted.org/packages/cc/f7/91fa73c4b80305c86598a2d4e54ba22df6bf7d0d97500944af7ef155d9f7/coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461", size = 221512, upload-time = "2025-12-28T15:41:35.519Z" }, - { url = "https://files.pythonhosted.org/packages/45/0b/0768b4231d5a044da8f75e097a8714ae1041246bb765d6b5563bab456735/coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500", size = 222321, upload-time = "2025-12-28T15:41:37.371Z" }, - { url = "https://files.pythonhosted.org/packages/9b/b8/bdcb7253b7e85157282450262008f1366aa04663f3e3e4c30436f596c3e2/coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9", size = 220949, upload-time = "2025-12-28T15:41:39.553Z" }, - { url = "https://files.pythonhosted.org/packages/70/52/f2be52cc445ff75ea8397948c96c1b4ee14f7f9086ea62fc929c5ae7b717/coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc", size = 219643, upload-time = "2025-12-28T15:41:41.567Z" }, - { url = "https://files.pythonhosted.org/packages/47/79/c85e378eaa239e2edec0c5523f71542c7793fe3340954eafb0bc3904d32d/coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a", size = 219997, upload-time = "2025-12-28T15:41:43.418Z" }, - { url = "https://files.pythonhosted.org/packages/fe/9b/b1ade8bfb653c0bbce2d6d6e90cc6c254cbb99b7248531cc76253cb4da6d/coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4", size = 261296, upload-time = "2025-12-28T15:41:45.207Z" }, - { url = 
"https://files.pythonhosted.org/packages/1f/af/ebf91e3e1a2473d523e87e87fd8581e0aa08741b96265730e2d79ce78d8d/coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6", size = 263363, upload-time = "2025-12-28T15:41:47.163Z" }, - { url = "https://files.pythonhosted.org/packages/c4/8b/fb2423526d446596624ac7fde12ea4262e66f86f5120114c3cfd0bb2befa/coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1", size = 265783, upload-time = "2025-12-28T15:41:49.03Z" }, - { url = "https://files.pythonhosted.org/packages/9b/26/ef2adb1e22674913b89f0fe7490ecadcef4a71fa96f5ced90c60ec358789/coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd", size = 260508, upload-time = "2025-12-28T15:41:51.035Z" }, - { url = "https://files.pythonhosted.org/packages/ce/7d/f0f59b3404caf662e7b5346247883887687c074ce67ba453ea08c612b1d5/coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c", size = 263357, upload-time = "2025-12-28T15:41:52.631Z" }, - { url = "https://files.pythonhosted.org/packages/1a/b1/29896492b0b1a047604d35d6fa804f12818fa30cdad660763a5f3159e158/coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0", size = 260978, upload-time = "2025-12-28T15:41:54.589Z" }, - { url = "https://files.pythonhosted.org/packages/48/f2/971de1238a62e6f0a4128d37adadc8bb882ee96afbe03ff1570291754629/coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e", size = 259877, upload-time = "2025-12-28T15:41:56.263Z" }, - { url 
= "https://files.pythonhosted.org/packages/6a/fc/0474efcbb590ff8628830e9aaec5f1831594874360e3251f1fdec31d07a3/coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53", size = 262069, upload-time = "2025-12-28T15:41:58.093Z" }, - { url = "https://files.pythonhosted.org/packages/88/4f/3c159b7953db37a7b44c0eab8a95c37d1aa4257c47b4602c04022d5cb975/coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842", size = 222184, upload-time = "2025-12-28T15:41:59.763Z" }, - { url = "https://files.pythonhosted.org/packages/58/a5/6b57d28f81417f9335774f20679d9d13b9a8fb90cd6160957aa3b54a2379/coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2", size = 223250, upload-time = "2025-12-28T15:42:01.52Z" }, - { url = "https://files.pythonhosted.org/packages/81/7c/160796f3b035acfbb58be80e02e484548595aa67e16a6345e7910ace0a38/coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09", size = 221521, upload-time = "2025-12-28T15:42:03.275Z" }, - { url = "https://files.pythonhosted.org/packages/aa/8e/ba0e597560c6563fc0adb902fda6526df5d4aa73bb10adf0574d03bd2206/coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894", size = 218996, upload-time = "2025-12-28T15:42:04.978Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8e/764c6e116f4221dc7aa26c4061181ff92edb9c799adae6433d18eeba7a14/coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a", size = 219326, upload-time = "2025-12-28T15:42:06.691Z" }, - { url = 
"https://files.pythonhosted.org/packages/4f/a6/6130dc6d8da28cdcbb0f2bf8865aeca9b157622f7c0031e48c6cf9a0e591/coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f", size = 250374, upload-time = "2025-12-28T15:42:08.786Z" }, - { url = "https://files.pythonhosted.org/packages/82/2b/783ded568f7cd6b677762f780ad338bf4b4750205860c17c25f7c708995e/coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909", size = 252882, upload-time = "2025-12-28T15:42:10.515Z" }, - { url = "https://files.pythonhosted.org/packages/cd/b2/9808766d082e6a4d59eb0cc881a57fc1600eb2c5882813eefff8254f71b5/coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4", size = 254218, upload-time = "2025-12-28T15:42:12.208Z" }, - { url = "https://files.pythonhosted.org/packages/44/ea/52a985bb447c871cb4d2e376e401116520991b597c85afdde1ea9ef54f2c/coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75", size = 250391, upload-time = "2025-12-28T15:42:14.21Z" }, - { url = "https://files.pythonhosted.org/packages/7f/1d/125b36cc12310718873cfc8209ecfbc1008f14f4f5fa0662aa608e579353/coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9", size = 252239, upload-time = "2025-12-28T15:42:16.292Z" }, - { url = "https://files.pythonhosted.org/packages/6a/16/10c1c164950cade470107f9f14bbac8485f8fb8515f515fca53d337e4a7f/coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465", size = 250196, upload-time = 
"2025-12-28T15:42:18.54Z" }, - { url = "https://files.pythonhosted.org/packages/2a/c6/cd860fac08780c6fd659732f6ced1b40b79c35977c1356344e44d72ba6c4/coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864", size = 250008, upload-time = "2025-12-28T15:42:20.365Z" }, - { url = "https://files.pythonhosted.org/packages/f0/3a/a8c58d3d38f82a5711e1e0a67268362af48e1a03df27c03072ac30feefcf/coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9", size = 251671, upload-time = "2025-12-28T15:42:22.114Z" }, - { url = "https://files.pythonhosted.org/packages/f0/bc/fd4c1da651d037a1e3d53e8cb3f8182f4b53271ffa9a95a2e211bacc0349/coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5", size = 221777, upload-time = "2025-12-28T15:42:23.919Z" }, - { url = "https://files.pythonhosted.org/packages/4b/50/71acabdc8948464c17e90b5ffd92358579bd0910732c2a1c9537d7536aa6/coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a", size = 222592, upload-time = "2025-12-28T15:42:25.619Z" }, - { url = "https://files.pythonhosted.org/packages/f7/c8/a6fb943081bb0cc926499c7907731a6dc9efc2cbdc76d738c0ab752f1a32/coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0", size = 221169, upload-time = "2025-12-28T15:42:27.629Z" }, - { url = "https://files.pythonhosted.org/packages/16/61/d5b7a0a0e0e40d62e59bc8c7aa1afbd86280d82728ba97f0673b746b78e2/coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a", size = 219730, upload-time = "2025-12-28T15:42:29.306Z" }, - { url = 
"https://files.pythonhosted.org/packages/a3/2c/8881326445fd071bb49514d1ce97d18a46a980712b51fee84f9ab42845b4/coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6", size = 220001, upload-time = "2025-12-28T15:42:31.319Z" }, - { url = "https://files.pythonhosted.org/packages/b5/d7/50de63af51dfa3a7f91cc37ad8fcc1e244b734232fbc8b9ab0f3c834a5cd/coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673", size = 261370, upload-time = "2025-12-28T15:42:32.992Z" }, - { url = "https://files.pythonhosted.org/packages/e1/2c/d31722f0ec918fd7453b2758312729f645978d212b410cd0f7c2aed88a94/coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5", size = 263485, upload-time = "2025-12-28T15:42:34.759Z" }, - { url = "https://files.pythonhosted.org/packages/fa/7a/2c114fa5c5fc08ba0777e4aec4c97e0b4a1afcb69c75f1f54cff78b073ab/coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d", size = 265890, upload-time = "2025-12-28T15:42:36.517Z" }, - { url = "https://files.pythonhosted.org/packages/65/d9/f0794aa1c74ceabc780fe17f6c338456bbc4e96bd950f2e969f48ac6fb20/coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8", size = 260445, upload-time = "2025-12-28T15:42:38.646Z" }, - { url = "https://files.pythonhosted.org/packages/49/23/184b22a00d9bb97488863ced9454068c79e413cb23f472da6cbddc6cfc52/coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486", size = 263357, upload-time = 
"2025-12-28T15:42:40.788Z" }, - { url = "https://files.pythonhosted.org/packages/7d/bd/58af54c0c9199ea4190284f389005779d7daf7bf3ce40dcd2d2b2f96da69/coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564", size = 260959, upload-time = "2025-12-28T15:42:42.808Z" }, - { url = "https://files.pythonhosted.org/packages/4b/2a/6839294e8f78a4891bf1df79d69c536880ba2f970d0ff09e7513d6e352e9/coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7", size = 259792, upload-time = "2025-12-28T15:42:44.818Z" }, - { url = "https://files.pythonhosted.org/packages/ba/c3/528674d4623283310ad676c5af7414b9850ab6d55c2300e8aa4b945ec554/coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416", size = 262123, upload-time = "2025-12-28T15:42:47.108Z" }, - { url = "https://files.pythonhosted.org/packages/06/c5/8c0515692fb4c73ac379d8dc09b18eaf0214ecb76ea6e62467ba7a1556ff/coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f", size = 222562, upload-time = "2025-12-28T15:42:49.144Z" }, - { url = "https://files.pythonhosted.org/packages/05/0e/c0a0c4678cb30dac735811db529b321d7e1c9120b79bd728d4f4d6b010e9/coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79", size = 223670, upload-time = "2025-12-28T15:42:51.218Z" }, - { url = "https://files.pythonhosted.org/packages/f5/5f/b177aa0011f354abf03a8f30a85032686d290fdeed4222b27d36b4372a50/coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4", size = 221707, upload-time = "2025-12-28T15:42:53.034Z" }, - { url = 
"https://files.pythonhosted.org/packages/cc/48/d9f421cb8da5afaa1a64570d9989e00fb7955e6acddc5a12979f7666ef60/coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573", size = 210722, upload-time = "2025-12-28T15:42:54.901Z" }, +version = "7.13.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/81/4ce2fdd909c5a0ed1f6dedb88aa57ab79b6d1fbd9b588c1ac7ef45659566/coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459", size = 219449, upload-time = "2026-02-09T12:56:54.889Z" }, + { url = "https://files.pythonhosted.org/packages/5d/96/5238b1efc5922ddbdc9b0db9243152c09777804fb7c02ad1741eb18a11c0/coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3", size = 219810, upload-time = "2026-02-09T12:56:56.33Z" }, + { url = "https://files.pythonhosted.org/packages/78/72/2f372b726d433c9c35e56377cf1d513b4c16fe51841060d826b95caacec1/coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634", size = 251308, upload-time = "2026-02-09T12:56:57.858Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a0/2ea570925524ef4e00bb6c82649f5682a77fac5ab910a65c9284de422600/coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3", size = 254052, upload-time = "2026-02-09T12:56:59.754Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/ac/45dc2e19a1939098d783c846e130b8f862fbb50d09e0af663988f2f21973/coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa", size = 255165, upload-time = "2026-02-09T12:57:01.287Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4d/26d236ff35abc3b5e63540d3386e4c3b192168c1d96da5cb2f43c640970f/coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3", size = 257432, upload-time = "2026-02-09T12:57:02.637Z" }, + { url = "https://files.pythonhosted.org/packages/ec/55/14a966c757d1348b2e19caf699415a2a4c4f7feaa4bbc6326a51f5c7dd1b/coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a", size = 251716, upload-time = "2026-02-09T12:57:04.056Z" }, + { url = "https://files.pythonhosted.org/packages/77/33/50116647905837c66d28b2af1321b845d5f5d19be9655cb84d4a0ea806b4/coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7", size = 253089, upload-time = "2026-02-09T12:57:05.503Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b4/8efb11a46e3665d92635a56e4f2d4529de6d33f2cb38afd47d779d15fc99/coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc", size = 251232, upload-time = "2026-02-09T12:57:06.879Z" }, + { url = "https://files.pythonhosted.org/packages/51/24/8cd73dd399b812cc76bb0ac260e671c4163093441847ffe058ac9fda1e32/coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47", size = 255299, upload-time = "2026-02-09T12:57:08.245Z" }, + { 
url = "https://files.pythonhosted.org/packages/03/94/0a4b12f1d0e029ce1ccc1c800944a9984cbe7d678e470bb6d3c6bc38a0da/coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985", size = 250796, upload-time = "2026-02-09T12:57:10.142Z" }, + { url = "https://files.pythonhosted.org/packages/73/44/6002fbf88f6698ca034360ce474c406be6d5a985b3fdb3401128031eef6b/coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0", size = 252673, upload-time = "2026-02-09T12:57:12.197Z" }, + { url = "https://files.pythonhosted.org/packages/de/c6/a0279f7c00e786be75a749a5674e6fa267bcbd8209cd10c9a450c655dfa7/coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246", size = 221990, upload-time = "2026-02-09T12:57:14.085Z" }, + { url = "https://files.pythonhosted.org/packages/77/4e/c0a25a425fcf5557d9abd18419c95b63922e897bc86c1f327f155ef234a9/coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126", size = 222800, upload-time = "2026-02-09T12:57:15.944Z" }, + { url = "https://files.pythonhosted.org/packages/47/ac/92da44ad9a6f4e3a7debd178949d6f3769bedca33830ce9b1dcdab589a37/coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d", size = 221415, upload-time = "2026-02-09T12:57:17.497Z" }, + { url = "https://files.pythonhosted.org/packages/db/23/aad45061a31677d68e47499197a131eea55da4875d16c1f42021ab963503/coverage-7.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b66a2da594b6068b48b2692f043f35d4d3693fb639d5ea8b39533c2ad9ac3ab9", size = 219474, upload-time = "2026-02-09T12:57:19.332Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/70/9b8b67a0945f3dfec1fd896c5cefb7c19d5a3a6d74630b99a895170999ae/coverage-7.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3599eb3992d814d23b35c536c28df1a882caa950f8f507cef23d1cbf334995ac", size = 219844, upload-time = "2026-02-09T12:57:20.66Z" }, + { url = "https://files.pythonhosted.org/packages/97/fd/7e859f8fab324cef6c4ad7cff156ca7c489fef9179d5749b0c8d321281c2/coverage-7.13.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93550784d9281e374fb5a12bf1324cc8a963fd63b2d2f223503ef0fd4aa339ea", size = 250832, upload-time = "2026-02-09T12:57:22.007Z" }, + { url = "https://files.pythonhosted.org/packages/e4/dc/b2442d10020c2f52617828862d8b6ee337859cd8f3a1f13d607dddda9cf7/coverage-7.13.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b720ce6a88a2755f7c697c23268ddc47a571b88052e6b155224347389fdf6a3b", size = 253434, upload-time = "2026-02-09T12:57:23.339Z" }, + { url = "https://files.pythonhosted.org/packages/5a/88/6728a7ad17428b18d836540630487231f5470fb82454871149502f5e5aa2/coverage-7.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b322db1284a2ed3aa28ffd8ebe3db91c929b7a333c0820abec3d838ef5b3525", size = 254676, upload-time = "2026-02-09T12:57:24.774Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bc/21244b1b8cedf0dff0a2b53b208015fe798d5f2a8d5348dbfece04224fff/coverage-7.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4594c67d8a7c89cf922d9df0438c7c7bb022ad506eddb0fdb2863359ff78242", size = 256807, upload-time = "2026-02-09T12:57:26.125Z" }, + { url = "https://files.pythonhosted.org/packages/97/a0/ddba7ed3251cff51006737a727d84e05b61517d1784a9988a846ba508877/coverage-7.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:53d133df809c743eb8bce33b24bcababb371f4441340578cd406e084d94a6148", size = 251058, upload-time = "2026-02-09T12:57:27.614Z" }, + { url = "https://files.pythonhosted.org/packages/9b/55/e289addf7ff54d3a540526f33751951bf0878f3809b47f6dfb3def69c6f7/coverage-7.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76451d1978b95ba6507a039090ba076105c87cc76fc3efd5d35d72093964d49a", size = 252805, upload-time = "2026-02-09T12:57:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/13/4e/cc276b1fa4a59be56d96f1dabddbdc30f4ba22e3b1cd42504c37b3313255/coverage-7.13.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f57b33491e281e962021de110b451ab8a24182589be17e12a22c79047935e23", size = 250766, upload-time = "2026-02-09T12:57:30.522Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/1093b8f93018f8b41a8cf29636c9292502f05e4a113d4d107d14a3acd044/coverage-7.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1731dc33dc276dafc410a885cbf5992f1ff171393e48a21453b78727d090de80", size = 254923, upload-time = "2026-02-09T12:57:31.946Z" }, + { url = "https://files.pythonhosted.org/packages/8b/55/ea2796da2d42257f37dbea1aab239ba9263b31bd91d5527cdd6db5efe174/coverage-7.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:bd60d4fe2f6fa7dff9223ca1bbc9f05d2b6697bc5961072e5d3b952d46e1b1ea", size = 250591, upload-time = "2026-02-09T12:57:33.842Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/7c4bb72aacf8af5020675aa633e59c1fbe296d22aed191b6a5b711eb2bc7/coverage-7.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9181a3ccead280b828fae232df12b16652702b49d41e99d657f46cc7b1f6ec7a", size = 252364, upload-time = "2026-02-09T12:57:35.743Z" }, + { url = "https://files.pythonhosted.org/packages/5c/38/a8d2ec0146479c20bbaa7181b5b455a0c41101eed57f10dd19a78ab44c80/coverage-7.13.4-cp313-cp313-win32.whl", hash = "sha256:f53d492307962561ac7de4cd1de3e363589b000ab69617c6156a16ba7237998d", size = 222010, upload-time = "2026-02-09T12:57:37.25Z" 
}, + { url = "https://files.pythonhosted.org/packages/e2/0c/dbfafbe90a185943dcfbc766fe0e1909f658811492d79b741523a414a6cc/coverage-7.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:e6f70dec1cc557e52df5306d051ef56003f74d56e9c4dd7ddb07e07ef32a84dd", size = 222818, upload-time = "2026-02-09T12:57:38.734Z" }, + { url = "https://files.pythonhosted.org/packages/04/d1/934918a138c932c90d78301f45f677fb05c39a3112b96fd2c8e60503cdc7/coverage-7.13.4-cp313-cp313-win_arm64.whl", hash = "sha256:fb07dc5da7e849e2ad31a5d74e9bece81f30ecf5a42909d0a695f8bd1874d6af", size = 221438, upload-time = "2026-02-09T12:57:40.223Z" }, + { url = "https://files.pythonhosted.org/packages/52/57/ee93ced533bcb3e6df961c0c6e42da2fc6addae53fb95b94a89b1e33ebd7/coverage-7.13.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40d74da8e6c4b9ac18b15331c4b5ebc35a17069410cad462ad4f40dcd2d50c0d", size = 220165, upload-time = "2026-02-09T12:57:41.639Z" }, + { url = "https://files.pythonhosted.org/packages/c5/e0/969fc285a6fbdda49d91af278488d904dcd7651b2693872f0ff94e40e84a/coverage-7.13.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4223b4230a376138939a9173f1bdd6521994f2aff8047fae100d6d94d50c5a12", size = 220516, upload-time = "2026-02-09T12:57:44.215Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b8/9531944e16267e2735a30a9641ff49671f07e8138ecf1ca13db9fd2560c7/coverage-7.13.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1d4be36a5114c499f9f1f9195e95ebf979460dbe2d88e6816ea202010ba1c34b", size = 261804, upload-time = "2026-02-09T12:57:45.989Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f3/e63df6d500314a2a60390d1989240d5f27318a7a68fa30ad3806e2a9323e/coverage-7.13.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:200dea7d1e8095cc6e98cdabe3fd1d21ab17d3cee6dab00cadbb2fe35d9c15b9", size = 263885, upload-time = "2026-02-09T12:57:47.42Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/67/7654810de580e14b37670b60a09c599fa348e48312db5b216d730857ffe6/coverage-7.13.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8eb931ee8e6d8243e253e5ed7336deea6904369d2fd8ae6e43f68abbf167092", size = 266308, upload-time = "2026-02-09T12:57:49.345Z" }, + { url = "https://files.pythonhosted.org/packages/37/6f/39d41eca0eab3cc82115953ad41c4e77935286c930e8fad15eaed1389d83/coverage-7.13.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:75eab1ebe4f2f64d9509b984f9314d4aa788540368218b858dad56dc8f3e5eb9", size = 267452, upload-time = "2026-02-09T12:57:50.811Z" }, + { url = "https://files.pythonhosted.org/packages/50/6d/39c0fbb8fc5cd4d2090811e553c2108cf5112e882f82505ee7495349a6bf/coverage-7.13.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c35eb28c1d085eb7d8c9b3296567a1bebe03ce72962e932431b9a61f28facf26", size = 261057, upload-time = "2026-02-09T12:57:52.447Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a2/60010c669df5fa603bb5a97fb75407e191a846510da70ac657eb696b7fce/coverage-7.13.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb88b316ec33760714a4720feb2816a3a59180fd58c1985012054fa7aebee4c2", size = 263875, upload-time = "2026-02-09T12:57:53.938Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d9/63b22a6bdbd17f1f96e9ed58604c2a6b0e72a9133e37d663bef185877cf6/coverage-7.13.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d41eead3cc673cbd38a4417deb7fd0b4ca26954ff7dc6078e33f6ff97bed940", size = 261500, upload-time = "2026-02-09T12:57:56.012Z" }, + { url = "https://files.pythonhosted.org/packages/70/bf/69f86ba1ad85bc3ad240e4c0e57a2e620fbc0e1645a47b5c62f0e941ad7f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:fb26a934946a6afe0e326aebe0730cdff393a8bc0bbb65a2f41e30feddca399c", size = 265212, upload-time = "2026-02-09T12:57:57.5Z" }, + 
{ url = "https://files.pythonhosted.org/packages/ae/f2/5f65a278a8c2148731831574c73e42f57204243d33bedaaf18fa79c5958f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:dae88bc0fc77edaa65c14be099bd57ee140cf507e6bfdeea7938457ab387efb0", size = 260398, upload-time = "2026-02-09T12:57:59.027Z" }, + { url = "https://files.pythonhosted.org/packages/ef/80/6e8280a350ee9fea92f14b8357448a242dcaa243cb2c72ab0ca591f66c8c/coverage-7.13.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:845f352911777a8e722bfce168958214951e07e47e5d5d9744109fa5fe77f79b", size = 262584, upload-time = "2026-02-09T12:58:01.129Z" }, + { url = "https://files.pythonhosted.org/packages/22/63/01ff182fc95f260b539590fb12c11ad3e21332c15f9799cb5e2386f71d9f/coverage-7.13.4-cp313-cp313t-win32.whl", hash = "sha256:2fa8d5f8de70688a28240de9e139fa16b153cc3cbb01c5f16d88d6505ebdadf9", size = 222688, upload-time = "2026-02-09T12:58:02.736Z" }, + { url = "https://files.pythonhosted.org/packages/a9/43/89de4ef5d3cd53b886afa114065f7e9d3707bdb3e5efae13535b46ae483d/coverage-7.13.4-cp313-cp313t-win_amd64.whl", hash = "sha256:9351229c8c8407645840edcc277f4a2d44814d1bc34a2128c11c2a031d45a5dd", size = 223746, upload-time = "2026-02-09T12:58:05.362Z" }, + { url = "https://files.pythonhosted.org/packages/35/39/7cf0aa9a10d470a5309b38b289b9bb07ddeac5d61af9b664fe9775a4cb3e/coverage-7.13.4-cp313-cp313t-win_arm64.whl", hash = "sha256:30b8d0512f2dc8c8747557e8fb459d6176a2c9e5731e2b74d311c03b78451997", size = 222003, upload-time = "2026-02-09T12:58:06.952Z" }, + { url = "https://files.pythonhosted.org/packages/92/11/a9cf762bb83386467737d32187756a42094927150c3e107df4cb078e8590/coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601", size = 219522, upload-time = "2026-02-09T12:58:08.623Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/28/56e6d892b7b052236d67c95f1936b6a7cf7c3e2634bf27610b8cbd7f9c60/coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689", size = 219855, upload-time = "2026-02-09T12:58:10.176Z" }, + { url = "https://files.pythonhosted.org/packages/e5/69/233459ee9eb0c0d10fcc2fe425a029b3fa5ce0f040c966ebce851d030c70/coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c", size = 250887, upload-time = "2026-02-09T12:58:12.503Z" }, + { url = "https://files.pythonhosted.org/packages/06/90/2cdab0974b9b5bbc1623f7876b73603aecac11b8d95b85b5b86b32de5eab/coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129", size = 253396, upload-time = "2026-02-09T12:58:14.615Z" }, + { url = "https://files.pythonhosted.org/packages/ac/15/ea4da0f85bf7d7b27635039e649e99deb8173fe551096ea15017f7053537/coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552", size = 254745, upload-time = "2026-02-09T12:58:16.162Z" }, + { url = "https://files.pythonhosted.org/packages/99/11/bb356e86920c655ca4d61daee4e2bbc7258f0a37de0be32d233b561134ff/coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a", size = 257055, upload-time = "2026-02-09T12:58:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/c9/0f/9ae1f8cb17029e09da06ca4e28c9e1d5c1c0a511c7074592e37e0836c915/coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356", size = 250911, upload-time = "2026-02-09T12:58:19.495Z" }, + { url = "https://files.pythonhosted.org/packages/89/3a/adfb68558fa815cbc29747b553bc833d2150228f251b127f1ce97e48547c/coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71", size = 252754, upload-time = "2026-02-09T12:58:21.064Z" }, + { url = "https://files.pythonhosted.org/packages/32/b1/540d0c27c4e748bd3cd0bd001076ee416eda993c2bae47a73b7cc9357931/coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5", size = 250720, upload-time = "2026-02-09T12:58:22.622Z" }, + { url = "https://files.pythonhosted.org/packages/c7/95/383609462b3ffb1fe133014a7c84fc0dd01ed55ac6140fa1093b5af7ebb1/coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98", size = 254994, upload-time = "2026-02-09T12:58:24.548Z" }, + { url = "https://files.pythonhosted.org/packages/f7/ba/1761138e86c81680bfc3c49579d66312865457f9fe405b033184e5793cb3/coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5", size = 250531, upload-time = "2026-02-09T12:58:26.271Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8e/05900df797a9c11837ab59c4d6fe94094e029582aab75c3309a93e6fb4e3/coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0", size = 252189, upload-time = "2026-02-09T12:58:27.807Z" }, + { url = "https://files.pythonhosted.org/packages/00/bd/29c9f2db9ea4ed2738b8a9508c35626eb205d51af4ab7bf56a21a2e49926/coverage-7.13.4-cp314-cp314-win32.whl", hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb", size = 222258, upload-time = 
"2026-02-09T12:58:29.441Z" }, + { url = "https://files.pythonhosted.org/packages/a7/4d/1f8e723f6829977410efeb88f73673d794075091c8c7c18848d273dc9d73/coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505", size = 223073, upload-time = "2026-02-09T12:58:31.026Z" }, + { url = "https://files.pythonhosted.org/packages/51/5b/84100025be913b44e082ea32abcf1afbf4e872f5120b7a1cab1d331b1e13/coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2", size = 221638, upload-time = "2026-02-09T12:58:32.599Z" }, + { url = "https://files.pythonhosted.org/packages/a7/e4/c884a405d6ead1370433dad1e3720216b4f9fd8ef5b64bfd984a2a60a11a/coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056", size = 220246, upload-time = "2026-02-09T12:58:34.181Z" }, + { url = "https://files.pythonhosted.org/packages/81/5c/4d7ed8b23b233b0fffbc9dfec53c232be2e695468523242ea9fd30f97ad2/coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc", size = 220514, upload-time = "2026-02-09T12:58:35.704Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6f/3284d4203fd2f28edd73034968398cd2d4cb04ab192abc8cff007ea35679/coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9", size = 261877, upload-time = "2026-02-09T12:58:37.864Z" }, + { url = "https://files.pythonhosted.org/packages/09/aa/b672a647bbe1556a85337dc95bfd40d146e9965ead9cc2fe81bde1e5cbce/coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf", size = 264004, upload-time = "2026-02-09T12:58:39.492Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/a1/aa384dbe9181f98bba87dd23dda436f0c6cf2e148aecbb4e50fc51c1a656/coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55", size = 266408, upload-time = "2026-02-09T12:58:41.852Z" }, + { url = "https://files.pythonhosted.org/packages/53/5e/5150bf17b4019bc600799f376bb9606941e55bd5a775dc1e096b6ffea952/coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72", size = 267544, upload-time = "2026-02-09T12:58:44.093Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/f1de5c675987a4a7a672250d2c5c9d73d289dbf13410f00ed7181d8017dd/coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a", size = 260980, upload-time = "2026-02-09T12:58:45.721Z" }, + { url = "https://files.pythonhosted.org/packages/b3/e3/fe758d01850aa172419a6743fe76ba8b92c29d181d4f676ffe2dae2ba631/coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6", size = 263871, upload-time = "2026-02-09T12:58:47.334Z" }, + { url = "https://files.pythonhosted.org/packages/b6/76/b829869d464115e22499541def9796b25312b8cf235d3bb00b39f1675395/coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3", size = 261472, upload-time = "2026-02-09T12:58:48.995Z" }, + { url = "https://files.pythonhosted.org/packages/14/9e/caedb1679e73e2f6ad240173f55218488bfe043e38da577c4ec977489915/coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750", size = 265210, upload-time = "2026-02-09T12:58:51.178Z" }, 
+ { url = "https://files.pythonhosted.org/packages/3a/10/0dd02cb009b16ede425b49ec344aba13a6ae1dc39600840ea6abcb085ac4/coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39", size = 260319, upload-time = "2026-02-09T12:58:53.081Z" }, + { url = "https://files.pythonhosted.org/packages/92/8e/234d2c927af27c6d7a5ffad5bd2cf31634c46a477b4c7adfbfa66baf7ebb/coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0", size = 262638, upload-time = "2026-02-09T12:58:55.258Z" }, + { url = "https://files.pythonhosted.org/packages/2f/64/e5547c8ff6964e5965c35a480855911b61509cce544f4d442caa759a0702/coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea", size = 223040, upload-time = "2026-02-09T12:58:56.936Z" }, + { url = "https://files.pythonhosted.org/packages/c7/96/38086d58a181aac86d503dfa9c47eb20715a79c3e3acbdf786e92e5c09a8/coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932", size = 224148, upload-time = "2026-02-09T12:58:58.645Z" }, + { url = "https://files.pythonhosted.org/packages/ce/72/8d10abd3740a0beb98c305e0c3faf454366221c0f37a8bcf8f60020bb65a/coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b", size = 222172, upload-time = "2026-02-09T12:59:00.396Z" }, + { url = "https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" }, +] + +[[package]] +name = "croniter" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "python-dateutil" }, + { name 
= "pytz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/2f/44d1ae153a0e27be56be43465e5cb39b9650c781e001e7864389deb25090/croniter-6.0.0.tar.gz", hash = "sha256:37c504b313956114a983ece2c2b07790b1f1094fe9d81cc94739214748255577", size = 64481, upload-time = "2024-12-17T17:17:47.32Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/4b/290b4c3efd6417a8b0c284896de19b1d5855e6dbdb97d2a35e68fa42de85/croniter-6.0.0-py2.py3-none-any.whl", hash = "sha256:2f878c3856f17896979b2a4379ba1f09c83e374931ea15cc835c5dd2eee9b368", size = 25468, upload-time = "2024-12-17T17:17:45.359Z" }, ] [[package]] name = "cryptography" -version = "46.0.3" +version = "46.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, - { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, - { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, - { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, - { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, - { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, - { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, - { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, - { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = 
"sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, - { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, - { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, - { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, - { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, - { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, - { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = 
"2025-10-15T23:17:18.04Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, - { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, - { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, - { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, - { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, - { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = 
"2025-10-15T23:17:28.06Z" }, - { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, - { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, - { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, - { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, - { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, - { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, - { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, - { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, - { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, - { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, - { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, - { url = 
"https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, - { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, - { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, - { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, - { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, - { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, - { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, - { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, - { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" }, + { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, + { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" }, + { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, + { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, + { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, + { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" }, + { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" }, + { url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" }, + { url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" }, + { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" }, + { url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" }, + { url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" }, + { url = "https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, + { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, + { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" }, + { url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, + { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, + { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" }, + { url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, + { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, + { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" }, ] [[package]] name = "cyclopts" -version = "4.5.0" +version = "4.5.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -503,9 +532,9 @@ dependencies = [ { name = "rich" }, { name = "rich-rst" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/13/7b/663f3285c1ac0e5d0854bd9db2c87caa6fa3d1a063185e3394a6cdca9151/cyclopts-4.5.0.tar.gz", hash = "sha256:717ac4235548b58d500baf7e688aa4d024caf0ee68f61a012ffd5e29db3099f9", size = 161980, upload-time = "2026-01-16T02:07:16.171Z" } +sdist = { url = "https://files.pythonhosted.org/packages/50/cd/1fd03921a95113182e6fdf84af5d47f07aa91c00c03ac074c192b0d4672c/cyclopts-4.5.2.tar.gz", hash = "sha256:7fe01b2d184c55c4555e06a0397602b319d87faa5b086b41913eaeaea52fae16", size = 162381, upload-time = "2026-02-11T16:30:46.051Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/a3/2e00fececc34a99ae3a5d5702a5dd29c5371e4ed016647301a2b9bcc1976/cyclopts-4.5.0-py3-none-any.whl", hash = "sha256:305b9aa90a9cd0916f0a450b43e50ad5df9c252680731a0719edfb9b20381bf5", size = 199772, upload-time = "2026-01-16T02:07:14.707Z" }, + { url = "https://files.pythonhosted.org/packages/2b/03/f906829bcfcbb945f19d6a64240ffb66a31d69ca5533e95882f0efc9c13c/cyclopts-4.5.2-py3-none-any.whl", hash = "sha256:ee56ee23c2c81abc34b66b5aa8fd2698ca699740054e84e534449ec3eb7f944d", size = 200165, upload-time = "2026-02-11T16:30:46.942Z" }, ] [[package]] @@ -555,15 +584,15 @@ wheels = [ [[package]] name = "editor" -version = "1.6.6" +version = "1.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "runs" }, { name = "xmod" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/2a/92/734a4ab345914259cb6146fd36512608ea42be16195375c379046f33283d/editor-1.6.6.tar.gz", hash = "sha256:bb6989e872638cd119db9a4fce284cd8e13c553886a1c044c6b8d8a160c871f8", size = 3197, upload-time = "2024-01-25T10:44:59.909Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d9/4f/00e0b75d86bb1e6a943c08942619e3f31de54a0dce3b33b14ae3c2af2dc0/editor-1.7.0.tar.gz", hash = "sha256:979b25e3f7e0386af4478e7392ecb99e6c16a42db7c4336d6b16658fa0449fb3", size = 2355, upload-time = "2026-02-03T13:51:30.717Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/c2/4bc8cd09b14e28ce3f406a8b05761bed0d785d1ca8c2a5c6684d884c66a2/editor-1.6.6-py3-none-any.whl", hash = "sha256:e818e6913f26c2a81eadef503a2741d7cca7f235d20e217274a009ecd5a74abf", size = 4017, upload-time = "2024-01-25T10:44:58.66Z" }, + { url = "https://files.pythonhosted.org/packages/a6/b5/f566c215c58d7d2b8d39104b6cda00f31a18bb480486cb7f0d68de6131f9/editor-1.7.0-py3-none-any.whl", hash = "sha256:8b1ad5e99846b076b96b18f7bc39ae21952c8e20d375c3f8f98fd02cacf19367", size = 3383, upload-time = "2026-02-03T13:51:29.075Z" }, ] [[package]] @@ -620,7 +649,7 @@ lua = [ [[package]] name = "fastmcp" -version = "2.14.4" +version = "2.14.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "authlib" }, @@ -642,18 +671,18 @@ dependencies = [ { name = "uvicorn" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fd/a9/a57d5e5629ebd4ef82b495a7f8e346ce29ef80cc86b15c8c40570701b94d/fastmcp-2.14.4.tar.gz", hash = "sha256:c01f19845c2adda0a70d59525c9193be64a6383014c8d40ce63345ac664053ff", size = 8302239, upload-time = "2026-01-22T17:29:37.024Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/32/982678d44f13849530a74ab101ed80e060c2ee6cf87471f062dcf61705fd/fastmcp-2.14.5.tar.gz", hash = "sha256:38944dc582c541d55357082bda2241cedb42cd3a78faea8a9d6a2662c62a42d7", size = 8296329, upload-time = 
"2026-02-03T15:35:21.005Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/41/c4d407e2218fd60d84acb6cc5131d28ff876afecf325e3fd9d27b8318581/fastmcp-2.14.4-py3-none-any.whl", hash = "sha256:5858cff5e4c8ea8107f9bca2609d71d6256e0fce74495912f6e51625e466c49a", size = 417788, upload-time = "2026-01-22T17:29:35.159Z" }, + { url = "https://files.pythonhosted.org/packages/e5/c1/1a35ec68ff76ea8443aa115b18bcdee748a4ada2124537ee90522899ff9f/fastmcp-2.14.5-py3-none-any.whl", hash = "sha256:d81e8ec813f5089d3624bec93944beaefa86c0c3a4ef1111cbef676a761ebccf", size = 417784, upload-time = "2026-02-03T15:35:18.489Z" }, ] [[package]] name = "filelock" -version = "3.20.3" +version = "3.24.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4f/8a/b24ff2c2d7f20ce930b5efe91e7260247d185d8939707721168ad204e465/filelock-3.24.1.tar.gz", hash = "sha256:3440181dd03f8904c108c8e9f5b11d1663e9fc960f1c837586a11f1c5c041e54", size = 37452, upload-time = "2026-02-15T22:03:16.564Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" }, + { url = "https://files.pythonhosted.org/packages/97/64/3613e89811e79aca8d0d4f2c984fc66336bc9d83529c1cbe02f5df010d0a/filelock-3.24.1-py3-none-any.whl", hash = "sha256:7c59f595e3cf4887dc95b403a896849da49ed183d7c9d7ee855646ca99f10698", size = 24153, upload-time = "2026-02-15T22:03:15.262Z" }, ] [[package]] @@ -1262,101 +1291,101 @@ wheels = [ [[package]] name = 
"multidict" -version = "6.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" }, - { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, - { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, - { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" }, - { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time 
= "2025-10-06T14:49:26.778Z" }, - { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363, upload-time = "2025-10-06T14:49:28.562Z" }, - { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" }, - { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" }, - { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" }, - { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" }, - { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = 
"sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" }, - { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = "2025-10-06T14:49:37.631Z" }, - { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, - { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, - { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, - { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, - { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, - { url = 
"https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, - { url = "https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6", size = 76135, upload-time = "2025-10-06T14:49:54.26Z" }, - { url = "https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159", size = 45117, upload-time = "2025-10-06T14:49:55.82Z" }, - { url = "https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca", size = 43472, upload-time = "2025-10-06T14:49:57.048Z" }, - { url = "https://files.pythonhosted.org/packages/75/3f/e2639e80325af0b6c6febdf8e57cc07043ff15f57fa1ef808f4ccb5ac4cd/multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8", size = 249342, upload-time = "2025-10-06T14:49:58.368Z" }, - { url = "https://files.pythonhosted.org/packages/5d/cc/84e0585f805cbeaa9cbdaa95f9a3d6aed745b9d25700623ac89a6ecff400/multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60", size = 257082, upload-time = "2025-10-06T14:49:59.89Z" }, - { url = 
"https://files.pythonhosted.org/packages/b0/9c/ac851c107c92289acbbf5cfb485694084690c1b17e555f44952c26ddc5bd/multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4", size = 240704, upload-time = "2025-10-06T14:50:01.485Z" }, - { url = "https://files.pythonhosted.org/packages/50/cc/5f93e99427248c09da95b62d64b25748a5f5c98c7c2ab09825a1d6af0e15/multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f", size = 266355, upload-time = "2025-10-06T14:50:02.955Z" }, - { url = "https://files.pythonhosted.org/packages/ec/0c/2ec1d883ceb79c6f7f6d7ad90c919c898f5d1c6ea96d322751420211e072/multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf", size = 267259, upload-time = "2025-10-06T14:50:04.446Z" }, - { url = "https://files.pythonhosted.org/packages/c6/2d/f0b184fa88d6630aa267680bdb8623fb69cb0d024b8c6f0d23f9a0f406d3/multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32", size = 254903, upload-time = "2025-10-06T14:50:05.98Z" }, - { url = "https://files.pythonhosted.org/packages/06/c9/11ea263ad0df7dfabcad404feb3c0dd40b131bc7f232d5537f2fb1356951/multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036", size = 252365, upload-time = "2025-10-06T14:50:07.511Z" }, - { url = "https://files.pythonhosted.org/packages/41/88/d714b86ee2c17d6e09850c70c9d310abac3d808ab49dfa16b43aba9d53fd/multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec", size = 
250062, upload-time = "2025-10-06T14:50:09.074Z" }, - { url = "https://files.pythonhosted.org/packages/15/fe/ad407bb9e818c2b31383f6131ca19ea7e35ce93cf1310fce69f12e89de75/multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e", size = 249683, upload-time = "2025-10-06T14:50:10.714Z" }, - { url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64", size = 261254, upload-time = "2025-10-06T14:50:12.28Z" }, - { url = "https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd", size = 257967, upload-time = "2025-10-06T14:50:14.16Z" }, - { url = "https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288", size = 250085, upload-time = "2025-10-06T14:50:15.639Z" }, - { url = "https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17", size = 41713, upload-time = "2025-10-06T14:50:17.066Z" }, - { url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390", size = 45915, upload-time = "2025-10-06T14:50:18.264Z" }, - { url = 
"https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e", size = 43077, upload-time = "2025-10-06T14:50:19.853Z" }, - { url = "https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00", size = 83114, upload-time = "2025-10-06T14:50:21.223Z" }, - { url = "https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb", size = 48442, upload-time = "2025-10-06T14:50:22.871Z" }, - { url = "https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b", size = 46885, upload-time = "2025-10-06T14:50:24.258Z" }, - { url = "https://files.pythonhosted.org/packages/46/d1/908f896224290350721597a61a69cd19b89ad8ee0ae1f38b3f5cd12ea2ac/multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c", size = 242588, upload-time = "2025-10-06T14:50:25.716Z" }, - { url = "https://files.pythonhosted.org/packages/ab/67/8604288bbd68680eee0ab568fdcb56171d8b23a01bcd5cb0c8fedf6e5d99/multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1", size = 249966, upload-time = "2025-10-06T14:50:28.192Z" }, - { url = 
"https://files.pythonhosted.org/packages/20/33/9228d76339f1ba51e3efef7da3ebd91964d3006217aae13211653193c3ff/multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b", size = 228618, upload-time = "2025-10-06T14:50:29.82Z" }, - { url = "https://files.pythonhosted.org/packages/f8/2d/25d9b566d10cab1c42b3b9e5b11ef79c9111eaf4463b8c257a3bd89e0ead/multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5", size = 257539, upload-time = "2025-10-06T14:50:31.731Z" }, - { url = "https://files.pythonhosted.org/packages/b6/b1/8d1a965e6637fc33de3c0d8f414485c2b7e4af00f42cab3d84e7b955c222/multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad", size = 256345, upload-time = "2025-10-06T14:50:33.26Z" }, - { url = "https://files.pythonhosted.org/packages/ba/0c/06b5a8adbdeedada6f4fb8d8f193d44a347223b11939b42953eeb6530b6b/multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c", size = 247934, upload-time = "2025-10-06T14:50:34.808Z" }, - { url = "https://files.pythonhosted.org/packages/8f/31/b2491b5fe167ca044c6eb4b8f2c9f3b8a00b24c432c365358eadac5d7625/multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5", size = 245243, upload-time = "2025-10-06T14:50:36.436Z" }, - { url = "https://files.pythonhosted.org/packages/61/1a/982913957cb90406c8c94f53001abd9eafc271cb3e70ff6371590bec478e/multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10", 
size = 235878, upload-time = "2025-10-06T14:50:37.953Z" }, - { url = "https://files.pythonhosted.org/packages/be/c0/21435d804c1a1cf7a2608593f4d19bca5bcbd7a81a70b253fdd1c12af9c0/multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754", size = 243452, upload-time = "2025-10-06T14:50:39.574Z" }, - { url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c", size = 252312, upload-time = "2025-10-06T14:50:41.612Z" }, - { url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762", size = 246935, upload-time = "2025-10-06T14:50:43.972Z" }, - { url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6", size = 243385, upload-time = "2025-10-06T14:50:45.648Z" }, - { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777, upload-time = "2025-10-06T14:50:47.154Z" }, - { url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104, upload-time = "2025-10-06T14:50:48.851Z" }, - { url = 
"https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503, upload-time = "2025-10-06T14:50:50.16Z" }, - { url = "https://files.pythonhosted.org/packages/e2/b1/3da6934455dd4b261d4c72f897e3a5728eba81db59959f3a639245891baa/multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842", size = 75128, upload-time = "2025-10-06T14:50:51.92Z" }, - { url = "https://files.pythonhosted.org/packages/14/2c/f069cab5b51d175a1a2cb4ccdf7a2c2dabd58aa5bd933fa036a8d15e2404/multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b", size = 44410, upload-time = "2025-10-06T14:50:53.275Z" }, - { url = "https://files.pythonhosted.org/packages/42/e2/64bb41266427af6642b6b128e8774ed84c11b80a90702c13ac0a86bb10cc/multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38", size = 43205, upload-time = "2025-10-06T14:50:54.911Z" }, - { url = "https://files.pythonhosted.org/packages/02/68/6b086fef8a3f1a8541b9236c594f0c9245617c29841f2e0395d979485cde/multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128", size = 245084, upload-time = "2025-10-06T14:50:56.369Z" }, - { url = "https://files.pythonhosted.org/packages/15/ee/f524093232007cd7a75c1d132df70f235cfd590a7c9eaccd7ff422ef4ae8/multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34", size = 252667, upload-time = "2025-10-06T14:50:57.991Z" }, - { url = 
"https://files.pythonhosted.org/packages/02/a5/eeb3f43ab45878f1895118c3ef157a480db58ede3f248e29b5354139c2c9/multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99", size = 233590, upload-time = "2025-10-06T14:50:59.589Z" }, - { url = "https://files.pythonhosted.org/packages/6a/1e/76d02f8270b97269d7e3dbd45644b1785bda457b474315f8cf999525a193/multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202", size = 264112, upload-time = "2025-10-06T14:51:01.183Z" }, - { url = "https://files.pythonhosted.org/packages/76/0b/c28a70ecb58963847c2a8efe334904cd254812b10e535aefb3bcce513918/multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1", size = 261194, upload-time = "2025-10-06T14:51:02.794Z" }, - { url = "https://files.pythonhosted.org/packages/b4/63/2ab26e4209773223159b83aa32721b4021ffb08102f8ac7d689c943fded1/multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3", size = 248510, upload-time = "2025-10-06T14:51:04.724Z" }, - { url = "https://files.pythonhosted.org/packages/93/cd/06c1fa8282af1d1c46fd55c10a7930af652afdce43999501d4d68664170c/multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d", size = 248395, upload-time = "2025-10-06T14:51:06.306Z" }, - { url = "https://files.pythonhosted.org/packages/99/ac/82cb419dd6b04ccf9e7e61befc00c77614fc8134362488b553402ecd55ce/multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6", size = 
239520, upload-time = "2025-10-06T14:51:08.091Z" }, - { url = "https://files.pythonhosted.org/packages/fa/f3/a0f9bf09493421bd8716a362e0cd1d244f5a6550f5beffdd6b47e885b331/multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7", size = 245479, upload-time = "2025-10-06T14:51:10.365Z" }, - { url = "https://files.pythonhosted.org/packages/8d/01/476d38fc73a212843f43c852b0eee266b6971f0e28329c2184a8df90c376/multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb", size = 258903, upload-time = "2025-10-06T14:51:12.466Z" }, - { url = "https://files.pythonhosted.org/packages/49/6d/23faeb0868adba613b817d0e69c5f15531b24d462af8012c4f6de4fa8dc3/multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f", size = 252333, upload-time = "2025-10-06T14:51:14.48Z" }, - { url = "https://files.pythonhosted.org/packages/1e/cc/48d02ac22b30fa247f7dad82866e4b1015431092f4ba6ebc7e77596e0b18/multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f", size = 243411, upload-time = "2025-10-06T14:51:16.072Z" }, - { url = "https://files.pythonhosted.org/packages/4a/03/29a8bf5a18abf1fe34535c88adbdfa88c9fb869b5a3b120692c64abe8284/multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885", size = 40940, upload-time = "2025-10-06T14:51:17.544Z" }, - { url = "https://files.pythonhosted.org/packages/82/16/7ed27b680791b939de138f906d5cf2b4657b0d45ca6f5dd6236fdddafb1a/multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c", size = 45087, upload-time = "2025-10-06T14:51:18.875Z" }, - { url = 
"https://files.pythonhosted.org/packages/cd/3c/e3e62eb35a1950292fe39315d3c89941e30a9d07d5d2df42965ab041da43/multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000", size = 42368, upload-time = "2025-10-06T14:51:20.225Z" }, - { url = "https://files.pythonhosted.org/packages/8b/40/cd499bd0dbc5f1136726db3153042a735fffd0d77268e2ee20d5f33c010f/multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63", size = 82326, upload-time = "2025-10-06T14:51:21.588Z" }, - { url = "https://files.pythonhosted.org/packages/13/8a/18e031eca251c8df76daf0288e6790561806e439f5ce99a170b4af30676b/multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718", size = 48065, upload-time = "2025-10-06T14:51:22.93Z" }, - { url = "https://files.pythonhosted.org/packages/40/71/5e6701277470a87d234e433fb0a3a7deaf3bcd92566e421e7ae9776319de/multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2", size = 46475, upload-time = "2025-10-06T14:51:24.352Z" }, - { url = "https://files.pythonhosted.org/packages/fe/6a/bab00cbab6d9cfb57afe1663318f72ec28289ea03fd4e8236bb78429893a/multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e", size = 239324, upload-time = "2025-10-06T14:51:25.822Z" }, - { url = "https://files.pythonhosted.org/packages/2a/5f/8de95f629fc22a7769ade8b41028e3e5a822c1f8904f618d175945a81ad3/multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064", size = 246877, upload-time = "2025-10-06T14:51:27.604Z" }, - { url = 
"https://files.pythonhosted.org/packages/23/b4/38881a960458f25b89e9f4a4fdcb02ac101cfa710190db6e5528841e67de/multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e", size = 225824, upload-time = "2025-10-06T14:51:29.664Z" }, - { url = "https://files.pythonhosted.org/packages/1e/39/6566210c83f8a261575f18e7144736059f0c460b362e96e9cf797a24b8e7/multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd", size = 253558, upload-time = "2025-10-06T14:51:31.684Z" }, - { url = "https://files.pythonhosted.org/packages/00/a3/67f18315100f64c269f46e6c0319fa87ba68f0f64f2b8e7fd7c72b913a0b/multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a", size = 252339, upload-time = "2025-10-06T14:51:33.699Z" }, - { url = "https://files.pythonhosted.org/packages/c8/2a/1cb77266afee2458d82f50da41beba02159b1d6b1f7973afc9a1cad1499b/multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96", size = 244895, upload-time = "2025-10-06T14:51:36.189Z" }, - { url = "https://files.pythonhosted.org/packages/dd/72/09fa7dd487f119b2eb9524946ddd36e2067c08510576d43ff68469563b3b/multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e", size = 241862, upload-time = "2025-10-06T14:51:41.291Z" }, - { url = "https://files.pythonhosted.org/packages/65/92/bc1f8bd0853d8669300f732c801974dfc3702c3eeadae2f60cef54dc69d7/multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599", 
size = 232376, upload-time = "2025-10-06T14:51:43.55Z" }, - { url = "https://files.pythonhosted.org/packages/09/86/ac39399e5cb9d0c2ac8ef6e10a768e4d3bc933ac808d49c41f9dc23337eb/multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394", size = 240272, upload-time = "2025-10-06T14:51:45.265Z" }, - { url = "https://files.pythonhosted.org/packages/3d/b6/fed5ac6b8563ec72df6cb1ea8dac6d17f0a4a1f65045f66b6d3bf1497c02/multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38", size = 248774, upload-time = "2025-10-06T14:51:46.836Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8d/b954d8c0dc132b68f760aefd45870978deec6818897389dace00fcde32ff/multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9", size = 242731, upload-time = "2025-10-06T14:51:48.541Z" }, - { url = "https://files.pythonhosted.org/packages/16/9d/a2dac7009125d3540c2f54e194829ea18ac53716c61b655d8ed300120b0f/multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0", size = 240193, upload-time = "2025-10-06T14:51:50.355Z" }, - { url = "https://files.pythonhosted.org/packages/39/ca/c05f144128ea232ae2178b008d5011d4e2cea86e4ee8c85c2631b1b94802/multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13", size = 48023, upload-time = "2025-10-06T14:51:51.883Z" }, - { url = "https://files.pythonhosted.org/packages/ba/8f/0a60e501584145588be1af5cc829265701ba3c35a64aec8e07cbb71d39bb/multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd", size = 53507, upload-time = "2025-10-06T14:51:53.672Z" }, - { url = 
"https://files.pythonhosted.org/packages/7f/ae/3148b988a9c6239903e786eac19c889fab607c31d6efa7fb2147e5680f23/multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827", size = 44804, upload-time = "2025-10-06T14:51:55.415Z" }, - { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, +version = "6.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1a/c2/c2d94cbe6ac1753f3fc980da97b3d930efe1da3af3c9f5125354436c073d/multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d", size = 102010, upload-time = "2026-01-26T02:46:45.979Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/9c/f20e0e2cf80e4b2e4b1c365bf5fe104ee633c751a724246262db8f1a0b13/multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172", size = 76893, upload-time = "2026-01-26T02:43:52.754Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cf/18ef143a81610136d3da8193da9d80bfe1cb548a1e2d1c775f26b23d024a/multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd", size = 45456, upload-time = "2026-01-26T02:43:53.893Z" }, + { url = "https://files.pythonhosted.org/packages/a9/65/1caac9d4cd32e8433908683446eebc953e82d22b03d10d41a5f0fefe991b/multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7", size = 43872, upload-time = "2026-01-26T02:43:55.041Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/3b/d6bd75dc4f3ff7c73766e04e705b00ed6dbbaccf670d9e05a12b006f5a21/multidict-6.7.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb2a55f408c3043e42b40cc8eecd575afa27b7e0b956dfb190de0f8499a57a53", size = 251018, upload-time = "2026-01-26T02:43:56.198Z" }, + { url = "https://files.pythonhosted.org/packages/fd/80/c959c5933adedb9ac15152e4067c702a808ea183a8b64cf8f31af8ad3155/multidict-6.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb0ce7b2a32d09892b3dd6cc44877a0d02a33241fafca5f25c8b6b62374f8b75", size = 258883, upload-time = "2026-01-26T02:43:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/7ed40adafea3d4f1c8b916e3b5cc3a8e07dfcdcb9cd72800f4ed3ca1b387/multidict-6.7.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c3a32d23520ee37bf327d1e1a656fec76a2edd5c038bf43eddfa0572ec49c60b", size = 242413, upload-time = "2026-01-26T02:43:58.755Z" }, + { url = "https://files.pythonhosted.org/packages/d2/57/b8565ff533e48595503c785f8361ff9a4fde4d67de25c207cd0ba3befd03/multidict-6.7.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9c90fed18bffc0189ba814749fdcc102b536e83a9f738a9003e569acd540a733", size = 268404, upload-time = "2026-01-26T02:44:00.216Z" }, + { url = "https://files.pythonhosted.org/packages/e0/50/9810c5c29350f7258180dfdcb2e52783a0632862eb334c4896ac717cebcb/multidict-6.7.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:da62917e6076f512daccfbbde27f46fed1c98fee202f0559adec8ee0de67f71a", size = 269456, upload-time = "2026-01-26T02:44:02.202Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8d/5e5be3ced1d12966fefb5c4ea3b2a5b480afcea36406559442c6e31d4a48/multidict-6.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:bfde23ef6ed9db7eaee6c37dcec08524cb43903c60b285b172b6c094711b3961", size = 256322, upload-time = "2026-01-26T02:44:03.56Z" }, + { url = "https://files.pythonhosted.org/packages/31/6e/d8a26d81ac166a5592782d208dd90dfdc0a7a218adaa52b45a672b46c122/multidict-6.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3758692429e4e32f1ba0df23219cd0b4fc0a52f476726fff9337d1a57676a582", size = 253955, upload-time = "2026-01-26T02:44:04.845Z" }, + { url = "https://files.pythonhosted.org/packages/59/4c/7c672c8aad41534ba619bcd4ade7a0dc87ed6b8b5c06149b85d3dd03f0cd/multidict-6.7.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:398c1478926eca669f2fd6a5856b6de9c0acf23a2cb59a14c0ba5844fa38077e", size = 251254, upload-time = "2026-01-26T02:44:06.133Z" }, + { url = "https://files.pythonhosted.org/packages/7b/bd/84c24de512cbafbdbc39439f74e967f19570ce7924e3007174a29c348916/multidict-6.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c102791b1c4f3ab36ce4101154549105a53dc828f016356b3e3bcae2e3a039d3", size = 252059, upload-time = "2026-01-26T02:44:07.518Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ba/f5449385510825b73d01c2d4087bf6d2fccc20a2d42ac34df93191d3dd03/multidict-6.7.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a088b62bd733e2ad12c50dad01b7d0166c30287c166e137433d3b410add807a6", size = 263588, upload-time = "2026-01-26T02:44:09.382Z" }, + { url = "https://files.pythonhosted.org/packages/d7/11/afc7c677f68f75c84a69fe37184f0f82fce13ce4b92f49f3db280b7e92b3/multidict-6.7.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d51ff4785d58d3f6c91bdbffcb5e1f7ddfda557727043aa20d20ec4f65e324a", size = 259642, upload-time = "2026-01-26T02:44:10.73Z" }, + { url = "https://files.pythonhosted.org/packages/2b/17/ebb9644da78c4ab36403739e0e6e0e30ebb135b9caf3440825001a0bddcb/multidict-6.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc5907494fccf3e7d3f94f95c91d6336b092b5fc83811720fae5e2765890dfba", size = 251377, upload-time = 
"2026-01-26T02:44:12.042Z" }, + { url = "https://files.pythonhosted.org/packages/ca/a4/840f5b97339e27846c46307f2530a2805d9d537d8b8bd416af031cad7fa0/multidict-6.7.1-cp312-cp312-win32.whl", hash = "sha256:28ca5ce2fd9716631133d0e9a9b9a745ad7f60bac2bccafb56aa380fc0b6c511", size = 41887, upload-time = "2026-01-26T02:44:14.245Z" }, + { url = "https://files.pythonhosted.org/packages/80/31/0b2517913687895f5904325c2069d6a3b78f66cc641a86a2baf75a05dcbb/multidict-6.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcee94dfbd638784645b066074b338bc9cc155d4b4bffa4adce1615c5a426c19", size = 46053, upload-time = "2026-01-26T02:44:15.371Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/aba28e4ee4006ae4c7df8d327d31025d760ffa992ea23812a601d226e682/multidict-6.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:ba0a9fb644d0c1a2194cf7ffb043bd852cea63a57f66fbd33959f7dae18517bf", size = 43307, upload-time = "2026-01-26T02:44:16.852Z" }, + { url = "https://files.pythonhosted.org/packages/f2/22/929c141d6c0dba87d3e1d38fbdf1ba8baba86b7776469f2bc2d3227a1e67/multidict-6.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2b41f5fed0ed563624f1c17630cb9941cf2309d4df00e494b551b5f3e3d67a23", size = 76174, upload-time = "2026-01-26T02:44:18.509Z" }, + { url = "https://files.pythonhosted.org/packages/c7/75/bc704ae15fee974f8fccd871305e254754167dce5f9e42d88a2def741a1d/multidict-6.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84e61e3af5463c19b67ced91f6c634effb89ef8bfc5ca0267f954451ed4bb6a2", size = 45116, upload-time = "2026-01-26T02:44:19.745Z" }, + { url = "https://files.pythonhosted.org/packages/79/76/55cd7186f498ed080a18440c9013011eb548f77ae1b297206d030eb1180a/multidict-6.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:935434b9853c7c112eee7ac891bc4cb86455aa631269ae35442cb316790c1445", size = 43524, upload-time = "2026-01-26T02:44:21.571Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/3c/414842ef8d5a1628d68edee29ba0e5bcf235dbfb3ccd3ea303a7fe8c72ff/multidict-6.7.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432feb25a1cb67fe82a9680b4d65fb542e4635cb3166cd9c01560651ad60f177", size = 249368, upload-time = "2026-01-26T02:44:22.803Z" }, + { url = "https://files.pythonhosted.org/packages/f6/32/befed7f74c458b4a525e60519fe8d87eef72bb1e99924fa2b0f9d97a221e/multidict-6.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e82d14e3c948952a1a85503817e038cba5905a3352de76b9a465075d072fba23", size = 256952, upload-time = "2026-01-26T02:44:24.306Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/c878a44ba877f366630c860fdf74bfb203c33778f12b6ac274936853c451/multidict-6.7.1-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4cfb48c6ea66c83bcaaf7e4dfa7ec1b6bbcf751b7db85a328902796dfde4c060", size = 240317, upload-time = "2026-01-26T02:44:25.772Z" }, + { url = "https://files.pythonhosted.org/packages/68/49/57421b4d7ad2e9e60e25922b08ceb37e077b90444bde6ead629095327a6f/multidict-6.7.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1d540e51b7e8e170174555edecddbd5538105443754539193e3e1061864d444d", size = 267132, upload-time = "2026-01-26T02:44:27.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/fe/ec0edd52ddbcea2a2e89e174f0206444a61440b40f39704e64dc807a70bd/multidict-6.7.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:273d23f4b40f3dce4d6c8a821c741a86dec62cded82e1175ba3d99be128147ed", size = 268140, upload-time = "2026-01-26T02:44:29.588Z" }, + { url = "https://files.pythonhosted.org/packages/b0/73/6e1b01cbeb458807aa0831742232dbdd1fa92bfa33f52a3f176b4ff3dc11/multidict-6.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:9d624335fd4fa1c08a53f8b4be7676ebde19cd092b3895c421045ca87895b429", size = 254277, upload-time = "2026-01-26T02:44:30.902Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b2/5fb8c124d7561a4974c342bc8c778b471ebbeb3cc17df696f034a7e9afe7/multidict-6.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:12fad252f8b267cc75b66e8fc51b3079604e8d43a75428ffe193cd9e2195dfd6", size = 252291, upload-time = "2026-01-26T02:44:32.31Z" }, + { url = "https://files.pythonhosted.org/packages/5a/96/51d4e4e06bcce92577fcd488e22600bd38e4fd59c20cb49434d054903bd2/multidict-6.7.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:03ede2a6ffbe8ef936b92cb4529f27f42be7f56afcdab5ab739cd5f27fb1cbf9", size = 250156, upload-time = "2026-01-26T02:44:33.734Z" }, + { url = "https://files.pythonhosted.org/packages/db/6b/420e173eec5fba721a50e2a9f89eda89d9c98fded1124f8d5c675f7a0c0f/multidict-6.7.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:90efbcf47dbe33dcf643a1e400d67d59abeac5db07dc3f27d6bdeae497a2198c", size = 249742, upload-time = "2026-01-26T02:44:35.222Z" }, + { url = "https://files.pythonhosted.org/packages/44/a3/ec5b5bd98f306bc2aa297b8c6f11a46714a56b1e6ef5ebda50a4f5d7c5fb/multidict-6.7.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5c4b9bfc148f5a91be9244d6264c53035c8a0dcd2f51f1c3c6e30e30ebaa1c84", size = 262221, upload-time = "2026-01-26T02:44:36.604Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f7/e8c0d0da0cd1e28d10e624604e1a36bcc3353aaebdfdc3a43c72bc683a12/multidict-6.7.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:401c5a650f3add2472d1d288c26deebc540f99e2fb83e9525007a74cd2116f1d", size = 258664, upload-time = "2026-01-26T02:44:38.008Z" }, + { url = "https://files.pythonhosted.org/packages/52/da/151a44e8016dd33feed44f730bd856a66257c1ee7aed4f44b649fb7edeb3/multidict-6.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:97891f3b1b3ffbded884e2916cacf3c6fc87b66bb0dde46f7357404750559f33", size = 249490, upload-time = 
"2026-01-26T02:44:39.386Z" }, + { url = "https://files.pythonhosted.org/packages/87/af/a3b86bf9630b732897f6fc3f4c4714b90aa4361983ccbdcd6c0339b21b0c/multidict-6.7.1-cp313-cp313-win32.whl", hash = "sha256:e1c5988359516095535c4301af38d8a8838534158f649c05dd1050222321bcb3", size = 41695, upload-time = "2026-01-26T02:44:41.318Z" }, + { url = "https://files.pythonhosted.org/packages/b2/35/e994121b0e90e46134673422dd564623f93304614f5d11886b1b3e06f503/multidict-6.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:960c83bf01a95b12b08fd54324a4eb1d5b52c88932b5cba5d6e712bb3ed12eb5", size = 45884, upload-time = "2026-01-26T02:44:42.488Z" }, + { url = "https://files.pythonhosted.org/packages/ca/61/42d3e5dbf661242a69c97ea363f2d7b46c567da8eadef8890022be6e2ab0/multidict-6.7.1-cp313-cp313-win_arm64.whl", hash = "sha256:563fe25c678aaba333d5399408f5ec3c383ca5b663e7f774dd179a520b8144df", size = 43122, upload-time = "2026-01-26T02:44:43.664Z" }, + { url = "https://files.pythonhosted.org/packages/6d/b3/e6b21c6c4f314bb956016b0b3ef2162590a529b84cb831c257519e7fde44/multidict-6.7.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:c76c4bec1538375dad9d452d246ca5368ad6e1c9039dadcf007ae59c70619ea1", size = 83175, upload-time = "2026-01-26T02:44:44.894Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/23ecd2abfe0957b234f6c960f4ade497f55f2c16aeb684d4ecdbf1c95791/multidict-6.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:57b46b24b5d5ebcc978da4ec23a819a9402b4228b8a90d9c656422b4bdd8a963", size = 48460, upload-time = "2026-01-26T02:44:46.106Z" }, + { url = "https://files.pythonhosted.org/packages/c4/57/a0ed92b23f3a042c36bc4227b72b97eca803f5f1801c1ab77c8a212d455e/multidict-6.7.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e954b24433c768ce78ab7929e84ccf3422e46deb45a4dc9f93438f8217fa2d34", size = 46930, upload-time = "2026-01-26T02:44:47.278Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/66/02ec7ace29162e447f6382c495dc95826bf931d3818799bbef11e8f7df1a/multidict-6.7.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3bd231490fa7217cc832528e1cd8752a96f0125ddd2b5749390f7c3ec8721b65", size = 242582, upload-time = "2026-01-26T02:44:48.604Z" }, + { url = "https://files.pythonhosted.org/packages/58/18/64f5a795e7677670e872673aca234162514696274597b3708b2c0d276cce/multidict-6.7.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:253282d70d67885a15c8a7716f3a73edf2d635793ceda8173b9ecc21f2fb8292", size = 250031, upload-time = "2026-01-26T02:44:50.544Z" }, + { url = "https://files.pythonhosted.org/packages/c8/ed/e192291dbbe51a8290c5686f482084d31bcd9d09af24f63358c3d42fd284/multidict-6.7.1-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b4c48648d7649c9335cf1927a8b87fa692de3dcb15faa676c6a6f1f1aabda43", size = 228596, upload-time = "2026-01-26T02:44:51.951Z" }, + { url = "https://files.pythonhosted.org/packages/1e/7e/3562a15a60cf747397e7f2180b0a11dc0c38d9175a650e75fa1b4d325e15/multidict-6.7.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98bc624954ec4d2c7cb074b8eefc2b5d0ce7d482e410df446414355d158fe4ca", size = 257492, upload-time = "2026-01-26T02:44:53.902Z" }, + { url = "https://files.pythonhosted.org/packages/24/02/7d0f9eae92b5249bb50ac1595b295f10e263dd0078ebb55115c31e0eaccd/multidict-6.7.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1b99af4d9eec0b49927b4402bcbb58dea89d3e0db8806a4086117019939ad3dd", size = 255899, upload-time = "2026-01-26T02:44:55.316Z" }, + { url = "https://files.pythonhosted.org/packages/00/e3/9b60ed9e23e64c73a5cde95269ef1330678e9c6e34dd4eb6b431b85b5a10/multidict-6.7.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:6aac4f16b472d5b7dc6f66a0d49dd57b0e0902090be16594dc9ebfd3d17c47e7", size = 247970, upload-time = "2026-01-26T02:44:56.783Z" }, + { url = "https://files.pythonhosted.org/packages/3e/06/538e58a63ed5cfb0bd4517e346b91da32fde409d839720f664e9a4ae4f9d/multidict-6.7.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:21f830fe223215dffd51f538e78c172ed7c7f60c9b96a2bf05c4848ad49921c3", size = 245060, upload-time = "2026-01-26T02:44:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/b2/2f/d743a3045a97c895d401e9bd29aaa09b94f5cbdf1bd561609e5a6c431c70/multidict-6.7.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f5dd81c45b05518b9aa4da4aa74e1c93d715efa234fd3e8a179df611cc85e5f4", size = 235888, upload-time = "2026-01-26T02:44:59.57Z" }, + { url = "https://files.pythonhosted.org/packages/38/83/5a325cac191ab28b63c52f14f1131f3b0a55ba3b9aa65a6d0bf2a9b921a0/multidict-6.7.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eb304767bca2bb92fb9c5bd33cedc95baee5bb5f6c88e63706533a1c06ad08c8", size = 243554, upload-time = "2026-01-26T02:45:01.054Z" }, + { url = "https://files.pythonhosted.org/packages/20/1f/9d2327086bd15da2725ef6aae624208e2ef828ed99892b17f60c344e57ed/multidict-6.7.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c9035dde0f916702850ef66460bc4239d89d08df4d02023a5926e7446724212c", size = 252341, upload-time = "2026-01-26T02:45:02.484Z" }, + { url = "https://files.pythonhosted.org/packages/e8/2c/2a1aa0280cf579d0f6eed8ee5211c4f1730bd7e06c636ba2ee6aafda302e/multidict-6.7.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:af959b9beeb66c822380f222f0e0a1889331597e81f1ded7f374f3ecb0fd6c52", size = 246391, upload-time = "2026-01-26T02:45:03.862Z" }, + { url = "https://files.pythonhosted.org/packages/e5/03/7ca022ffc36c5a3f6e03b179a5ceb829be9da5783e6fe395f347c0794680/multidict-6.7.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:41f2952231456154ee479651491e94118229844dd7226541788be783be2b5108", size = 243422, upload-time = 
"2026-01-26T02:45:05.296Z" }, + { url = "https://files.pythonhosted.org/packages/dc/1d/b31650eab6c5778aceed46ba735bd97f7c7d2f54b319fa916c0f96e7805b/multidict-6.7.1-cp313-cp313t-win32.whl", hash = "sha256:df9f19c28adcb40b6aae30bbaa1478c389efd50c28d541d76760199fc1037c32", size = 47770, upload-time = "2026-01-26T02:45:06.754Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/2d2d1d522e51285bd61b1e20df8f47ae1a9d80839db0b24ea783b3832832/multidict-6.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d54ecf9f301853f2c5e802da559604b3e95bb7a3b01a9c295c6ee591b9882de8", size = 53109, upload-time = "2026-01-26T02:45:08.044Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a3/cc409ba012c83ca024a308516703cf339bdc4b696195644a7215a5164a24/multidict-6.7.1-cp313-cp313t-win_arm64.whl", hash = "sha256:5a37ca18e360377cfda1d62f5f382ff41f2b8c4ccb329ed974cc2e1643440118", size = 45573, upload-time = "2026-01-26T02:45:09.349Z" }, + { url = "https://files.pythonhosted.org/packages/91/cc/db74228a8be41884a567e88a62fd589a913708fcf180d029898c17a9a371/multidict-6.7.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8f333ec9c5eb1b7105e3b84b53141e66ca05a19a605368c55450b6ba208cb9ee", size = 75190, upload-time = "2026-01-26T02:45:10.651Z" }, + { url = "https://files.pythonhosted.org/packages/d5/22/492f2246bb5b534abd44804292e81eeaf835388901f0c574bac4eeec73c5/multidict-6.7.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a407f13c188f804c759fc6a9f88286a565c242a76b27626594c133b82883b5c2", size = 44486, upload-time = "2026-01-26T02:45:11.938Z" }, + { url = "https://files.pythonhosted.org/packages/f1/4f/733c48f270565d78b4544f2baddc2fb2a245e5a8640254b12c36ac7ac68e/multidict-6.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0e161ddf326db5577c3a4cc2d8648f81456e8a20d40415541587a71620d7a7d1", size = 43219, upload-time = "2026-01-26T02:45:14.346Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/bb/2c0c2287963f4259c85e8bcbba9182ced8d7fca65c780c38e99e61629d11/multidict-6.7.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1e3a8bb24342a8201d178c3b4984c26ba81a577c80d4d525727427460a50c22d", size = 245132, upload-time = "2026-01-26T02:45:15.712Z" }, + { url = "https://files.pythonhosted.org/packages/a7/f9/44d4b3064c65079d2467888794dea218d1601898ac50222ab8a9a8094460/multidict-6.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97231140a50f5d447d3164f994b86a0bed7cd016e2682f8650d6a9158e14fd31", size = 252420, upload-time = "2026-01-26T02:45:17.293Z" }, + { url = "https://files.pythonhosted.org/packages/8b/13/78f7275e73fa17b24c9a51b0bd9d73ba64bb32d0ed51b02a746eb876abe7/multidict-6.7.1-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6b10359683bd8806a200fd2909e7c8ca3a7b24ec1d8132e483d58e791d881048", size = 233510, upload-time = "2026-01-26T02:45:19.356Z" }, + { url = "https://files.pythonhosted.org/packages/4b/25/8167187f62ae3cbd52da7893f58cb036b47ea3fb67138787c76800158982/multidict-6.7.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:283ddac99f7ac25a4acadbf004cb5ae34480bbeb063520f70ce397b281859362", size = 264094, upload-time = "2026-01-26T02:45:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e7/69a3a83b7b030cf283fb06ce074a05a02322359783424d7edf0f15fe5022/multidict-6.7.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:538cec1e18c067d0e6103aa9a74f9e832904c957adc260e61cd9d8cf0c3b3d37", size = 260786, upload-time = "2026-01-26T02:45:22.818Z" }, + { url = "https://files.pythonhosted.org/packages/fe/3b/8ec5074bcfc450fe84273713b4b0a0dd47c0249358f5d82eb8104ffe2520/multidict-6.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:7eee46ccb30ff48a1e35bb818cc90846c6be2b68240e42a78599166722cea709", size = 248483, upload-time = "2026-01-26T02:45:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/48/5a/d5a99e3acbca0e29c5d9cba8f92ceb15dce78bab963b308ae692981e3a5d/multidict-6.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa263a02f4f2dd2d11a7b1bb4362aa7cb1049f84a9235d31adf63f30143469a0", size = 248403, upload-time = "2026-01-26T02:45:25.982Z" }, + { url = "https://files.pythonhosted.org/packages/35/48/e58cd31f6c7d5102f2a4bf89f96b9cf7e00b6c6f3d04ecc44417c00a5a3c/multidict-6.7.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:2e1425e2f99ec5bd36c15a01b690a1a2456209c5deed58f95469ffb46039ccbb", size = 240315, upload-time = "2026-01-26T02:45:27.487Z" }, + { url = "https://files.pythonhosted.org/packages/94/33/1cd210229559cb90b6786c30676bb0c58249ff42f942765f88793b41fdce/multidict-6.7.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:497394b3239fc6f0e13a78a3e1b61296e72bf1c5f94b4c4eb80b265c37a131cd", size = 245528, upload-time = "2026-01-26T02:45:28.991Z" }, + { url = "https://files.pythonhosted.org/packages/64/f2/6e1107d226278c876c783056b7db43d800bb64c6131cec9c8dfb6903698e/multidict-6.7.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:233b398c29d3f1b9676b4b6f75c518a06fcb2ea0b925119fb2c1bc35c05e1601", size = 258784, upload-time = "2026-01-26T02:45:30.503Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c1/11f664f14d525e4a1b5327a82d4de61a1db604ab34c6603bb3c2cc63ad34/multidict-6.7.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:93b1818e4a6e0930454f0f2af7dfce69307ca03cdcfb3739bf4d91241967b6c1", size = 251980, upload-time = "2026-01-26T02:45:32.603Z" }, + { url = "https://files.pythonhosted.org/packages/e1/9f/75a9ac888121d0c5bbd4ecf4eead45668b1766f6baabfb3b7f66a410e231/multidict-6.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f33dc2a3abe9249ea5d8360f969ec7f4142e7ac45ee7014d8f8d5acddf178b7b", size = 243602, upload-time = 
"2026-01-26T02:45:34.043Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e7/50bf7b004cc8525d80dbbbedfdc7aed3e4c323810890be4413e589074032/multidict-6.7.1-cp314-cp314-win32.whl", hash = "sha256:3ab8b9d8b75aef9df299595d5388b14530839f6422333357af1339443cff777d", size = 40930, upload-time = "2026-01-26T02:45:36.278Z" }, + { url = "https://files.pythonhosted.org/packages/e0/bf/52f25716bbe93745595800f36fb17b73711f14da59ed0bb2eba141bc9f0f/multidict-6.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:5e01429a929600e7dab7b166062d9bb54a5eed752384c7384c968c2afab8f50f", size = 45074, upload-time = "2026-01-26T02:45:37.546Z" }, + { url = "https://files.pythonhosted.org/packages/97/ab/22803b03285fa3a525f48217963da3a65ae40f6a1b6f6cf2768879e208f9/multidict-6.7.1-cp314-cp314-win_arm64.whl", hash = "sha256:4885cb0e817aef5d00a2e8451d4665c1808378dc27c2705f1bf4ef8505c0d2e5", size = 42471, upload-time = "2026-01-26T02:45:38.889Z" }, + { url = "https://files.pythonhosted.org/packages/e0/6d/f9293baa6146ba9507e360ea0292b6422b016907c393e2f63fc40ab7b7b5/multidict-6.7.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0458c978acd8e6ea53c81eefaddbbee9c6c5e591f41b3f5e8e194780fe026581", size = 82401, upload-time = "2026-01-26T02:45:40.254Z" }, + { url = "https://files.pythonhosted.org/packages/7a/68/53b5494738d83558d87c3c71a486504d8373421c3e0dbb6d0db48ad42ee0/multidict-6.7.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c0abd12629b0af3cf590982c0b413b1e7395cd4ec026f30986818ab95bfaa94a", size = 48143, upload-time = "2026-01-26T02:45:41.635Z" }, + { url = "https://files.pythonhosted.org/packages/37/e8/5284c53310dcdc99ce5d66563f6e5773531a9b9fe9ec7a615e9bc306b05f/multidict-6.7.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:14525a5f61d7d0c94b368a42cff4c9a4e7ba2d52e2672a7b23d84dc86fb02b0c", size = 46507, upload-time = "2026-01-26T02:45:42.99Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/fc/6800d0e5b3875568b4083ecf5f310dcf91d86d52573160834fb4bfcf5e4f/multidict-6.7.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17307b22c217b4cf05033dabefe68255a534d637c6c9b0cc8382718f87be4262", size = 239358, upload-time = "2026-01-26T02:45:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/41/75/4ad0973179361cdf3a113905e6e088173198349131be2b390f9fa4da5fc6/multidict-6.7.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a7e590ff876a3eaf1c02a4dfe0724b6e69a9e9de6d8f556816f29c496046e59", size = 246884, upload-time = "2026-01-26T02:45:47.167Z" }, + { url = "https://files.pythonhosted.org/packages/c3/9c/095bb28b5da139bd41fb9a5d5caff412584f377914bd8787c2aa98717130/multidict-6.7.1-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5fa6a95dfee63893d80a34758cd0e0c118a30b8dcb46372bf75106c591b77889", size = 225878, upload-time = "2026-01-26T02:45:48.698Z" }, + { url = "https://files.pythonhosted.org/packages/07/d0/c0a72000243756e8f5a277b6b514fa005f2c73d481b7d9e47cd4568aa2e4/multidict-6.7.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0543217a6a017692aa6ae5cc39adb75e587af0f3a82288b1492eb73dd6cc2a4", size = 253542, upload-time = "2026-01-26T02:45:50.164Z" }, + { url = "https://files.pythonhosted.org/packages/c0/6b/f69da15289e384ecf2a68837ec8b5ad8c33e973aa18b266f50fe55f24b8c/multidict-6.7.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f99fe611c312b3c1c0ace793f92464d8cd263cc3b26b5721950d977b006b6c4d", size = 252403, upload-time = "2026-01-26T02:45:51.779Z" }, + { url = "https://files.pythonhosted.org/packages/a2/76/b9669547afa5a1a25cd93eaca91c0da1c095b06b6d2d8ec25b713588d3a1/multidict-6.7.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:9004d8386d133b7e6135679424c91b0b854d2d164af6ea3f289f8f2761064609", size = 244889, upload-time = "2026-01-26T02:45:53.27Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a9/a50d2669e506dad33cfc45b5d574a205587b7b8a5f426f2fbb2e90882588/multidict-6.7.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e628ef0e6859ffd8273c69412a2465c4be4a9517d07261b33334b5ec6f3c7489", size = 241982, upload-time = "2026-01-26T02:45:54.919Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bb/1609558ad8b456b4827d3c5a5b775c93b87878fd3117ed3db3423dfbce1b/multidict-6.7.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:841189848ba629c3552035a6a7f5bf3b02eb304e9fea7492ca220a8eda6b0e5c", size = 232415, upload-time = "2026-01-26T02:45:56.981Z" }, + { url = "https://files.pythonhosted.org/packages/d8/59/6f61039d2aa9261871e03ab9dc058a550d240f25859b05b67fd70f80d4b3/multidict-6.7.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce1bbd7d780bb5a0da032e095c951f7014d6b0a205f8318308140f1a6aba159e", size = 240337, upload-time = "2026-01-26T02:45:58.698Z" }, + { url = "https://files.pythonhosted.org/packages/a1/29/fdc6a43c203890dc2ae9249971ecd0c41deaedfe00d25cb6564b2edd99eb/multidict-6.7.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b26684587228afed0d50cf804cc71062cc9c1cdf55051c4c6345d372947b268c", size = 248788, upload-time = "2026-01-26T02:46:00.862Z" }, + { url = "https://files.pythonhosted.org/packages/a9/14/a153a06101323e4cf086ecee3faadba52ff71633d471f9685c42e3736163/multidict-6.7.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9f9af11306994335398293f9958071019e3ab95e9a707dc1383a35613f6abcb9", size = 242842, upload-time = "2026-01-26T02:46:02.824Z" }, + { url = "https://files.pythonhosted.org/packages/41/5f/604ae839e64a4a6efc80db94465348d3b328ee955e37acb24badbcd24d83/multidict-6.7.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b4938326284c4f1224178a560987b6cf8b4d38458b113d9b8c1db1a836e640a2", size = 240237, upload-time = 
"2026-01-26T02:46:05.898Z" }, + { url = "https://files.pythonhosted.org/packages/5f/60/c3a5187bf66f6fb546ff4ab8fb5a077cbdd832d7b1908d4365c7f74a1917/multidict-6.7.1-cp314-cp314t-win32.whl", hash = "sha256:98655c737850c064a65e006a3df7c997cd3b220be4ec8fe26215760b9697d4d7", size = 48008, upload-time = "2026-01-26T02:46:07.468Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f7/addf1087b860ac60e6f382240f64fb99f8bfb532bb06f7c542b83c29ca61/multidict-6.7.1-cp314-cp314t-win_amd64.whl", hash = "sha256:497bde6223c212ba11d462853cfa4f0ae6ef97465033e7dc9940cdb3ab5b48e5", size = 53542, upload-time = "2026-01-26T02:46:08.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/81/4629d0aa32302ef7b2ec65c75a728cc5ff4fa410c50096174c1632e70b3e/multidict-6.7.1-cp314-cp314t-win_arm64.whl", hash = "sha256:2bbd113e0d4af5db41d5ebfe9ccaff89de2120578164f86a5d17d5a576d1e5b2", size = 44719, upload-time = "2026-01-26T02:46:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/81/08/7036c080d7117f28a4af526d794aab6a84463126db031b007717c1a6676e/multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56", size = 12319, upload-time = "2026-01-26T02:46:44.004Z" }, ] [[package]] @@ -1393,62 +1422,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cf/df/d3f1ddf4bb4cb50ed9b1139cc7b1c54c34a1e7ce8fd1b9a37c0d1551a6bd/opentelemetry_api-1.39.1-py3-none-any.whl", hash = "sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950", size = 66356, upload-time = "2025-12-11T13:32:17.304Z" }, ] -[[package]] -name = "opentelemetry-exporter-prometheus" -version = "0.60b1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "opentelemetry-api" }, - { name = "opentelemetry-sdk" }, - { name = "prometheus-client" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/14/39/7dafa6fff210737267bed35a8855b6ac7399b9e582b8cf1f25f842517012/opentelemetry_exporter_prometheus-0.60b1.tar.gz", hash = "sha256:a4011b46906323f71724649d301b4dc188aaa068852e814f4df38cc76eac616b", size = 14976, upload-time = "2025-12-11T13:32:42.944Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/0d/4be6bf5477a3eb3d917d2f17d3c0b6720cd6cb97898444a61d43cc983f5c/opentelemetry_exporter_prometheus-0.60b1-py3-none-any.whl", hash = "sha256:49f59178de4f4590e3cef0b8b95cf6e071aae70e1f060566df5546fad773b8fd", size = 13019, upload-time = "2025-12-11T13:32:23.974Z" }, -] - -[[package]] -name = "opentelemetry-instrumentation" -version = "0.60b1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "opentelemetry-api" }, - { name = "opentelemetry-semantic-conventions" }, - { name = "packaging" }, - { name = "wrapt" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/41/0f/7e6b713ac117c1f5e4e3300748af699b9902a2e5e34c9cf443dde25a01fa/opentelemetry_instrumentation-0.60b1.tar.gz", hash = "sha256:57ddc7974c6eb35865af0426d1a17132b88b2ed8586897fee187fd5b8944bd6a", size = 31706, upload-time = "2025-12-11T13:36:42.515Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/77/d2/6788e83c5c86a2690101681aeef27eeb2a6bf22df52d3f263a22cee20915/opentelemetry_instrumentation-0.60b1-py3-none-any.whl", hash = "sha256:04480db952b48fb1ed0073f822f0ee26012b7be7c3eac1a3793122737c78632d", size = 33096, upload-time = "2025-12-11T13:35:33.067Z" }, -] - -[[package]] -name = "opentelemetry-sdk" -version = "1.39.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "opentelemetry-api" }, - { name = "opentelemetry-semantic-conventions" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/eb/fb/c76080c9ba07e1e8235d24cdcc4d125ef7aa3edf23eb4e497c2e50889adc/opentelemetry_sdk-1.39.1.tar.gz", hash = 
"sha256:cf4d4563caf7bff906c9f7967e2be22d0d6b349b908be0d90fb21c8e9c995cc6", size = 171460, upload-time = "2025-12-11T13:32:49.369Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/98/e91cf858f203d86f4eccdf763dcf01cf03f1dae80c3750f7e635bfa206b6/opentelemetry_sdk-1.39.1-py3-none-any.whl", hash = "sha256:4d5482c478513ecb0a5d938dcc61394e647066e0cc2676bee9f3af3f3f45f01c", size = 132565, upload-time = "2025-12-11T13:32:35.069Z" }, -] - -[[package]] -name = "opentelemetry-semantic-conventions" -version = "0.60b1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "opentelemetry-api" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/91/df/553f93ed38bf22f4b999d9be9c185adb558982214f33eae539d3b5cd0858/opentelemetry_semantic_conventions-0.60b1.tar.gz", hash = "sha256:87c228b5a0669b748c76d76df6c364c369c28f1c465e50f661e39737e84bc953", size = 137935, upload-time = "2025-12-11T13:32:50.487Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/5e/5958555e09635d09b75de3c4f8b9cae7335ca545d77392ffe7331534c402/opentelemetry_semantic_conventions-0.60b1-py3-none-any.whl", hash = "sha256:9fa8c8b0c110da289809292b0591220d3a7b53c1526a23021e977d68597893fb", size = 219982, upload-time = "2025-12-11T13:32:36.955Z" }, -] - [[package]] name = "packaging" version = "26.0" @@ -1497,11 +1470,11 @@ wheels = [ [[package]] name = "platformdirs" -version = "4.5.1" +version = "4.9.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/d5/763666321efaded11112de8b7a7f2273dd8d1e205168e73c334e54b0ab9a/platformdirs-4.9.1.tar.gz", hash = 
"sha256:f310f16e89c4e29117805d8328f7c10876eeff36c94eac879532812110f7d39f", size = 28392, upload-time = "2026-02-14T21:02:44.973Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, + { url = "https://files.pythonhosted.org/packages/70/77/e8c95e95f1d4cdd88c90a96e31980df7e709e51059fac150046ad67fac63/platformdirs-4.9.1-py3-none-any.whl", hash = "sha256:61d8b967d34791c162d30d60737369cbbd77debad5b981c4bfda1842e71e0d66", size = 21307, upload-time = "2026-02-14T21:02:43.492Z" }, ] [[package]] @@ -1765,28 +1738,27 @@ wheels = [ [[package]] name = "pydantic-settings" -version = "2.12.0" +version = "2.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" } +sdist = { url = "https://files.pythonhosted.org/packages/96/a1/ae859ffac5a3338a66b74c5e29e244fd3a3cc483c89feaf9f56c39898d75/pydantic_settings-2.13.0.tar.gz", hash = "sha256:95d875514610e8595672800a5c40b073e99e4aae467fa7c8f9c263061ea2e1fe", size = 222450, upload-time = "2026-02-15T12:11:23.476Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/1a/dd1b9d7e627486cf8e7523d09b70010e05a4bc41414f4ae6ce184cf0afb6/pydantic_settings-2.13.0-py3-none-any.whl", hash = "sha256:d67b576fff39cd086b595441bf9c75d4193ca9c0ed643b90360694d0f1240246", size = 58429, upload-time = "2026-02-15T12:11:22.133Z" }, ] [[package]] name = "pydocket" -version = "0.16.6" +version = "0.17.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cloudpickle" }, + { name = "croniter" }, { name = "fakeredis", extra = ["lua"] }, { name = "opentelemetry-api" }, - { name = "opentelemetry-exporter-prometheus" }, - { name = "opentelemetry-instrumentation" }, { name = "prometheus-client" }, { name = "py-key-value-aio", extra = ["memory", "redis"] }, { name = "python-json-logger" }, @@ -1795,21 +1767,21 @@ dependencies = [ { name = "typer" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/00/26befe5f58df7cd1aeda4a8d10bc7d1908ffd86b80fd995e57a2a7b3f7bd/pydocket-0.16.6.tar.gz", hash = "sha256:b96c96ad7692827214ed4ff25fcf941ec38371314db5dcc1ae792b3e9d3a0294", size = 299054, upload-time = "2026-01-09T22:09:15.405Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/b2/5e12dbe2acf59e4499285e8eee66e8e81b6ba2f553696d2f4ccca0a7978c/pydocket-0.17.7.tar.gz", hash = "sha256:5c77ec6731a167cdcb44174abf793fe63e7b6c1c1c8a799cc6ec7502b361ee77", size = 347071, upload-time = "2026-02-11T21:01:31.744Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/3f/7483e5a6dc6326b6e0c640619b5c5bd1d6e3c20e54d58f5fb86267cef00e/pydocket-0.16.6-py3-none-any.whl", hash = "sha256:683d21e2e846aa5106274e7d59210331b242d7fb0dce5b08d3b82065663ed183", size = 67697, upload-time = "2026-01-09T22:09:13.436Z" }, + { url = "https://files.pythonhosted.org/packages/c9/c7/68f2553819965326f968375f02597d49efe71b309ba9d8fef539aeb51c48/pydocket-0.17.7-py3-none-any.whl", hash = "sha256:d1e0921ac02026c4a0140fc72a3848545f3e91e6e74c6e32c588489017c130b2", size = 
94608, upload-time = "2026-02-11T21:01:30.111Z" }, ] [[package]] name = "pyee" -version = "13.0.0" +version = "13.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/03/1fd98d5841cd7964a27d729ccf2199602fe05eb7a405c1462eb7277945ed/pyee-13.0.0.tar.gz", hash = "sha256:b391e3c5a434d1f5118a25615001dbc8f669cf410ab67d04c4d4e07c55481c37", size = 31250, upload-time = "2025-03-17T18:53:15.955Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/04/e7c1fe4dc78a6fdbfd6c337b1c3732ff543b8a397683ab38378447baa331/pyee-13.0.1.tar.gz", hash = "sha256:0b931f7c14535667ed4c7e0d531716368715e860b988770fc7eb8578d1f67fc8", size = 31655, upload-time = "2026-02-14T21:12:28.044Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/4d/b9add7c84060d4c1906abe9a7e5359f2a60f7a9a4f67268b2766673427d8/pyee-13.0.0-py3-none-any.whl", hash = "sha256:48195a3cddb3b1515ce0695ed76036b5ccc2ef3a9f963ff9f77aec0139845498", size = 15730, upload-time = "2025-03-17T18:53:14.532Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c4/b4d4827c93ef43c01f599ef31453ccc1c132b353284fc6c87d535c233129/pyee-13.0.1-py3-none-any.whl", hash = "sha256:af2f8fede4171ef667dfded53f96e2ed0d6e6bd7ee3bb46437f77e3b57689228", size = 15659, upload-time = "2026-02-14T21:12:26.263Z" }, ] [[package]] @@ -1823,11 +1795,11 @@ wheels = [ [[package]] name = "pyjwt" -version = "2.10.1" +version = "2.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/5a/b46fa56bf322901eee5b0454a34343cdbdae202cd421775a8ee4e42fd519/pyjwt-2.11.0.tar.gz", hash = 
"sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623", size = 98019, upload-time = "2026-01-30T19:59:55.694Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, + { url = "https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469", size = 28224, upload-time = "2026-01-30T19:59:54.539Z" }, ] [package.optional-dependencies] @@ -1900,6 +1872,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, ] +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + [[package]] name = "python-dotenv" version = "1.2.1" @@ -1920,11 +1904,20 @@ wheels = [ [[package]] name = "python-multipart" 
-version = "0.0.21" +version = "0.0.22" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/78/96/804520d0850c7db98e5ccb70282e29208723f0964e88ffd9d0da2f52ea09/python_multipart-0.0.21.tar.gz", hash = "sha256:7137ebd4d3bbf70ea1622998f902b97a29434a9e8dc40eb203bbcf7c2a2cba92", size = 37196, upload-time = "2025-12-17T09:24:22.446Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/01/979e98d542a70714b0cb2b6728ed0b7c46792b695e3eaec3e20711271ca3/python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58", size = 37612, upload-time = "2026-01-25T10:15:56.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/76/03af049af4dcee5d27442f71b6924f01f3efb5d2bd34f23fcd563f2cc5f5/python_multipart-0.0.21-py3-none-any.whl", hash = "sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090", size = 24541, upload-time = "2025-12-17T09:24:21.153Z" }, + { url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" }, +] + +[[package]] +name = "pytz" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, ] 
[[package]] @@ -2009,11 +2002,11 @@ wheels = [ [[package]] name = "redis" -version = "7.1.0" +version = "7.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/c8/983d5c6579a411d8a99bc5823cc5712768859b5ce2c8afe1a65b37832c81/redis-7.1.0.tar.gz", hash = "sha256:b1cc3cfa5a2cb9c2ab3ba700864fb0ad75617b41f01352ce5779dabf6d5f9c3c", size = 4796669, upload-time = "2025-11-19T15:54:39.961Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/80/2971931d27651affa88a44c0ad7b8c4a19dc29c998abb20b23868d319b59/redis-7.1.1.tar.gz", hash = "sha256:a2814b2bda15b39dad11391cc48edac4697214a8a5a4bd10abe936ab4892eb43", size = 4800064, upload-time = "2026-02-09T18:39:40.292Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/f0/8956f8a86b20d7bb9d6ac0187cf4cd54d8065bc9a1a09eb8011d4d326596/redis-7.1.0-py3-none-any.whl", hash = "sha256:23c52b208f92b56103e17c5d06bdc1a6c2c0b3106583985a76a18f83b265de2b", size = 354159, upload-time = "2025-11-19T15:54:38.064Z" }, + { url = "https://files.pythonhosted.org/packages/29/55/1de1d812ba1481fa4b37fb03b4eec0fcb71b6a0d44c04ea3482eb017600f/redis-7.1.1-py3-none-any.whl", hash = "sha256:f77817f16071c2950492c67d40b771fa493eb3fccc630a424a10976dbb794b7a", size = 356057, upload-time = "2026-02-09T18:39:38.602Z" }, ] [[package]] @@ -2047,15 +2040,15 @@ wheels = [ [[package]] name = "rich" -version = "14.3.0" +version = "14.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/aa/9c/137848452e130e71f3ca9a9876751ddcac99e4b1f248ed297996c8c2d728/rich-14.3.0.tar.gz", hash = "sha256:b75e54d3abbcc49137e83e4db54dc86c5e47687eebc95aa0305363231a36e699", size = 230113, upload-time = "2026-01-24T12:25:46.336Z" } +sdist = { url = "https://files.pythonhosted.org/packages/74/99/a4cab2acbb884f80e558b0771e97e21e939c5dfb460f488d19df485e8298/rich-14.3.2.tar.gz", hash 
= "sha256:e712f11c1a562a11843306f5ed999475f09ac31ffb64281f73ab29ffdda8b3b8", size = 230143, upload-time = "2026-02-01T16:20:47.908Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/e0/83cbdcb81b5cbbbe355648dd402b410437806544f48ee218a2354798f012/rich-14.3.0-py3-none-any.whl", hash = "sha256:0b8c1e368c1125b9e993c2d2f1342802525f4853fc6dac2e8e9e88bac0f45bce", size = 309950, upload-time = "2026-01-24T12:25:44.679Z" }, + { url = "https://files.pythonhosted.org/packages/ef/45/615f5babd880b4bd7d405cc0dc348234c5ffb6ed1ea33e152ede08b2072d/rich-14.3.2-py3-none-any.whl", hash = "sha256:08e67c3e90884651da3239ea668222d19bea7b589149d8014a21c633420dbb69", size = 309963, upload-time = "2026-02-01T16:20:46.078Z" }, ] [[package]] @@ -2154,40 +2147,39 @@ wheels = [ [[package]] name = "ruff" -version = "0.14.14" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2e/06/f71e3a86b2df0dfa2d2f72195941cd09b44f87711cb7fa5193732cb9a5fc/ruff-0.14.14.tar.gz", hash = "sha256:2d0f819c9a90205f3a867dbbd0be083bee9912e170fd7d9704cc8ae45824896b", size = 4515732, upload-time = "2026-01-22T22:30:17.527Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/89/20a12e97bc6b9f9f68343952da08a8099c57237aef953a56b82711d55edd/ruff-0.14.14-py3-none-linux_armv6l.whl", hash = "sha256:7cfe36b56e8489dee8fbc777c61959f60ec0f1f11817e8f2415f429552846aed", size = 10467650, upload-time = "2026-01-22T22:30:08.578Z" }, - { url = "https://files.pythonhosted.org/packages/a3/b1/c5de3fd2d5a831fcae21beda5e3589c0ba67eec8202e992388e4b17a6040/ruff-0.14.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6006a0082336e7920b9573ef8a7f52eec837add1265cc74e04ea8a4368cd704c", size = 10883245, upload-time = "2026-01-22T22:30:04.155Z" }, - { url = "https://files.pythonhosted.org/packages/b8/7c/3c1db59a10e7490f8f6f8559d1db8636cbb13dccebf18686f4e3c9d7c772/ruff-0.14.14-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:026c1d25996818f0bf498636686199d9bd0d9d6341c9c2c3b62e2a0198b758de", size = 10231273, upload-time = "2026-01-22T22:30:34.642Z" }, - { url = "https://files.pythonhosted.org/packages/a1/6e/5e0e0d9674be0f8581d1f5e0f0a04761203affce3232c1a1189d0e3b4dad/ruff-0.14.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f666445819d31210b71e0a6d1c01e24447a20b85458eea25a25fe8142210ae0e", size = 10585753, upload-time = "2026-01-22T22:30:31.781Z" }, - { url = "https://files.pythonhosted.org/packages/23/09/754ab09f46ff1884d422dc26d59ba18b4e5d355be147721bb2518aa2a014/ruff-0.14.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c0f18b922c6d2ff9a5e6c3ee16259adc513ca775bcf82c67ebab7cbd9da5bc8", size = 10286052, upload-time = "2026-01-22T22:30:24.827Z" }, - { url = "https://files.pythonhosted.org/packages/c8/cc/e71f88dd2a12afb5f50733851729d6b571a7c3a35bfdb16c3035132675a0/ruff-0.14.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1629e67489c2dea43e8658c3dba659edbfd87361624b4040d1df04c9740ae906", size = 11043637, upload-time = "2026-01-22T22:30:13.239Z" }, - { url = "https://files.pythonhosted.org/packages/67/b2/397245026352494497dac935d7f00f1468c03a23a0c5db6ad8fc49ca3fb2/ruff-0.14.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:27493a2131ea0f899057d49d303e4292b2cae2bb57253c1ed1f256fbcd1da480", size = 12194761, upload-time = "2026-01-22T22:30:22.542Z" }, - { url = "https://files.pythonhosted.org/packages/5b/06/06ef271459f778323112c51b7587ce85230785cd64e91772034ddb88f200/ruff-0.14.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ff589aab3f5b539e35db38425da31a57521efd1e4ad1ae08fc34dbe30bd7df", size = 12005701, upload-time = "2026-01-22T22:30:20.499Z" }, - { url = "https://files.pythonhosted.org/packages/41/d6/99364514541cf811ccc5ac44362f88df66373e9fec1b9d1c4cc830593fe7/ruff-0.14.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:1cc12d74eef0f29f51775f5b755913eb523546b88e2d733e1d701fe65144e89b", size = 11282455, upload-time = "2026-01-22T22:29:59.679Z" }, - { url = "https://files.pythonhosted.org/packages/ca/71/37daa46f89475f8582b7762ecd2722492df26421714a33e72ccc9a84d7a5/ruff-0.14.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb8481604b7a9e75eff53772496201690ce2687067e038b3cc31aaf16aa0b974", size = 11215882, upload-time = "2026-01-22T22:29:57.032Z" }, - { url = "https://files.pythonhosted.org/packages/2c/10/a31f86169ec91c0705e618443ee74ede0bdd94da0a57b28e72db68b2dbac/ruff-0.14.14-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:14649acb1cf7b5d2d283ebd2f58d56b75836ed8c6f329664fa91cdea19e76e66", size = 11180549, upload-time = "2026-01-22T22:30:27.175Z" }, - { url = "https://files.pythonhosted.org/packages/fd/1e/c723f20536b5163adf79bdd10c5f093414293cdf567eed9bdb7b83940f3f/ruff-0.14.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8058d2145566510790eab4e2fad186002e288dec5e0d343a92fe7b0bc1b3e13", size = 10543416, upload-time = "2026-01-22T22:30:01.964Z" }, - { url = "https://files.pythonhosted.org/packages/3e/34/8a84cea7e42c2d94ba5bde1d7a4fae164d6318f13f933d92da6d7c2041ff/ruff-0.14.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e651e977a79e4c758eb807f0481d673a67ffe53cfa92209781dfa3a996cf8412", size = 10285491, upload-time = "2026-01-22T22:30:29.51Z" }, - { url = "https://files.pythonhosted.org/packages/55/ef/b7c5ea0be82518906c978e365e56a77f8de7678c8bb6651ccfbdc178c29f/ruff-0.14.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cc8b22da8d9d6fdd844a68ae937e2a0adf9b16514e9a97cc60355e2d4b219fc3", size = 10733525, upload-time = "2026-01-22T22:30:06.499Z" }, - { url = "https://files.pythonhosted.org/packages/6a/5b/aaf1dfbcc53a2811f6cc0a1759de24e4b03e02ba8762daabd9b6bd8c59e3/ruff-0.14.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:16bc890fb4cc9781bb05beb5ab4cd51be9e7cb376bf1dd3580512b24eb3fda2b", size = 11315626, upload-time = 
"2026-01-22T22:30:36.848Z" }, - { url = "https://files.pythonhosted.org/packages/2c/aa/9f89c719c467dfaf8ad799b9bae0df494513fb21d31a6059cb5870e57e74/ruff-0.14.14-py3-none-win32.whl", hash = "sha256:b530c191970b143375b6a68e6f743800b2b786bbcf03a7965b06c4bf04568167", size = 10502442, upload-time = "2026-01-22T22:30:38.93Z" }, - { url = "https://files.pythonhosted.org/packages/87/44/90fa543014c45560cae1fffc63ea059fb3575ee6e1cb654562197e5d16fb/ruff-0.14.14-py3-none-win_amd64.whl", hash = "sha256:3dde1435e6b6fe5b66506c1dff67a421d0b7f6488d466f651c07f4cab3bf20fd", size = 11630486, upload-time = "2026-01-22T22:30:10.852Z" }, - { url = "https://files.pythonhosted.org/packages/9e/6a/40fee331a52339926a92e17ae748827270b288a35ef4a15c9c8f2ec54715/ruff-0.14.14-py3-none-win_arm64.whl", hash = "sha256:56e6981a98b13a32236a72a8da421d7839221fa308b223b9283312312e5ac76c", size = 10920448, upload-time = "2026-01-22T22:30:15.417Z" }, +version = "0.15.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/dc/4e6ac71b511b141cf626357a3946679abeba4cf67bc7cc5a17920f31e10d/ruff-0.15.1.tar.gz", hash = "sha256:c590fe13fb57c97141ae975c03a1aedb3d3156030cabd740d6ff0b0d601e203f", size = 4540855, upload-time = "2026-02-12T23:09:09.998Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/23/bf/e6e4324238c17f9d9120a9d60aa99a7daaa21204c07fcd84e2ef03bb5fd1/ruff-0.15.1-py3-none-linux_armv6l.whl", hash = "sha256:b101ed7cf4615bda6ffe65bdb59f964e9f4a0d3f85cbf0e54f0ab76d7b90228a", size = 10367819, upload-time = "2026-02-12T23:09:03.598Z" }, + { url = "https://files.pythonhosted.org/packages/b3/ea/c8f89d32e7912269d38c58f3649e453ac32c528f93bb7f4219258be2e7ed/ruff-0.15.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:939c995e9277e63ea632cc8d3fae17aa758526f49a9a850d2e7e758bfef46602", size = 10798618, upload-time = "2026-02-12T23:09:22.928Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/0f/1d0d88bc862624247d82c20c10d4c0f6bb2f346559d8af281674cf327f15/ruff-0.15.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1d83466455fdefe60b8d9c8df81d3c1bbb2115cede53549d3b522ce2bc703899", size = 10148518, upload-time = "2026-02-12T23:08:58.339Z" }, + { url = "https://files.pythonhosted.org/packages/f5/c8/291c49cefaa4a9248e986256df2ade7add79388fe179e0691be06fae6f37/ruff-0.15.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9457e3c3291024866222b96108ab2d8265b477e5b1534c7ddb1810904858d16", size = 10518811, upload-time = "2026-02-12T23:09:31.865Z" }, + { url = "https://files.pythonhosted.org/packages/c3/1a/f5707440e5ae43ffa5365cac8bbb91e9665f4a883f560893829cf16a606b/ruff-0.15.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:92c92b003e9d4f7fbd33b1867bb15a1b785b1735069108dfc23821ba045b29bc", size = 10196169, upload-time = "2026-02-12T23:09:17.306Z" }, + { url = "https://files.pythonhosted.org/packages/2a/ff/26ddc8c4da04c8fd3ee65a89c9fb99eaa5c30394269d424461467be2271f/ruff-0.15.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fe5c41ab43e3a06778844c586251eb5a510f67125427625f9eb2b9526535779", size = 10990491, upload-time = "2026-02-12T23:09:25.503Z" }, + { url = "https://files.pythonhosted.org/packages/fc/00/50920cb385b89413f7cdb4bb9bc8fc59c1b0f30028d8bccc294189a54955/ruff-0.15.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66a6dd6df4d80dc382c6484f8ce1bcceb55c32e9f27a8b94c32f6c7331bf14fb", size = 11843280, upload-time = "2026-02-12T23:09:19.88Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6d/2f5cad8380caf5632a15460c323ae326f1e1a2b5b90a6ee7519017a017ca/ruff-0.15.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a4a42cbb8af0bda9bcd7606b064d7c0bc311a88d141d02f78920be6acb5aa83", size = 11274336, upload-time = "2026-02-12T23:09:14.907Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/1d/5f56cae1d6c40b8a318513599b35ea4b075d7dc1cd1d04449578c29d1d75/ruff-0.15.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ab064052c31dddada35079901592dfba2e05f5b1e43af3954aafcbc1096a5b2", size = 11137288, upload-time = "2026-02-12T23:09:07.475Z" }, + { url = "https://files.pythonhosted.org/packages/cd/20/6f8d7d8f768c93b0382b33b9306b3b999918816da46537d5a61635514635/ruff-0.15.1-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:5631c940fe9fe91f817a4c2ea4e81f47bee3ca4aa646134a24374f3c19ad9454", size = 11070681, upload-time = "2026-02-12T23:08:55.43Z" }, + { url = "https://files.pythonhosted.org/packages/9a/67/d640ac76069f64cdea59dba02af2e00b1fa30e2103c7f8d049c0cff4cafd/ruff-0.15.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:68138a4ba184b4691ccdc39f7795c66b3c68160c586519e7e8444cf5a53e1b4c", size = 10486401, upload-time = "2026-02-12T23:09:27.927Z" }, + { url = "https://files.pythonhosted.org/packages/65/3d/e1429f64a3ff89297497916b88c32a5cc88eeca7e9c787072d0e7f1d3e1e/ruff-0.15.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:518f9af03bfc33c03bdb4cb63fabc935341bb7f54af500f92ac309ecfbba6330", size = 10197452, upload-time = "2026-02-12T23:09:12.147Z" }, + { url = "https://files.pythonhosted.org/packages/78/83/e2c3bade17dad63bf1e1c2ffaf11490603b760be149e1419b07049b36ef2/ruff-0.15.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:da79f4d6a826caaea95de0237a67e33b81e6ec2e25fc7e1993a4015dffca7c61", size = 10693900, upload-time = "2026-02-12T23:09:34.418Z" }, + { url = "https://files.pythonhosted.org/packages/a1/27/fdc0e11a813e6338e0706e8b39bb7a1d61ea5b36873b351acee7e524a72a/ruff-0.15.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3dd86dccb83cd7d4dcfac303ffc277e6048600dfc22e38158afa208e8bf94a1f", size = 11227302, upload-time = "2026-02-12T23:09:36.536Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/58/ac864a75067dcbd3b95be5ab4eb2b601d7fbc3d3d736a27e391a4f92a5c1/ruff-0.15.1-py3-none-win32.whl", hash = "sha256:660975d9cb49b5d5278b12b03bb9951d554543a90b74ed5d366b20e2c57c2098", size = 10462555, upload-time = "2026-02-12T23:09:29.899Z" }, + { url = "https://files.pythonhosted.org/packages/e0/5e/d4ccc8a27ecdb78116feac4935dfc39d1304536f4296168f91ed3ec00cd2/ruff-0.15.1-py3-none-win_amd64.whl", hash = "sha256:c820fef9dd5d4172a6570e5721704a96c6679b80cf7be41659ed439653f62336", size = 11599956, upload-time = "2026-02-12T23:09:01.157Z" }, + { url = "https://files.pythonhosted.org/packages/2a/07/5bda6a85b220c64c65686bc85bd0bbb23b29c62b3a9f9433fa55f17cda93/ruff-0.15.1-py3-none-win_arm64.whl", hash = "sha256:5ff7d5f0f88567850f45081fac8f4ec212be8d0b963e385c3f7d0d2eb4899416", size = 10874604, upload-time = "2026-02-12T23:09:05.515Z" }, ] [[package]] name = "runs" -version = "1.2.2" +version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "xmod" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/26/6d/b9aace390f62db5d7d2c77eafce3d42774f27f1829d24fa9b6f598b3ef71/runs-1.2.2.tar.gz", hash = "sha256:9dc1815e2895cfb3a48317b173b9f1eac9ba5549b36a847b5cc60c3bf82ecef1", size = 5474, upload-time = "2024-01-25T14:44:01.563Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/ae/095cb626504733e288a81f871f86b10530b787d77c50193c170daaca0df1/runs-1.3.0.tar.gz", hash = "sha256:cca304b631dbefec598c7bfbcfb50d6feace6d3a968734b67fd42d3c728f5a05", size = 4585, upload-time = "2026-02-03T15:59:58.974Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/d6/17caf2e4af1dec288477a0cbbe4a96fbc9b8a28457dce3f1f452630ce216/runs-1.2.2-py3-none-any.whl", hash = "sha256:0980dcbc25aba1505f307ac4f0e9e92cbd0be2a15a1e983ee86c24c87b839dfd", size = 7033, upload-time = "2024-01-25T14:43:59.959Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/b6/049c75d399ccf6e25abea0652b85bf7e7e101e0300aa9c1d284ad7061c0b/runs-1.3.0-py3-none-any.whl", hash = "sha256:e71a551cfa8da9ef882cac1d5a108bda78c9edee5b8d87e37c1003da5b6a7bed", size = 6406, upload-time = "2026-02-03T15:59:59.96Z" }, ] [[package]] @@ -2212,6 +2204,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, ] +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + [[package]] name = "sortedcontainers" version = "2.4.0" @@ -2249,41 +2250,41 @@ wheels = [ [[package]] name = "ty" -version = "0.0.13" +version = "0.0.17" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/dc/b607f00916f5a7c52860b84a66dc17bc6988e8445e96b1d6e175a3837397/ty-0.0.13.tar.gz", hash = "sha256:7a1d135a400ca076407ea30012d1f75419634160ed3b9cad96607bf2956b23b3", size = 4999183, upload-time = "2026-01-21T13:21:16.133Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/c3/41ae6346443eedb65b96761abfab890a48ce2aa5a8a27af69c5c5d99064d/ty-0.0.17.tar.gz", hash = 
"sha256:847ed6c120913e280bf9b54d8eaa7a1049708acb8824ad234e71498e8ad09f97", size = 5167209, upload-time = "2026-02-13T13:26:36.835Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/df/3632f1918f4c0a33184f107efc5d436ab6da147fd3d3b94b3af6461efbf4/ty-0.0.13-py3-none-linux_armv6l.whl", hash = "sha256:1b2b8e02697c3a94c722957d712a0615bcc317c9b9497be116ef746615d892f2", size = 9993501, upload-time = "2026-01-21T13:21:26.628Z" }, - { url = "https://files.pythonhosted.org/packages/92/87/6a473ced5ac280c6ce5b1627c71a8a695c64481b99aabc798718376a441e/ty-0.0.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f15cdb8e233e2b5adfce673bb21f4c5e8eaf3334842f7eea3c70ac6fda8c1de5", size = 9860986, upload-time = "2026-01-21T13:21:24.425Z" }, - { url = "https://files.pythonhosted.org/packages/5d/9b/d89ae375cf0a7cd9360e1164ce017f8c753759be63b6a11ed4c944abe8c6/ty-0.0.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:0819e89ac9f0d8af7a062837ce197f0461fee2fc14fd07e2c368780d3a397b73", size = 9350748, upload-time = "2026-01-21T13:21:28.502Z" }, - { url = "https://files.pythonhosted.org/packages/a8/a6/9ad58518056fab344b20c0bb2c1911936ebe195318e8acc3bc45ac1c6b6b/ty-0.0.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de79f481084b7cc7a202ba0d7a75e10970d10ffa4f025b23f2e6b7324b74886", size = 9849884, upload-time = "2026-01-21T13:21:21.886Z" }, - { url = "https://files.pythonhosted.org/packages/b1/c3/8add69095fa179f523d9e9afcc15a00818af0a37f2b237a9b59bc0046c34/ty-0.0.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4fb2154cff7c6e95d46bfaba283c60642616f20d73e5f96d0c89c269f3e1bcec", size = 9822975, upload-time = "2026-01-21T13:21:14.292Z" }, - { url = "https://files.pythonhosted.org/packages/a4/05/4c0927c68a0a6d43fb02f3f0b6c19c64e3461dc8ed6c404dde0efb8058f7/ty-0.0.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00be58d89337c27968a20d58ca553458608c5b634170e2bec82824c2e4cf4d96", size = 10294045, 
upload-time = "2026-01-21T13:21:30.505Z" }, - { url = "https://files.pythonhosted.org/packages/b4/86/6dc190838aba967557fe0bfd494c595d00b5081315a98aaf60c0e632aaeb/ty-0.0.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72435eade1fa58c6218abb4340f43a6c3ff856ae2dc5722a247d3a6dd32e9737", size = 10916460, upload-time = "2026-01-21T13:21:07.788Z" }, - { url = "https://files.pythonhosted.org/packages/04/40/9ead96b7c122e1109dfcd11671184c3506996bf6a649306ec427e81d9544/ty-0.0.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77a548742ee8f621d718159e7027c3b555051d096a49bb580249a6c5fc86c271", size = 10597154, upload-time = "2026-01-21T13:21:18.064Z" }, - { url = "https://files.pythonhosted.org/packages/aa/7d/e832a2c081d2be845dc6972d0c7998914d168ccbc0b9c86794419ab7376e/ty-0.0.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da067c57c289b7cf914669704b552b6207c2cc7f50da4118c3e12388642e6b3f", size = 10410710, upload-time = "2026-01-21T13:21:12.388Z" }, - { url = "https://files.pythonhosted.org/packages/31/e3/898be3a96237a32f05c4c29b43594dc3b46e0eedfe8243058e46153b324f/ty-0.0.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d1b50a01fffa140417fca5a24b658fbe0734074a095d5b6f0552484724474343", size = 9826299, upload-time = "2026-01-21T13:21:00.845Z" }, - { url = "https://files.pythonhosted.org/packages/bb/eb/db2d852ce0ed742505ff18ee10d7d252f3acfd6fc60eca7e9c7a0288a6d8/ty-0.0.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0f33c46f52e5e9378378eca0d8059f026f3c8073ace02f7f2e8d079ddfe5207e", size = 9831610, upload-time = "2026-01-21T13:21:05.842Z" }, - { url = "https://files.pythonhosted.org/packages/9e/61/149f59c8abaddcbcbb0bd13b89c7741ae1c637823c5cf92ed2c644fcadef/ty-0.0.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:168eda24d9a0b202cf3758c2962cc295878842042b7eca9ed2965259f59ce9f2", size = 9978885, upload-time = "2026-01-21T13:21:10.306Z" }, - { url = 
"https://files.pythonhosted.org/packages/a0/cd/026d4e4af60a80918a8d73d2c42b8262dd43ab2fa7b28d9743004cb88d57/ty-0.0.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d4917678b95dc8cb399cc459fab568ba8d5f0f33b7a94bf840d9733043c43f29", size = 10506453, upload-time = "2026-01-21T13:20:56.633Z" }, - { url = "https://files.pythonhosted.org/packages/63/06/8932833a4eca2df49c997a29afb26721612de8078ae79074c8fe87e17516/ty-0.0.13-py3-none-win32.whl", hash = "sha256:c1f2ec40daa405508b053e5b8e440fbae5fdb85c69c9ab0ee078f8bc00eeec3d", size = 9433482, upload-time = "2026-01-21T13:20:58.717Z" }, - { url = "https://files.pythonhosted.org/packages/aa/fd/e8d972d1a69df25c2cecb20ea50e49ad5f27a06f55f1f5f399a563e71645/ty-0.0.13-py3-none-win_amd64.whl", hash = "sha256:8b7b1ab9f187affbceff89d51076038363b14113be29bda2ddfa17116de1d476", size = 10319156, upload-time = "2026-01-21T13:21:03.266Z" }, - { url = "https://files.pythonhosted.org/packages/2d/c2/05fdd64ac003a560d4fbd1faa7d9a31d75df8f901675e5bed1ee2ceeff87/ty-0.0.13-py3-none-win_arm64.whl", hash = "sha256:1c9630333497c77bb9bcabba42971b96ee1f36c601dd3dcac66b4134f9fa38f0", size = 9808316, upload-time = "2026-01-21T13:20:54.053Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/0ef15c22a1c54b0f728ceff3f62d478dbf8b0dcf8ff7b80b954f79584f3e/ty-0.0.17-py3-none-linux_armv6l.whl", hash = "sha256:64a9a16555cc8867d35c2647c2f1afbd3cae55f68fd95283a574d1bb04fe93e0", size = 10192793, upload-time = "2026-02-13T13:27:13.943Z" }, + { url = "https://files.pythonhosted.org/packages/0f/2c/f4c322d9cded56edc016b1092c14b95cf58c8a33b4787316ea752bb9418e/ty-0.0.17-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:eb2dbd8acd5c5a55f4af0d479523e7c7265a88542efe73ed3d696eb1ba7b6454", size = 10051977, upload-time = "2026-02-13T13:26:57.741Z" }, + { url = "https://files.pythonhosted.org/packages/4c/a5/43746c1ff81e784f5fc303afc61fe5bcd85d0fcf3ef65cb2cef78c7486c7/ty-0.0.17-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:f18f5fd927bc628deb9ea2df40f06b5f79c5ccf355db732025a3e8e7152801f6", size = 9564639, upload-time = "2026-02-13T13:26:42.781Z" }, + { url = "https://files.pythonhosted.org/packages/d6/b8/280b04e14a9c0474af574f929fba2398b5e1c123c1e7735893b4cd73d13c/ty-0.0.17-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5383814d1d7a5cc53b3b07661856bab04bb2aac7a677c8d33c55169acdaa83df", size = 10061204, upload-time = "2026-02-13T13:27:00.152Z" }, + { url = "https://files.pythonhosted.org/packages/2a/d7/493e1607d8dfe48288d8a768a2adc38ee27ef50e57f0af41ff273987cda0/ty-0.0.17-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c20423b8744b484f93e7bf2ef8a9724bca2657873593f9f41d08bd9f83444c9", size = 10013116, upload-time = "2026-02-13T13:26:34.543Z" }, + { url = "https://files.pythonhosted.org/packages/80/ef/22f3ed401520afac90dbdf1f9b8b7755d85b0d5c35c1cb35cf5bd11b59c2/ty-0.0.17-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6f5b1aba97db9af86517b911674b02f5bc310750485dc47603a105bd0e83ddd", size = 10533623, upload-time = "2026-02-13T13:26:31.449Z" }, + { url = "https://files.pythonhosted.org/packages/75/ce/744b15279a11ac7138832e3a55595706b4a8a209c9f878e3ab8e571d9032/ty-0.0.17-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:488bce1a9bea80b851a97cd34c4d2ffcd69593d6c3f54a72ae02e5c6e47f3d0c", size = 11069750, upload-time = "2026-02-13T13:26:48.638Z" }, + { url = "https://files.pythonhosted.org/packages/f2/be/1133c91f15a0e00d466c24f80df486d630d95d1b2af63296941f7473812f/ty-0.0.17-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8df66b91ec84239420985ec215e7f7549bfda2ac036a3b3c065f119d1c06825a", size = 10870862, upload-time = "2026-02-13T13:26:54.715Z" }, + { url = "https://files.pythonhosted.org/packages/3e/4a/a2ed209ef215b62b2d3246e07e833081e07d913adf7e0448fc204be443d6/ty-0.0.17-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:002139e807c53002790dfefe6e2f45ab0e04012e76db3d7c8286f96ec121af8f", size = 10628118, upload-time = "2026-02-13T13:26:45.439Z" }, + { url = "https://files.pythonhosted.org/packages/b3/0c/87476004cb5228e9719b98afffad82c3ef1f84334bde8527bcacba7b18cb/ty-0.0.17-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6c4e01f05ce82e5d489ab3900ca0899a56c4ccb52659453780c83e5b19e2b64c", size = 10038185, upload-time = "2026-02-13T13:27:02.693Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/98f0b3ba9aef53c1f0305519536967a4aa793a69ed72677b0a625c5313ac/ty-0.0.17-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2b226dd1e99c0d2152d218c7e440150d1a47ce3c431871f0efa073bbf899e881", size = 10047644, upload-time = "2026-02-13T13:27:05.474Z" }, + { url = "https://files.pythonhosted.org/packages/93/e0/06737bb80aa1a9103b8651d2eb691a7e53f1ed54111152be25f4a02745db/ty-0.0.17-py3-none-musllinux_1_2_i686.whl", hash = "sha256:8b11f1da7859e0ad69e84b3c5ef9a7b055ceed376a432fad44231bdfc48061c2", size = 10231140, upload-time = "2026-02-13T13:27:10.844Z" }, + { url = "https://files.pythonhosted.org/packages/7c/79/e2a606bd8852383ba9abfdd578f4a227bd18504145381a10a5f886b4e751/ty-0.0.17-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c04e196809ff570559054d3e011425fd7c04161529eb551b3625654e5f2434cb", size = 10718344, upload-time = "2026-02-13T13:26:51.66Z" }, + { url = "https://files.pythonhosted.org/packages/c5/2d/2663984ac11de6d78f74432b8b14ba64d170b45194312852b7543cf7fd56/ty-0.0.17-py3-none-win32.whl", hash = "sha256:305b6ed150b2740d00a817b193373d21f0767e10f94ac47abfc3b2e5a5aec809", size = 9672932, upload-time = "2026-02-13T13:27:08.522Z" }, + { url = "https://files.pythonhosted.org/packages/de/b5/39be78f30b31ee9f5a585969930c7248354db90494ff5e3d0756560fb731/ty-0.0.17-py3-none-win_amd64.whl", hash = "sha256:531828267527aee7a63e972f54e5eee21d9281b72baf18e5c2850c6b862add83", size = 10542138, upload-time = "2026-02-13T13:27:17.084Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/b7/f875c729c5d0079640c75bad2c7e5d43edc90f16ba242f28a11966df8f65/ty-0.0.17-py3-none-win_arm64.whl", hash = "sha256:de9810234c0c8d75073457e10a84825b9cd72e6629826b7f01c7a0b266ae25b1", size = 10023068, upload-time = "2026-02-13T13:26:39.637Z" }, ] [[package]] name = "typer" -version = "0.21.1" +version = "0.23.1" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "annotated-doc" }, { name = "click" }, { name = "rich" }, { name = "shellingham" }, - { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/36/bf/8825b5929afd84d0dabd606c67cd57b8388cb3ec385f7ef19c5cc2202069/typer-0.21.1.tar.gz", hash = "sha256:ea835607cd752343b6b2b7ce676893e5a0324082268b48f27aa058bdb7d2145d", size = 110371, upload-time = "2026-01-06T11:21:10.989Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/07/b822e1b307d40e263e8253d2384cf98c51aa2368cc7ba9a07e523a1d964b/typer-0.23.1.tar.gz", hash = "sha256:2070374e4d31c83e7b61362fd859aa683576432fd5b026b060ad6b4cd3b86134", size = 120047, upload-time = "2026-02-13T10:04:30.984Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/1d/d9257dd49ff2ca23ea5f132edf1281a0c4f9de8a762b9ae399b670a59235/typer-0.21.1-py3-none-any.whl", hash = "sha256:7985e89081c636b88d172c2ee0cfe33c253160994d47bdfdc302defd7d1f1d01", size = 47381, upload-time = "2026-01-06T11:21:09.824Z" }, + { url = "https://files.pythonhosted.org/packages/d5/91/9b286ab899c008c2cb05e8be99814807e7fbbd33f0c0c960470826e5ac82/typer-0.23.1-py3-none-any.whl", hash = "sha256:3291ad0d3c701cbf522012faccfbb29352ff16ad262db2139e6b01f15781f14e", size = 56813, upload-time = "2026-02-13T10:04:32.008Z" }, ] [[package]] @@ -2345,11 +2346,11 @@ wheels = [ [[package]] name = "wcwidth" -version = "0.3.3" +version = "0.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/b5/3a/c63d2afd6dc2cad55a44bea48c7db75edde859e320bdceb9351ba63fceb6/wcwidth-0.3.3.tar.gz", hash = "sha256:f8f7d42c8a067d909b80b425342d02c423c5edc546347475e1d402fe3d35bb63", size = 233784, upload-time = "2026-01-24T16:23:58.578Z" } +sdist = { url = "https://files.pythonhosted.org/packages/35/a2/8e3becb46433538a38726c948d3399905a4c7cabd0df578ede5dc51f0ec2/wcwidth-0.6.0.tar.gz", hash = "sha256:cdc4e4262d6ef9a1a57e018384cbeb1208d8abbc64176027e2c2455c81313159", size = 159684, upload-time = "2026-02-06T19:19:40.919Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4f/bc/ab575ebf0254577034d23908299b0d13ea5d7ceb35f43a5c08acf2252826/wcwidth-0.3.3-py3-none-any.whl", hash = "sha256:8e9056c446f21c7393514946d143a748c56aad72476844d3f215f7915276508f", size = 86509, upload-time = "2026-01-24T16:23:56.966Z" }, + { url = "https://files.pythonhosted.org/packages/68/5a/199c59e0a824a3db2b89c5d2dade7ab5f9624dbf6448dc291b46d5ec94d3/wcwidth-0.6.0-py3-none-any.whl", hash = "sha256:1a3a1e510b553315f8e146c54764f4fb6264ffad731b3d78088cdb1478ffbdad", size = 94189, upload-time = "2026-02-06T19:19:39.646Z" }, ] [[package]] @@ -2397,62 +2398,13 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, ] -[[package]] -name = "wrapt" -version = "1.17.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998, upload-time = "2025-08-12T05:51:47.138Z" }, - { url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020, upload-time = "2025-08-12T05:51:35.906Z" }, - { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098, upload-time = "2025-08-12T05:51:57.474Z" }, - { url = "https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828", size = 88036, upload-time = "2025-08-12T05:52:34.784Z" }, - { url = "https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9", size = 88156, upload-time = "2025-08-12T05:52:13.599Z" }, - { url = "https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396", size = 87102, upload-time = "2025-08-12T05:52:14.56Z" }, - { url = 
"https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc", size = 87732, upload-time = "2025-08-12T05:52:36.165Z" }, - { url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe", size = 36705, upload-time = "2025-08-12T05:53:07.123Z" }, - { url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c", size = 38877, upload-time = "2025-08-12T05:53:05.436Z" }, - { url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6", size = 36885, upload-time = "2025-08-12T05:52:54.367Z" }, - { url = "https://files.pythonhosted.org/packages/fc/f6/759ece88472157acb55fc195e5b116e06730f1b651b5b314c66291729193/wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0", size = 54003, upload-time = "2025-08-12T05:51:48.627Z" }, - { url = "https://files.pythonhosted.org/packages/4f/a9/49940b9dc6d47027dc850c116d79b4155f15c08547d04db0f07121499347/wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77", size = 39025, upload-time = "2025-08-12T05:51:37.156Z" }, - { url = "https://files.pythonhosted.org/packages/45/35/6a08de0f2c96dcdd7fe464d7420ddb9a7655a6561150e5fc4da9356aeaab/wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7", size = 39108, upload-time = "2025-08-12T05:51:58.425Z" }, - { url = "https://files.pythonhosted.org/packages/0c/37/6faf15cfa41bf1f3dba80cd3f5ccc6622dfccb660ab26ed79f0178c7497f/wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277", size = 88072, upload-time = "2025-08-12T05:52:37.53Z" }, - { url = "https://files.pythonhosted.org/packages/78/f2/efe19ada4a38e4e15b6dff39c3e3f3f73f5decf901f66e6f72fe79623a06/wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d", size = 88214, upload-time = "2025-08-12T05:52:15.886Z" }, - { url = "https://files.pythonhosted.org/packages/40/90/ca86701e9de1622b16e09689fc24b76f69b06bb0150990f6f4e8b0eeb576/wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa", size = 87105, upload-time = "2025-08-12T05:52:17.914Z" }, - { url = "https://files.pythonhosted.org/packages/fd/e0/d10bd257c9a3e15cbf5523025252cc14d77468e8ed644aafb2d6f54cb95d/wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050", size = 87766, upload-time = "2025-08-12T05:52:39.243Z" }, - { url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8", size = 36711, upload-time = "2025-08-12T05:53:10.074Z" }, - { url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb", 
size = 38885, upload-time = "2025-08-12T05:53:08.695Z" }, - { url = "https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16", size = 36896, upload-time = "2025-08-12T05:52:55.34Z" }, - { url = "https://files.pythonhosted.org/packages/02/a2/cd864b2a14f20d14f4c496fab97802001560f9f41554eef6df201cd7f76c/wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39", size = 54132, upload-time = "2025-08-12T05:51:49.864Z" }, - { url = "https://files.pythonhosted.org/packages/d5/46/d011725b0c89e853dc44cceb738a307cde5d240d023d6d40a82d1b4e1182/wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235", size = 39091, upload-time = "2025-08-12T05:51:38.935Z" }, - { url = "https://files.pythonhosted.org/packages/2e/9e/3ad852d77c35aae7ddebdbc3b6d35ec8013af7d7dddad0ad911f3d891dae/wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c", size = 39172, upload-time = "2025-08-12T05:51:59.365Z" }, - { url = "https://files.pythonhosted.org/packages/c3/f7/c983d2762bcce2326c317c26a6a1e7016f7eb039c27cdf5c4e30f4160f31/wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b", size = 87163, upload-time = "2025-08-12T05:52:40.965Z" }, - { url = "https://files.pythonhosted.org/packages/e4/0f/f673f75d489c7f22d17fe0193e84b41540d962f75fce579cf6873167c29b/wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa", size = 87963, upload-time = "2025-08-12T05:52:20.326Z" }, - { url 
= "https://files.pythonhosted.org/packages/df/61/515ad6caca68995da2fac7a6af97faab8f78ebe3bf4f761e1b77efbc47b5/wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7", size = 86945, upload-time = "2025-08-12T05:52:21.581Z" }, - { url = "https://files.pythonhosted.org/packages/d3/bd/4e70162ce398462a467bc09e768bee112f1412e563620adc353de9055d33/wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4", size = 86857, upload-time = "2025-08-12T05:52:43.043Z" }, - { url = "https://files.pythonhosted.org/packages/2b/b8/da8560695e9284810b8d3df8a19396a6e40e7518059584a1a394a2b35e0a/wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10", size = 37178, upload-time = "2025-08-12T05:53:12.605Z" }, - { url = "https://files.pythonhosted.org/packages/db/c8/b71eeb192c440d67a5a0449aaee2310a1a1e8eca41676046f99ed2487e9f/wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6", size = 39310, upload-time = "2025-08-12T05:53:11.106Z" }, - { url = "https://files.pythonhosted.org/packages/45/20/2cda20fd4865fa40f86f6c46ed37a2a8356a7a2fde0773269311f2af56c7/wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58", size = 37266, upload-time = "2025-08-12T05:52:56.531Z" }, - { url = "https://files.pythonhosted.org/packages/77/ed/dd5cf21aec36c80443c6f900449260b80e2a65cf963668eaef3b9accce36/wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a", size = 56544, upload-time = "2025-08-12T05:51:51.109Z" }, - { url = "https://files.pythonhosted.org/packages/8d/96/450c651cc753877ad100c7949ab4d2e2ecc4d97157e00fa8f45df682456a/wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067", size = 40283, upload-time = "2025-08-12T05:51:39.912Z" }, - { url = "https://files.pythonhosted.org/packages/d1/86/2fcad95994d9b572db57632acb6f900695a648c3e063f2cd344b3f5c5a37/wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454", size = 40366, upload-time = "2025-08-12T05:52:00.693Z" }, - { url = "https://files.pythonhosted.org/packages/64/0e/f4472f2fdde2d4617975144311f8800ef73677a159be7fe61fa50997d6c0/wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e", size = 108571, upload-time = "2025-08-12T05:52:44.521Z" }, - { url = "https://files.pythonhosted.org/packages/cc/01/9b85a99996b0a97c8a17484684f206cbb6ba73c1ce6890ac668bcf3838fb/wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f", size = 113094, upload-time = "2025-08-12T05:52:22.618Z" }, - { url = "https://files.pythonhosted.org/packages/25/02/78926c1efddcc7b3aa0bc3d6b33a822f7d898059f7cd9ace8c8318e559ef/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056", size = 110659, upload-time = "2025-08-12T05:52:24.057Z" }, - { url = "https://files.pythonhosted.org/packages/dc/ee/c414501ad518ac3e6fe184753632fe5e5ecacdcf0effc23f31c1e4f7bfcf/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804", size = 106946, upload-time = "2025-08-12T05:52:45.976Z" }, - { url = "https://files.pythonhosted.org/packages/be/44/a1bd64b723d13bb151d6cc91b986146a1952385e0392a78567e12149c7b4/wrapt-1.17.3-cp314-cp314t-win32.whl", hash = 
"sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977", size = 38717, upload-time = "2025-08-12T05:53:15.214Z" }, - { url = "https://files.pythonhosted.org/packages/79/d9/7cfd5a312760ac4dd8bf0184a6ee9e43c33e47f3dadc303032ce012b8fa3/wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116", size = 41334, upload-time = "2025-08-12T05:53:14.178Z" }, - { url = "https://files.pythonhosted.org/packages/46/78/10ad9781128ed2f99dbc474f43283b13fea8ba58723e98844367531c18e9/wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6", size = 38471, upload-time = "2025-08-12T05:52:57.784Z" }, - { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, -] - [[package]] name = "xmod" -version = "1.8.1" +version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/b2/e3edc608823348e628a919e1d7129e641997afadd946febdd704aecc5881/xmod-1.8.1.tar.gz", hash = "sha256:38c76486b9d672c546d57d8035df0beb7f4a9b088bc3fb2de5431ae821444377", size = 3988, upload-time = "2024-01-04T18:03:17.663Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/3f/0bc3b89c1dd4dee1f954db4c857f8fbe9cdfa8b25efe370b6d78399a93ac/xmod-1.9.0.tar.gz", hash = "sha256:98b2e7e8e659c51b635f4e98faf3fa1f3f96dab2805f19ddd6e352bbb4d23991", size = 3501, upload-time = "2026-02-03T14:34:48.881Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/33/6b/0dc75b64a764ea1cb8e4c32d1fb273c147304d4e5483cd58be482dc62e45/xmod-1.8.1-py3-none-any.whl", hash = "sha256:a24e9458a4853489042522bdca9e50ee2eac5ab75c809a91150a8a7f40670d48", size = 4610, upload-time = 
"2024-01-04T18:03:16.078Z" }, + { url = "https://files.pythonhosted.org/packages/5c/a4/74b9510cf2922fb923f6330fd47c049e9e89d984d6dd445c82a85ce7c4e9/xmod-1.9.0-py3-none-any.whl", hash = "sha256:0a549a055e0391a53e356a63552baa7e562560a6e9423c1437cb53b5d4f697a0", size = 4451, upload-time = "2026-02-03T14:34:48.032Z" }, ] [[package]] From 8f3b70df13e7693579d87662a14f2abc027d8799 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 16 Feb 2026 11:31:58 +0100 Subject: [PATCH 377/565] feat(scraper): refactored scraping tools to use innerText extraction with configurable section selection for person, company, and job profiles. Removed dependency on linkedin-scraper and integrated core functionalities directly. Enhanced error handling and progress reporting in the MCP server. Added the ability to specify scraping scopes for improved time efficiency --- AGENTS.md | 33 ++- README.md | 8 +- btca.config.jsonc | 7 - linkedin_mcp_server/callbacks.py | 17 +- linkedin_mcp_server/cli_main.py | 3 +- linkedin_mcp_server/core/__init__.py | 31 +++ linkedin_mcp_server/core/auth.py | 114 +++++++++ linkedin_mcp_server/core/browser.py | 230 +++++++++++++++++ linkedin_mcp_server/core/exceptions.py | 45 ++++ linkedin_mcp_server/core/utils.py | 113 +++++++++ linkedin_mcp_server/drivers/browser.py | 10 +- linkedin_mcp_server/error_handler.py | 2 +- linkedin_mcp_server/scraping/__init__.py | 17 ++ linkedin_mcp_server/scraping/extractor.py | 285 +++++++++++++++++++++ linkedin_mcp_server/scraping/fields.py | 67 +++++ linkedin_mcp_server/setup.py | 7 +- linkedin_mcp_server/tools/company.py | 100 ++++---- linkedin_mcp_server/tools/job.py | 66 +++-- linkedin_mcp_server/tools/person.py | 59 +++-- pyproject.toml | 3 +- tests/test_browser_driver.py | 2 +- tests/test_error_handler.py | 4 +- tests/test_fields.py | 128 ++++++++++ tests/test_scraping.py | 292 ++++++++++++++++++++++ tests/test_tools.py | 138 +++++++--- uv.lock | 110 +------- 26 files changed, 1603 insertions(+), 288 deletions(-) create 
mode 100644 linkedin_mcp_server/core/__init__.py create mode 100644 linkedin_mcp_server/core/auth.py create mode 100644 linkedin_mcp_server/core/browser.py create mode 100644 linkedin_mcp_server/core/exceptions.py create mode 100644 linkedin_mcp_server/core/utils.py create mode 100644 linkedin_mcp_server/scraping/__init__.py create mode 100644 linkedin_mcp_server/scraping/extractor.py create mode 100644 linkedin_mcp_server/scraping/fields.py create mode 100644 tests/test_fields.py create mode 100644 tests/test_scraping.py diff --git a/AGENTS.md b/AGENTS.md index 3d81c378..bc74e18e 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -39,13 +39,15 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis - `cli_main.py` - Entry point with CLI argument parsing and orchestration - `server.py` - FastMCP server setup and tool registration - `tools/` - LinkedIn scraping tools (person, company, job profiles) -- `drivers/browser.py` - Patchright browser management with persistent profile +- `drivers/browser.py` - Patchright browser management with persistent profile (singleton) +- `core/` - Inlined browser, auth, and utility code (replaces `linkedin_scraper` dependency) +- `scraping/` - innerText extraction engine with Flag-based section selection - `config/` - Configuration management (schema, loaders) - `authentication.py` - LinkedIn profile-based authentication **Tool Categories:** -- **Person Tools** (`tools/person.py`) - Profile scraping with contacts, interests, experiences, education +- **Person Tools** (`tools/person.py`) - Profile scraping with explicit section selection - **Company Tools** (`tools/company.py`) - Company profile and posts extraction - **Job Tools** (`tools/job.py`) - Job posting details and search functionality @@ -53,13 +55,29 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis | Tool | Description | |------|-------------| -| `get_person_profile` | Get profile with contacts (email/phone/social), 
interests, experiences, education | -| `get_company_profile` | Get company info with employees, affiliated companies, showcase pages | -| `get_company_posts` | Get recent posts from company feed with reactions/comments/images | -| `get_job_details` | Get job posting details including description and benefits | +| `get_person_profile` | Get profile with explicit `sections` selection (experience, education, interests, accomplishments, contacts) | +| `get_company_profile` | Get company info with explicit `sections` selection (posts, jobs) | +| `get_company_posts` | Get recent posts from company feed | +| `get_job_details` | Get job posting details | | `search_jobs` | Search jobs by keywords and location | | `close_session` | Close browser session and clean up resources | +**Tool Return Format:** + +All scraping tools return: `{url, sections: {name: raw_text}, pages_visited, sections_requested}` + +**Scraping Architecture (`scraping/`):** + +- `fields.py` - `PersonScrapingFields` and `CompanyScrapingFields` Flag enums +- `extractor.py` - `LinkedInExtractor` class using navigate-scroll-innerText pattern + +**Core Subpackage (`core/`):** + +- `exceptions.py` - Exception hierarchy (AuthenticationError, RateLimitError, etc.) 
+- `browser.py` - `BrowserManager` with persistent context and cookie import/export +- `auth.py` - `is_logged_in()`, `wait_for_manual_login()`, `warm_up_browser()` +- `utils.py` - `detect_rate_limit()`, `scroll_to_bottom()`, `handle_modal_close()` + **Authentication Flow:** - Uses persistent browser profile at `~/.linkedin-mcp/profile/` @@ -81,7 +99,6 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis **Key Dependencies:** - `fastmcp` - MCP server framework -- `linkedin_scraper` - LinkedIn web scraping (v3 with Patchright) - `patchright` - Anti-detection browser automation (Playwright fork) **Configuration:** @@ -123,7 +140,7 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis When you need up-to-date information about technologies used in this project, use btca to query source repositories directly. -**Available resources**: fastmcp, linkedinScraper, patchright, pytest, ruff, ty, uv, inquirer, pythonDotenv, pyperclip, preCommit +**Available resources**: fastmcp, patchright, pytest, ruff, ty, uv, inquirer, pythonDotenv, pyperclip, preCommit ### Usage diff --git a/README.md b/README.md index 5ade6f0e..5240b87f 100644 --- a/README.md +++ b/README.md @@ -40,8 +40,8 @@ What has Anthropic been posting about recently? 
https://www.linkedin.com/company | Tool | Description | Status | |------|-------------|--------| -| `get_person_profile` | Get detailed profile info including work history, education, contacts, interests | Working | -| `get_company_profile` | Extract company information including employees, affiliated companies | Issues | +| `get_person_profile` | Get profile info with explicit section selection (experience, education, interests, accomplishments, contacts) | Working | +| `get_company_profile` | Extract company information with explicit section selection (posts, jobs) | Working | | `get_company_posts` | Get recent posts from a company's LinkedIn feed | Working | | `search_jobs` | Search for jobs with keywords and location filters | Working | | `get_job_details` | Get detailed information about a specific job posting | Working | @@ -315,9 +315,11 @@ docker run -it --rm \ - Claude Desktop has a ~60 second connection timeout - If the Docker image isn't cached, the pull may exceed this timeout - **Fix:** Pre-pull the image before first use: + ```bash docker pull stickerdaniel/linkedin-mcp-server:2.3.0 ``` + - Then restart Claude Desktop **Docker issues:** @@ -465,7 +467,7 @@ Feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-serve ## Acknowledgements -Built with [LinkedIn Scraper](https://github.com/joeyism/linkedin_scraper) by [@joeyism](https://github.com/joeyism) and [FastMCP](https://gofastmcp.com/). +Built with [FastMCP](https://gofastmcp.com/) and [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python). โš ๏ธ Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. diff --git a/btca.config.jsonc b/btca.config.jsonc index 06d78806..27c1236c 100644 --- a/btca.config.jsonc +++ b/btca.config.jsonc @@ -8,13 +8,6 @@ "branch": "main", "specialNotes": "FastMCP server framework. 
Primary MCP library used in this project." }, - { - "name": "linkedinScraper", - "type": "git", - "url": "https://github.com/joeyism/linkedin_scraper", - "branch": "master", - "specialNotes": "LinkedIn scraping library with Playwright support." - }, { "name": "playwright", "type": "git", diff --git a/linkedin_mcp_server/callbacks.py b/linkedin_mcp_server/callbacks.py index d87664ee..31405b87 100644 --- a/linkedin_mcp_server/callbacks.py +++ b/linkedin_mcp_server/callbacks.py @@ -8,7 +8,22 @@ from typing import Any from fastmcp import Context -from linkedin_scraper.callbacks import ProgressCallback + + +class ProgressCallback: + """Base callback class for progress tracking.""" + + async def on_start(self, scraper_type: str, url: str) -> None: + pass + + async def on_progress(self, message: str, percent: int) -> None: + pass + + async def on_complete(self, scraper_type: str, result: Any) -> None: + pass + + async def on_error(self, error: Exception) -> None: + pass class MCPContextProgressCallback(ProgressCallback): diff --git a/linkedin_mcp_server/cli_main.py b/linkedin_mcp_server/cli_main.py index 8de0be90..cc687693 100644 --- a/linkedin_mcp_server/cli_main.py +++ b/linkedin_mcp_server/cli_main.py @@ -14,8 +14,7 @@ import inquirer -from linkedin_scraper import is_logged_in -from linkedin_scraper.core.exceptions import AuthenticationError, RateLimitError +from linkedin_mcp_server.core import AuthenticationError, RateLimitError, is_logged_in from linkedin_mcp_server.authentication import ( clear_profile, diff --git a/linkedin_mcp_server/core/__init__.py b/linkedin_mcp_server/core/__init__.py new file mode 100644 index 00000000..782d76f8 --- /dev/null +++ b/linkedin_mcp_server/core/__init__.py @@ -0,0 +1,31 @@ +"""Core utilities inlined from linkedin_scraper.""" + +from .auth import is_logged_in, wait_for_manual_login, warm_up_browser +from .browser import BrowserManager +from .exceptions import ( + AuthenticationError, + ElementNotFoundError, + 
LinkedInScraperException, + NetworkError, + ProfileNotFoundError, + RateLimitError, + ScrapingError, +) +from .utils import detect_rate_limit, handle_modal_close, scroll_to_bottom + +__all__ = [ + "AuthenticationError", + "BrowserManager", + "ElementNotFoundError", + "LinkedInScraperException", + "NetworkError", + "ProfileNotFoundError", + "RateLimitError", + "ScrapingError", + "detect_rate_limit", + "handle_modal_close", + "is_logged_in", + "scroll_to_bottom", + "wait_for_manual_login", + "warm_up_browser", +] diff --git a/linkedin_mcp_server/core/auth.py b/linkedin_mcp_server/core/auth.py new file mode 100644 index 00000000..feecb8e0 --- /dev/null +++ b/linkedin_mcp_server/core/auth.py @@ -0,0 +1,114 @@ +"""Authentication functions for LinkedIn.""" + +import asyncio +import logging + +from patchright.async_api import Page, TimeoutError as PlaywrightTimeoutError + +from .exceptions import AuthenticationError + +logger = logging.getLogger(__name__) + + +async def warm_up_browser(page: Page) -> None: + """Visit normal sites to appear more human-like before LinkedIn access.""" + sites = [ + "https://www.google.com", + "https://www.wikipedia.org", + "https://www.github.com", + ] + + logger.info("Warming up browser by visiting normal sites...") + + for site in sites: + try: + await page.goto(site, wait_until="domcontentloaded", timeout=10000) + await asyncio.sleep(1) + logger.debug("Visited %s", site) + except Exception as e: + logger.debug("Could not visit %s: %s", site, e) + continue + + logger.info("Browser warm-up complete") + + +async def is_logged_in(page: Page) -> bool: + """Check if currently logged in to LinkedIn. + + Uses a three-tier strategy: + 1. Fail-fast on auth blocker URLs + 2. Check for navigation elements (primary) + 3. 
URL-based fallback for authenticated-only pages + """ + try: + current_url = page.url + + # Step 1: Fail-fast on auth blockers + auth_blockers = [ + "/login", + "/authwall", + "/checkpoint", + "/challenge", + "/uas/login", + "/uas/consumer-email-challenge", + ] + if any(pattern in current_url for pattern in auth_blockers): + return False + + # Step 2: Selector check (PRIMARY) + old_selectors = '.global-nav__primary-link, [data-control-name="nav.settings"]' + old_count = await page.locator(old_selectors).count() + + new_selectors = 'nav a[href*="/feed"], nav button:has-text("Home"), nav a[href*="/mynetwork"]' + new_count = await page.locator(new_selectors).count() + + has_nav_elements = old_count > 0 or new_count > 0 + + # Step 3: URL fallback + authenticated_only_pages = [ + "/feed", + "/mynetwork", + "/messaging", + "/notifications", + ] + is_authenticated_page = any( + pattern in current_url for pattern in authenticated_only_pages + ) + + return has_nav_elements or is_authenticated_page + except PlaywrightTimeoutError: + return False + except Exception: + logger.warning("Unexpected error checking login status", exc_info=True) + return False + + +async def wait_for_manual_login(page: Page, timeout: int = 300000) -> None: + """Wait for user to manually complete login. + + Args: + page: Patchright page object + timeout: Timeout in milliseconds (default: 5 minutes) + + Raises: + AuthenticationError: If timeout or login not completed + """ + logger.info( + "Please complete the login process manually in the browser. " + "Waiting up to 5 minutes..." + ) + + start_time = asyncio.get_event_loop().time() + + while True: + if await is_logged_in(page): + logger.info("Manual login completed successfully") + return + + elapsed = (asyncio.get_event_loop().time() - start_time) * 1000 + if elapsed > timeout: + raise AuthenticationError( + "Manual login timeout. Please try again and complete login faster." 
+ ) + + await asyncio.sleep(1) diff --git a/linkedin_mcp_server/core/browser.py b/linkedin_mcp_server/core/browser.py new file mode 100644 index 00000000..6282bd10 --- /dev/null +++ b/linkedin_mcp_server/core/browser.py @@ -0,0 +1,230 @@ +"""Browser lifecycle management using Patchright with persistent context.""" + +import json +import logging +from pathlib import Path +from typing import Any + +from patchright.async_api import ( + BrowserContext, + Page, + Playwright, + async_playwright, +) + +from .exceptions import NetworkError + +logger = logging.getLogger(__name__) + +_DEFAULT_USER_DATA_DIR = Path.home() / ".linkedin_scraper" / "browser_data" + + +class BrowserManager: + """Async context manager for Patchright browser with persistent profile. + + Session persistence is handled automatically by the persistent browser + context -- all cookies, localStorage, and session state are retained in + the ``user_data_dir`` between runs. + """ + + def __init__( + self, + user_data_dir: str | Path = _DEFAULT_USER_DATA_DIR, + headless: bool = True, + slow_mo: int = 0, + viewport: dict[str, int] | None = None, + user_agent: str | None = None, + **launch_options: Any, + ): + self.user_data_dir = str(Path(user_data_dir).expanduser()) + self.headless = headless + self.slow_mo = slow_mo + self.viewport = viewport or {"width": 1280, "height": 720} + self.user_agent = user_agent + self.launch_options = launch_options + + self._playwright: Playwright | None = None + self._context: BrowserContext | None = None + self._page: Page | None = None + self._is_authenticated = False + + async def __aenter__(self) -> "BrowserManager": + await self.start() + return self + + async def __aexit__( + self, exc_type: object, exc_val: object, exc_tb: object + ) -> None: + await self.close() + + async def start(self) -> None: + """Start Patchright and launch persistent browser context.""" + try: + self._playwright = await async_playwright().start() + + Path(self.user_data_dir).mkdir(parents=True, 
exist_ok=True) + + context_options: dict[str, Any] = { + "headless": self.headless, + "slow_mo": self.slow_mo, + "viewport": self.viewport, + **self.launch_options, + } + + if self.user_agent: + context_options["user_agent"] = self.user_agent + + self._context = await self._playwright.chromium.launch_persistent_context( + self.user_data_dir, + **context_options, + ) + + logger.info( + "Persistent browser launched (headless=%s, user_data_dir=%s)", + self.headless, + self.user_data_dir, + ) + + if self._context.pages: + self._page = self._context.pages[0] + else: + self._page = await self._context.new_page() + + logger.info("Browser context and page ready") + + except Exception as e: + await self.close() + raise NetworkError(f"Failed to start browser: {e}") + + async def close(self) -> None: + """Close persistent context and cleanup resources.""" + try: + if self._context: + await self._context.close() + self._context = None + self._page = None + + if self._playwright: + await self._playwright.stop() + self._playwright = None + + logger.info("Browser closed") + + except Exception as e: + logger.error("Error closing browser: %s", e) + + @property + def page(self) -> Page: + if not self._page: + raise RuntimeError( + "Browser not started. Use async context manager or call start()." 
+ ) + return self._page + + @property + def context(self) -> BrowserContext: + if not self._context: + raise RuntimeError("Browser context not initialized.") + return self._context + + async def set_cookie( + self, name: str, value: str, domain: str = ".linkedin.com" + ) -> None: + if not self._context: + raise RuntimeError("No browser context") + + await self._context.add_cookies( + [{"name": name, "value": value, "domain": domain, "path": "/"}] + ) + logger.debug("Cookie set: %s", name) + + @property + def is_authenticated(self) -> bool: + return self._is_authenticated + + @is_authenticated.setter + def is_authenticated(self, value: bool) -> None: + self._is_authenticated = value + + def _default_cookie_path(self) -> Path: + return Path(self.user_data_dir).parent / "cookies.json" + + @staticmethod + def _normalize_cookie_domain(cookie: Any) -> dict[str, Any]: + """Normalize cookie domain for cross-platform compatibility. + + Playwright reports some LinkedIn cookies with ``.www.linkedin.com`` + domain, but Chromium's internal store uses ``.linkedin.com``. 
+ """ + domain = cookie.get("domain", "") + if domain in (".www.linkedin.com", "www.linkedin.com"): + cookie = {**cookie, "domain": ".linkedin.com"} + return cookie + + async def export_cookies(self, cookie_path: str | Path | None = None) -> bool: + """Export LinkedIn cookies to a portable JSON file.""" + if not self._context: + logger.warning("Cannot export cookies: no browser context") + return False + + path = Path(cookie_path) if cookie_path else self._default_cookie_path() + try: + all_cookies = await self._context.cookies() + cookies = [ + self._normalize_cookie_domain(c) + for c in all_cookies + if "linkedin.com" in c.get("domain", "") + ] + path.write_text(json.dumps(cookies, indent=2)) + logger.info("Exported %d LinkedIn cookies to %s", len(cookies), path) + return True + except Exception: + logger.exception("Failed to export cookies") + return False + + _AUTH_COOKIE_NAMES = frozenset({"li_at", "li_rm"}) + + async def import_cookies(self, cookie_path: str | Path | None = None) -> bool: + """Import auth cookies (li_at, li_rm) from a portable JSON file.""" + if not self._context: + logger.warning("Cannot import cookies: no browser context") + return False + + path = Path(cookie_path) if cookie_path else self._default_cookie_path() + if not path.exists(): + logger.debug("No portable cookie file at %s", path) + return False + + try: + all_cookies = json.loads(path.read_text()) + if not all_cookies: + logger.debug("Cookie file is empty") + return False + + cookies = [ + self._normalize_cookie_domain(c) + for c in all_cookies + if c.get("name") in self._AUTH_COOKIE_NAMES + ] + if not cookies: + logger.warning("No auth cookies (li_at/li_rm) found in %s", path) + return False + + # Clear undecryptable cookies from the persistent store first. 
+ await self._context.clear_cookies() + await self._context.add_cookies(cookies) # type: ignore[arg-type] + logger.info( + "Imported %d auth cookies from %s: %s", + len(cookies), + path, + ", ".join(c["name"] for c in cookies), + ) + return True + except Exception: + logger.exception("Failed to import cookies from %s", path) + return False + + def cookie_file_exists(self, cookie_path: str | Path | None = None) -> bool: + """Check if a portable cookie file exists.""" + path = Path(cookie_path) if cookie_path else self._default_cookie_path() + return path.exists() diff --git a/linkedin_mcp_server/core/exceptions.py b/linkedin_mcp_server/core/exceptions.py new file mode 100644 index 00000000..0186c8df --- /dev/null +++ b/linkedin_mcp_server/core/exceptions.py @@ -0,0 +1,45 @@ +"""Custom exceptions for LinkedIn scraping operations.""" + + +class LinkedInScraperException(Exception): + """Base exception for LinkedIn scraper.""" + + pass + + +class AuthenticationError(LinkedInScraperException): + """Raised when authentication fails.""" + + pass + + +class RateLimitError(LinkedInScraperException): + """Raised when rate limiting is detected.""" + + def __init__(self, message: str, suggested_wait_time: int = 300): + super().__init__(message) + self.suggested_wait_time = suggested_wait_time + + +class ElementNotFoundError(LinkedInScraperException): + """Raised when an expected element is not found.""" + + pass + + +class ProfileNotFoundError(LinkedInScraperException): + """Raised when a profile/page returns 404.""" + + pass + + +class NetworkError(LinkedInScraperException): + """Raised when network-related issues occur.""" + + pass + + +class ScrapingError(LinkedInScraperException): + """Raised when scraping fails for various reasons.""" + + pass diff --git a/linkedin_mcp_server/core/utils.py b/linkedin_mcp_server/core/utils.py new file mode 100644 index 00000000..50997692 --- /dev/null +++ b/linkedin_mcp_server/core/utils.py @@ -0,0 +1,113 @@ +"""Utility functions for 
scraping operations.""" + +import asyncio +import logging + +from patchright.async_api import Page, TimeoutError as PlaywrightTimeoutError + +from .exceptions import RateLimitError + +logger = logging.getLogger(__name__) + + +async def detect_rate_limit(page: Page) -> None: + """Detect if LinkedIn has rate limited the session. + + Raises: + RateLimitError: If rate limiting is detected + """ + # Check URL for security challenges + current_url = page.url + if "linkedin.com/checkpoint" in current_url or "authwall" in current_url: + raise RateLimitError( + "LinkedIn security checkpoint detected. " + "You may need to verify your identity or wait before continuing.", + suggested_wait_time=3600, + ) + + # Check for CAPTCHA + try: + captcha = await page.locator( + 'iframe[title*="captcha" i], iframe[src*="captcha" i]' + ).count() + if captcha > 0: + raise RateLimitError( + "CAPTCHA challenge detected. Manual intervention required.", + suggested_wait_time=3600, + ) + except RateLimitError: + raise + except PlaywrightTimeoutError: + pass + except Exception as e: + logger.debug("Error checking for CAPTCHA: %s", e) + + # Check for rate limit messages + try: + body_text = await page.locator("body").inner_text(timeout=1000) + if body_text: + body_lower = body_text.lower() + if any( + phrase in body_lower + for phrase in [ + "too many requests", + "rate limit", + "slow down", + "try again later", + ] + ): + raise RateLimitError( + "Rate limit message detected on page.", + suggested_wait_time=1800, + ) + except RateLimitError: + raise + except PlaywrightTimeoutError: + pass + + +async def scroll_to_bottom( + page: Page, pause_time: float = 1.0, max_scrolls: int = 10 +) -> None: + """Scroll to the bottom of the page to trigger lazy loading. 
+ + Args: + page: Patchright page object + pause_time: Time to pause between scrolls (seconds) + max_scrolls: Maximum number of scroll attempts + """ + for i in range(max_scrolls): + previous_height = await page.evaluate("document.body.scrollHeight") + await page.evaluate("window.scrollTo(0, document.body.scrollHeight)") + await asyncio.sleep(pause_time) + + new_height = await page.evaluate("document.body.scrollHeight") + if new_height == previous_height: + logger.debug("Reached bottom after %d scrolls", i + 1) + break + + +async def handle_modal_close(page: Page) -> bool: + """Close any popup modals that might be blocking content. + + Returns: + True if a modal was closed, False otherwise + """ + try: + close_button = page.locator( + 'button[aria-label="Dismiss"], ' + 'button[aria-label="Close"], ' + "button.artdeco-modal__dismiss" + ).first + + if await close_button.is_visible(timeout=1000): + await close_button.click() + await asyncio.sleep(0.5) + logger.debug("Closed modal") + return True + except PlaywrightTimeoutError: + pass + except Exception as e: + logger.debug("Error closing modal: %s", e) + + return False diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index e30efb01..cbb8c9fb 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -1,9 +1,9 @@ """ Patchright browser management for LinkedIn scraping. -This module provides async browser lifecycle management using linkedin_scraper v3's -BrowserManager with persistent context. Implements a singleton pattern for browser -reuse across tool calls with automatic profile persistence. +Provides async browser lifecycle management using BrowserManager with persistent +context. Implements a singleton pattern for browser reuse across tool calls with +automatic profile persistence. 
""" import logging @@ -11,12 +11,12 @@ import tempfile from pathlib import Path -from linkedin_scraper import ( +from linkedin_mcp_server.core import ( AuthenticationError, BrowserManager, + detect_rate_limit, is_logged_in, ) -from linkedin_scraper.core import detect_rate_limit from linkedin_mcp_server.config import get_config diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index 73561eab..ba682ffc 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -8,7 +8,7 @@ import logging from typing import Any, Dict -from linkedin_scraper.core.exceptions import ( +from linkedin_mcp_server.core.exceptions import ( AuthenticationError, ElementNotFoundError, LinkedInScraperException, diff --git a/linkedin_mcp_server/scraping/__init__.py b/linkedin_mcp_server/scraping/__init__.py new file mode 100644 index 00000000..25ab79a8 --- /dev/null +++ b/linkedin_mcp_server/scraping/__init__.py @@ -0,0 +1,17 @@ +"""Scraping engine using innerText extraction.""" + +from .extractor import LinkedInExtractor +from .fields import ( + CompanyScrapingFields, + PersonScrapingFields, + parse_company_sections, + parse_person_sections, +) + +__all__ = [ + "CompanyScrapingFields", + "LinkedInExtractor", + "PersonScrapingFields", + "parse_company_sections", + "parse_person_sections", +] diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py new file mode 100644 index 00000000..89d020b7 --- /dev/null +++ b/linkedin_mcp_server/scraping/extractor.py @@ -0,0 +1,285 @@ +"""Core extraction engine using innerText instead of DOM selectors.""" + +import asyncio +import logging +from typing import Any +from urllib.parse import quote_plus + +from patchright.async_api import Page, TimeoutError as PlaywrightTimeoutError + +from linkedin_mcp_server.core.utils import ( + detect_rate_limit, + handle_modal_close, + scroll_to_bottom, +) + +from .fields import ( + COMPANY_SECTION_MAP, + 
PERSON_SECTION_MAP, + CompanyScrapingFields, + PersonScrapingFields, +) + +logger = logging.getLogger(__name__) + +# Delay between page navigations to avoid rate limiting +_NAV_DELAY = 1.0 + + +class LinkedInExtractor: + """Extracts LinkedIn page content via navigate-scroll-innerText pattern.""" + + def __init__(self, page: Page): + self._page = page + + async def extract_page(self, url: str) -> str: + """Navigate to a URL, scroll to load lazy content, and extract innerText. + + Returns empty string on failure (error isolation per section). + """ + try: + await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + await detect_rate_limit(self._page) + + # Wait for main content to render + try: + await self._page.wait_for_selector("main", timeout=5000) + except PlaywrightTimeoutError: + logger.debug("No

element found on %s", url) + + # Dismiss any modals blocking content + await handle_modal_close(self._page) + + # Scroll to trigger lazy loading + await scroll_to_bottom(self._page, pause_time=0.5, max_scrolls=5) + + # Extract text from main content area + text = await self._page.evaluate( + """() => { + const main = document.querySelector('main'); + return main ? main.innerText : document.body.innerText; + }""" + ) + + return text.strip() if text else "" + + except Exception as e: + logger.warning("Failed to extract page %s: %s", url, e) + return "" + + async def _extract_overlay(self, url: str) -> str: + """Extract content from an overlay/modal page (e.g. contact info). + + Falls back to `.artdeco-modal__content` if `
` is empty. + """ + try: + await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + await detect_rate_limit(self._page) + + # Wait for modal content + try: + await self._page.wait_for_selector( + "main, .artdeco-modal__content", timeout=5000 + ) + except PlaywrightTimeoutError: + logger.debug("No overlay content found on %s", url) + + await handle_modal_close(self._page) + + text = await self._page.evaluate( + """() => { + const main = document.querySelector('main'); + const mainText = main ? main.innerText.trim() : ''; + if (mainText) return mainText; + const modal = document.querySelector('.artdeco-modal__content'); + return modal ? modal.innerText.trim() : document.body.innerText.trim(); + }""" + ) + + return text.strip() if text else "" + + except Exception as e: + logger.warning("Failed to extract overlay %s: %s", url, e) + return "" + + async def scrape_person( + self, username: str, fields: PersonScrapingFields + ) -> dict[str, Any]: + """Scrape a person profile with configurable sections. 
+ + Returns: + {url, sections: {name: text}, pages_visited, sections_requested} + """ + base_url = f"https://www.linkedin.com/in/{username}" + sections: dict[str, str] = {} + pages_visited: list[str] = [] + + # Map flags to (section_name, url_suffix, is_overlay) + page_map: list[tuple[PersonScrapingFields, str, str, bool]] = [ + (PersonScrapingFields.BASIC_INFO, "main_profile", "/", False), + ( + PersonScrapingFields.EXPERIENCE, + "experience", + "/details/experience/", + False, + ), + ( + PersonScrapingFields.EDUCATION, + "education", + "/details/education/", + False, + ), + ( + PersonScrapingFields.INTERESTS, + "interests", + "/details/interests/", + False, + ), + ( + PersonScrapingFields.ACCOMPLISHMENTS, + "honors", + "/details/honors/", + False, + ), + ( + PersonScrapingFields.ACCOMPLISHMENTS, + "languages", + "/details/languages/", + False, + ), + ( + PersonScrapingFields.CONTACTS, + "contacts", + "/overlay/contact-info/", + True, + ), + ] + + for flag, section_name, suffix, is_overlay in page_map: + if not (flag & fields): + continue + + url = base_url + suffix + try: + if is_overlay: + text = await self._extract_overlay(url) + else: + text = await self.extract_page(url) + + if text: + sections[section_name] = text + pages_visited.append(url) + except Exception as e: + logger.warning("Error scraping section %s: %s", section_name, e) + pages_visited.append(url) + + # Delay between navigations + await asyncio.sleep(_NAV_DELAY) + + # Build sections_requested from flags + requested = ["main_profile"] + reverse_map = {v: k for k, v in PERSON_SECTION_MAP.items()} + for flag in PersonScrapingFields: + if flag in fields and flag in reverse_map: + requested.append(reverse_map[flag]) + + return { + "url": f"{base_url}/", + "sections": sections, + "pages_visited": pages_visited, + "sections_requested": requested, + } + + async def scrape_company( + self, company_name: str, fields: CompanyScrapingFields + ) -> dict[str, Any]: + """Scrape a company profile with 
configurable sections. + + Returns: + {url, sections: {name: text}, pages_visited, sections_requested} + """ + base_url = f"https://www.linkedin.com/company/{company_name}" + sections: dict[str, str] = {} + pages_visited: list[str] = [] + + page_map: list[tuple[CompanyScrapingFields, str, str]] = [ + (CompanyScrapingFields.ABOUT, "about", "/about/"), + (CompanyScrapingFields.POSTS, "posts", "/posts/"), + (CompanyScrapingFields.JOBS, "jobs", "/jobs/"), + ] + + for flag, section_name, suffix in page_map: + if not (flag & fields): + continue + + url = base_url + suffix + try: + text = await self.extract_page(url) + if text: + sections[section_name] = text + pages_visited.append(url) + except Exception as e: + logger.warning("Error scraping section %s: %s", section_name, e) + pages_visited.append(url) + + await asyncio.sleep(_NAV_DELAY) + + # Build sections_requested from flags + requested = ["about"] + reverse_map = {v: k for k, v in COMPANY_SECTION_MAP.items()} + for flag in CompanyScrapingFields: + if flag in fields and flag in reverse_map: + requested.append(reverse_map[flag]) + + return { + "url": f"{base_url}/", + "sections": sections, + "pages_visited": pages_visited, + "sections_requested": requested, + } + + async def scrape_job(self, job_id: str) -> dict[str, Any]: + """Scrape a single job posting. + + Returns: + {url, sections: {name: text}, pages_visited, sections_requested} + """ + url = f"https://www.linkedin.com/jobs/view/{job_id}/" + text = await self.extract_page(url) + + sections: dict[str, str] = {} + if text: + sections["job_posting"] = text + + return { + "url": url, + "sections": sections, + "pages_visited": [url], + "sections_requested": ["job_posting"], + } + + async def search_jobs( + self, keywords: str, location: str | None = None + ) -> dict[str, Any]: + """Search for jobs and extract the results page. 
+ + Returns: + {url, sections: {name: text}, pages_visited, sections_requested} + """ + params = f"keywords={quote_plus(keywords)}" + if location: + params += f"&location={quote_plus(location)}" + + url = f"https://www.linkedin.com/jobs/search/?{params}" + text = await self.extract_page(url) + + sections: dict[str, str] = {} + if text: + sections["search_results"] = text + + return { + "url": url, + "sections": sections, + "pages_visited": [url], + "sections_requested": ["search_results"], + } diff --git a/linkedin_mcp_server/scraping/fields.py b/linkedin_mcp_server/scraping/fields.py new file mode 100644 index 00000000..4c0b2a25 --- /dev/null +++ b/linkedin_mcp_server/scraping/fields.py @@ -0,0 +1,67 @@ +"""Flag enums controlling which LinkedIn pages are visited during scraping.""" + +from enum import Flag, auto + + +class PersonScrapingFields(Flag): + """Controls which pages are visited when scraping a person profile.""" + + BASIC_INFO = auto() # /in/{username}/ + EXPERIENCE = auto() # /in/{username}/details/experience/ + EDUCATION = auto() # /in/{username}/details/education/ + INTERESTS = auto() # /in/{username}/details/interests/ + ACCOMPLISHMENTS = auto() # /in/{username}/details/honors/ + /details/languages/ + CONTACTS = auto() # /in/{username}/overlay/contact-info/ + + +class CompanyScrapingFields(Flag): + """Controls which pages are visited when scraping a company.""" + + ABOUT = auto() # /company/{name}/about/ + POSTS = auto() # /company/{name}/posts/ + JOBS = auto() # /company/{name}/jobs/ + + +# Section name -> flag mapping +PERSON_SECTION_MAP: dict[str, PersonScrapingFields] = { + "experience": PersonScrapingFields.EXPERIENCE, + "education": PersonScrapingFields.EDUCATION, + "interests": PersonScrapingFields.INTERESTS, + "accomplishments": PersonScrapingFields.ACCOMPLISHMENTS, + "contacts": PersonScrapingFields.CONTACTS, +} + +COMPANY_SECTION_MAP: dict[str, CompanyScrapingFields] = { + "posts": CompanyScrapingFields.POSTS, + "jobs": 
CompanyScrapingFields.JOBS, +} + + +def parse_person_sections(sections: str | None) -> PersonScrapingFields: + """Parse comma-separated section names into PersonScrapingFields. + + BASIC_INFO is always included. Empty/None returns BASIC_INFO only. + """ + flags = PersonScrapingFields.BASIC_INFO + if not sections: + return flags + for name in sections.split(","): + name = name.strip().lower() + if name in PERSON_SECTION_MAP: + flags |= PERSON_SECTION_MAP[name] + return flags + + +def parse_company_sections(sections: str | None) -> CompanyScrapingFields: + """Parse comma-separated section names into CompanyScrapingFields. + + ABOUT is always included. Empty/None returns ABOUT only. + """ + flags = CompanyScrapingFields.ABOUT + if not sections: + return flags + for name in sections.split(","): + name = name.strip().lower() + if name in COMPANY_SECTION_MAP: + flags |= COMPANY_SECTION_MAP[name] + return flags diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index afd5b8f1..1d19ea76 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -8,8 +8,11 @@ import asyncio from pathlib import Path -from linkedin_scraper import BrowserManager, wait_for_manual_login -from linkedin_scraper.core import warm_up_browser +from linkedin_mcp_server.core import ( + BrowserManager, + wait_for_manual_login, + warm_up_browser, +) from linkedin_mcp_server.drivers.browser import get_profile_dir diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index e0b6a512..92bf810f 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -1,34 +1,28 @@ """ LinkedIn company profile scraping tools. -Provides MCP tools for extracting company information from LinkedIn -with comprehensive error handling. +Uses innerText extraction for resilient company data capture +with configurable section selection. 
""" import logging from typing import Any, Dict from fastmcp import Context, FastMCP -from linkedin_scraper import CompanyPostsScraper, CompanyScraper from mcp.types import ToolAnnotations -from linkedin_mcp_server.callbacks import MCPContextProgressCallback from linkedin_mcp_server.drivers.browser import ( ensure_authenticated, get_or_create_browser, ) from linkedin_mcp_server.error_handler import handle_tool_error +from linkedin_mcp_server.scraping import LinkedInExtractor, parse_company_sections logger = logging.getLogger(__name__) def register_company_tools(mcp: FastMCP) -> None: - """ - Register all company-related tools with the MCP server. - - Args: - mcp: The MCP server instance - """ + """Register all company-related tools with the MCP server.""" @mcp.tool( annotations=ToolAnnotations( @@ -38,39 +32,50 @@ def register_company_tools(mcp: FastMCP) -> None: openWorldHint=True, ) ) - async def get_company_profile(company_name: str, ctx: Context) -> Dict[str, Any]: + async def get_company_profile( + company_name: str, + ctx: Context, + sections: str | None = None, + ) -> Dict[str, Any]: """ Get a specific company's LinkedIn profile. Args: company_name: LinkedIn company name (e.g., "docker", "anthropic", "microsoft") ctx: FastMCP context for progress reporting + sections: Comma-separated list of extra sections to scrape. + The about page is always included. + Available sections: posts, jobs + Examples: "posts", "posts,jobs" + Default (None) scrapes only the about page. Returns: - Structured data from the company's profile including: - - linkedin_url, name, about_us, website, phone - - headquarters, founded, industry, company_type, company_size - - specialties, headcount - - showcase_pages: List of showcase pages (linkedin_url, name, followers) - - affiliated_companies: List of affiliated companies - - employees: List of employees (name, designation, linkedin_url) + Dict with url, sections (name -> raw text), pages_visited, and sections_requested. 
+ The LLM should parse the raw text in each section. """ try: - # Validate session before scraping await ensure_authenticated() - # Construct LinkedIn URL from company name - linkedin_url = f"https://www.linkedin.com/company/{company_name}/" + fields = parse_company_sections(sections) - logger.info(f"Scraping company: {linkedin_url}") + logger.info( + "Scraping company: %s (sections=%s)", + company_name, + sections, + ) browser = await get_or_create_browser() - scraper = CompanyScraper( - browser.page, callback=MCPContextProgressCallback(ctx) + extractor = LinkedInExtractor(browser.page) + + await ctx.report_progress( + progress=0, total=100, message="Starting company profile scrape" ) - company = await scraper.scrape(linkedin_url) - return company.to_dict() + result = await extractor.scrape_company(company_name, fields) + + await ctx.report_progress(progress=100, total=100, message="Complete") + + return result except Exception as e: return handle_tool_error(e, "get_company_profile") @@ -84,7 +89,8 @@ async def get_company_profile(company_name: str, ctx: Context) -> Dict[str, Any] ) ) async def get_company_posts( - company_name: str, ctx: Context, limit: int = 10 + company_name: str, + ctx: Context, ) -> Dict[str, Any]: """ Get recent posts from a company's LinkedIn feed. @@ -92,34 +98,38 @@ async def get_company_posts( Args: company_name: LinkedIn company name (e.g., "docker", "anthropic", "microsoft") ctx: FastMCP context for progress reporting - limit: Maximum number of posts to return (default: 10) Returns: - Dict containing: - - count: Number of posts returned - - posts: List of post dicts with: - - linkedin_url, urn, text, posted_date - - reactions_count, comments_count, reposts_count - - image_urls: List of image URLs - - video_url: Video URL if present - - article_url: Article URL if present + Dict with url, sections (name -> raw text), pages_visited, and sections_requested. + The LLM should parse the raw text to extract individual posts. 
""" try: - # Validate session before scraping await ensure_authenticated() - # Construct LinkedIn URL from company name - linkedin_url = f"https://www.linkedin.com/company/{company_name}/" - - logger.info(f"Scraping company posts: {linkedin_url} (limit: {limit})") + logger.info("Scraping company posts: %s", company_name) browser = await get_or_create_browser() - scraper = CompanyPostsScraper( - browser.page, callback=MCPContextProgressCallback(ctx) + extractor = LinkedInExtractor(browser.page) + + await ctx.report_progress( + progress=0, total=100, message="Starting company posts scrape" ) - posts = await scraper.scrape(linkedin_url, limit=limit) - return {"posts": [post.to_dict() for post in posts], "count": len(posts)} + url = f"https://www.linkedin.com/company/{company_name}/posts/" + text = await extractor.extract_page(url) + + sections: dict[str, str] = {} + if text: + sections["posts"] = text + + await ctx.report_progress(progress=100, total=100, message="Complete") + + return { + "url": url, + "sections": sections, + "pages_visited": [url], + "sections_requested": ["posts"], + } except Exception as e: return handle_tool_error(e, "get_company_posts") diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 37de8326..c3bbf62c 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -1,34 +1,27 @@ """ LinkedIn job scraping tools with search and detail extraction. -Provides MCP tools for job posting details and job searches -with comprehensive filtering and structured data extraction. +Uses innerText extraction for resilient job data capture. 
""" import logging from typing import Any, Dict from fastmcp import Context, FastMCP -from linkedin_scraper import JobScraper, JobSearchScraper from mcp.types import ToolAnnotations -from linkedin_mcp_server.callbacks import MCPContextProgressCallback from linkedin_mcp_server.drivers.browser import ( ensure_authenticated, get_or_create_browser, ) from linkedin_mcp_server.error_handler import handle_tool_error +from linkedin_mcp_server.scraping import LinkedInExtractor logger = logging.getLogger(__name__) def register_job_tools(mcp: FastMCP) -> None: - """ - Register all job-related tools with the MCP server. - - Args: - mcp: The MCP server instance - """ + """Register all job-related tools with the MCP server.""" @mcp.tool( annotations=ToolAnnotations( @@ -47,23 +40,26 @@ async def get_job_details(job_id: str, ctx: Context) -> Dict[str, Any]: ctx: FastMCP context for progress reporting Returns: - Structured job data including title, company, location, - posting date, and job description. + Dict with url, sections (name -> raw text), pages_visited, and sections_requested. + The LLM should parse the raw text to extract job details. 
""" try: - # Validate session before scraping await ensure_authenticated() - # Construct LinkedIn URL from job ID - job_url = f"https://www.linkedin.com/jobs/view/{job_id}/" - - logger.info(f"Scraping job: {job_url}") + logger.info("Scraping job: %s", job_id) browser = await get_or_create_browser() - scraper = JobScraper(browser.page, callback=MCPContextProgressCallback(ctx)) - job = await scraper.scrape(job_url) + extractor = LinkedInExtractor(browser.page) + + await ctx.report_progress( + progress=0, total=100, message="Starting job scrape" + ) + + result = await extractor.scrape_job(job_id) + + await ctx.report_progress(progress=100, total=100, message="Complete") - return job.to_dict() + return result except Exception as e: return handle_tool_error(e, "get_job_details") @@ -80,7 +76,6 @@ async def search_jobs( keywords: str, ctx: Context, location: str | None = None, - limit: int = 25, ) -> Dict[str, Any]: """ Search for jobs on LinkedIn. @@ -89,29 +84,32 @@ async def search_jobs( keywords: Search keywords (e.g., "software engineer", "data scientist") ctx: FastMCP context for progress reporting location: Optional location filter (e.g., "San Francisco", "Remote") - limit: Maximum number of job URLs to return (default: 25) Returns: - Dict with job_urls list and count. Use get_job_details to get - full details for specific jobs. + Dict with url, sections (name -> raw text), pages_visited, and sections_requested. + The LLM should parse the raw text to extract job listings. 
""" try: - # Validate session before scraping await ensure_authenticated() - logger.info(f"Searching jobs: keywords='{keywords}', location='{location}'") + logger.info( + "Searching jobs: keywords='%s', location='%s'", + keywords, + location, + ) browser = await get_or_create_browser() - scraper = JobSearchScraper( - browser.page, callback=MCPContextProgressCallback(ctx) - ) - job_urls = await scraper.search( - keywords=keywords, - location=location, - limit=limit, + extractor = LinkedInExtractor(browser.page) + + await ctx.report_progress( + progress=0, total=100, message="Starting job search" ) - return {"job_urls": job_urls, "count": len(job_urls)} + result = await extractor.search_jobs(keywords, location) + + await ctx.report_progress(progress=100, total=100, message="Complete") + + return result except Exception as e: return handle_tool_error(e, "search_jobs") diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 247ad1b7..5eef3ca0 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -1,34 +1,28 @@ """ LinkedIn person profile scraping tools. -Provides MCP tools for extracting comprehensive LinkedIn profile information including -experience, education, interests, accomplishments, and contact details. +Uses innerText extraction for resilient profile data capture +with configurable section selection. 
""" import logging from typing import Any, Dict from fastmcp import Context, FastMCP -from linkedin_scraper import PersonScraper from mcp.types import ToolAnnotations -from linkedin_mcp_server.callbacks import MCPContextProgressCallback from linkedin_mcp_server.drivers.browser import ( ensure_authenticated, get_or_create_browser, ) from linkedin_mcp_server.error_handler import handle_tool_error +from linkedin_mcp_server.scraping import LinkedInExtractor, parse_person_sections logger = logging.getLogger(__name__) def register_person_tools(mcp: FastMCP) -> None: - """ - Register all person-related tools with the MCP server. - - Args: - mcp: The MCP server instance - """ + """Register all person-related tools with the MCP server.""" @mcp.tool( annotations=ToolAnnotations( @@ -39,7 +33,9 @@ def register_person_tools(mcp: FastMCP) -> None: ) ) async def get_person_profile( - linkedin_username: str, ctx: Context + linkedin_username: str, + ctx: Context, + sections: str | None = None, ) -> Dict[str, Any]: """ Get a specific person's LinkedIn profile. @@ -47,36 +43,39 @@ async def get_person_profile( Args: linkedin_username: LinkedIn username (e.g., "stickerdaniel", "williamhgates") ctx: FastMCP context for progress reporting + sections: Comma-separated list of extra sections to scrape. + The main profile page is always included. + Available sections: experience, education, interests, accomplishments, contacts + Examples: "experience,education", "contacts", "experience,contacts" + Default (None) scrapes only the main profile page. 
Returns: - Structured data from the person's profile including: - - linkedin_url, name, location, about, open_to_work - - experiences: List of work history (position_title, institution_name, - linkedin_url, from_date, to_date, duration, location, description) - - educations: List of education (institution_name, degree, linkedin_url, - from_date, to_date, description) - - interests: List of interests with category (company, group, school, - newsletter, influencer) and linkedin_url - - accomplishments: List of accomplishments (category, title) - - contacts: List of contact info (type: email/phone/website/linkedin/ - twitter/birthday/address, value, label) + Dict with url, sections (name -> raw text), pages_visited, and sections_requested. + The LLM should parse the raw text in each section. """ try: - # Validate session before scraping await ensure_authenticated() - # Construct LinkedIn URL from username - linkedin_url = f"https://www.linkedin.com/in/{linkedin_username}/" + fields = parse_person_sections(sections) - logger.info(f"Scraping profile: {linkedin_url}") + logger.info( + "Scraping profile: %s (sections=%s)", + linkedin_username, + sections, + ) browser = await get_or_create_browser() - scraper = PersonScraper( - browser.page, callback=MCPContextProgressCallback(ctx) + extractor = LinkedInExtractor(browser.page) + + await ctx.report_progress( + progress=0, total=100, message="Starting person profile scrape" ) - person = await scraper.scrape(linkedin_url) - return person.to_dict() + result = await extractor.scrape_person(linkedin_username, fields) + + await ctx.report_progress(progress=100, total=100, message="Complete") + + return result except Exception as e: return handle_tool_error(e, "get_person_profile") diff --git a/pyproject.toml b/pyproject.toml index b3d49b46..6bbce3e0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "3.0.4" +version = "4.0.0" description = "MCP server for LinkedIn 
profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" @@ -34,7 +34,6 @@ classifiers = [ dependencies = [ "fastmcp>=2.14.0", "inquirer>=3.4.0", - "linkedin-scraper-patchright>=3.1.4", "patchright>=1.40.0", "pyperclip>=1.9.0", "python-dotenv>=1.1.1", diff --git a/tests/test_browser_driver.py b/tests/test_browser_driver.py index 9ddca981..1b5740be 100644 --- a/tests/test_browser_driver.py +++ b/tests/test_browser_driver.py @@ -68,7 +68,7 @@ async def test_get_or_create_browser_auth_success(monkeypatch): @pytest.mark.asyncio async def test_get_or_create_browser_auth_failure_cleans_up(monkeypatch): """Failed auth closes browser and does NOT assign singleton.""" - from linkedin_scraper import AuthenticationError + from linkedin_mcp_server.core import AuthenticationError mock_browser = _make_mock_browser() diff --git a/tests/test_error_handler.py b/tests/test_error_handler.py index b229a7ff..a9acad94 100644 --- a/tests/test_error_handler.py +++ b/tests/test_error_handler.py @@ -1,4 +1,4 @@ -from linkedin_scraper.core.exceptions import RateLimitError +from linkedin_mcp_server.core.exceptions import RateLimitError from linkedin_mcp_server.error_handler import handle_tool_error from linkedin_mcp_server.exceptions import ( @@ -28,7 +28,7 @@ def test_handles_generic_exception(): def test_handles_rate_limit_with_suggested_wait(): """Test RateLimitError with custom suggested_wait_time attribute.""" error = RateLimitError("Rate limited") - error.suggested_wait_time = 600 # type: ignore[attr-defined] + error.suggested_wait_time = 600 result = handle_tool_error(error, "test_tool") assert result["error"] == "rate_limit" assert result["suggested_wait_seconds"] == 600 diff --git a/tests/test_fields.py b/tests/test_fields.py new file mode 100644 index 00000000..276af54e --- /dev/null +++ b/tests/test_fields.py @@ -0,0 +1,128 @@ +"""Tests for 
scraping field flag enums and section parsers.""" + +from linkedin_mcp_server.scraping.fields import ( + CompanyScrapingFields, + PersonScrapingFields, + parse_company_sections, + parse_person_sections, +) + + +class TestPersonScrapingFields: + def test_atomic_flags_are_distinct(self): + flags = [ + PersonScrapingFields.BASIC_INFO, + PersonScrapingFields.EXPERIENCE, + PersonScrapingFields.EDUCATION, + PersonScrapingFields.INTERESTS, + PersonScrapingFields.ACCOMPLISHMENTS, + PersonScrapingFields.CONTACTS, + ] + for i, a in enumerate(flags): + for b in flags[i + 1 :]: + assert a & b == PersonScrapingFields(0) + + def test_flag_bitwise_or(self): + combined = PersonScrapingFields.BASIC_INFO | PersonScrapingFields.CONTACTS + assert PersonScrapingFields.BASIC_INFO in combined + assert PersonScrapingFields.CONTACTS in combined + assert PersonScrapingFields.EXPERIENCE not in combined + + +class TestCompanyScrapingFields: + def test_atomic_flags_are_distinct(self): + flags = [ + CompanyScrapingFields.ABOUT, + CompanyScrapingFields.POSTS, + CompanyScrapingFields.JOBS, + ] + for i, a in enumerate(flags): + for b in flags[i + 1 :]: + assert a & b == CompanyScrapingFields(0) + + +class TestParsePersonSections: + def test_none_returns_basic_info_only(self): + assert parse_person_sections(None) == PersonScrapingFields.BASIC_INFO + + def test_empty_string_returns_basic_info_only(self): + assert parse_person_sections("") == PersonScrapingFields.BASIC_INFO + + def test_single_section(self): + result = parse_person_sections("contacts") + assert result == PersonScrapingFields.BASIC_INFO | PersonScrapingFields.CONTACTS + + def test_multiple_sections(self): + result = parse_person_sections("experience,education") + expected = ( + PersonScrapingFields.BASIC_INFO + | PersonScrapingFields.EXPERIENCE + | PersonScrapingFields.EDUCATION + ) + assert result == expected + + def test_invalid_names_ignored(self): + result = parse_person_sections("experience,bogus,education") + expected = ( + 
PersonScrapingFields.BASIC_INFO + | PersonScrapingFields.EXPERIENCE + | PersonScrapingFields.EDUCATION + ) + assert result == expected + + def test_whitespace_and_case_handling(self): + result = parse_person_sections(" Experience , EDUCATION ") + expected = ( + PersonScrapingFields.BASIC_INFO + | PersonScrapingFields.EXPERIENCE + | PersonScrapingFields.EDUCATION + ) + assert result == expected + + def test_all_sections(self): + result = parse_person_sections( + "experience,education,interests,accomplishments,contacts" + ) + expected = ( + PersonScrapingFields.BASIC_INFO + | PersonScrapingFields.EXPERIENCE + | PersonScrapingFields.EDUCATION + | PersonScrapingFields.INTERESTS + | PersonScrapingFields.ACCOMPLISHMENTS + | PersonScrapingFields.CONTACTS + ) + assert result == expected + + +class TestParseCompanySections: + def test_none_returns_about_only(self): + assert parse_company_sections(None) == CompanyScrapingFields.ABOUT + + def test_empty_string_returns_about_only(self): + assert parse_company_sections("") == CompanyScrapingFields.ABOUT + + def test_single_section(self): + result = parse_company_sections("posts") + assert result == CompanyScrapingFields.ABOUT | CompanyScrapingFields.POSTS + + def test_multiple_sections(self): + result = parse_company_sections("posts,jobs") + expected = ( + CompanyScrapingFields.ABOUT + | CompanyScrapingFields.POSTS + | CompanyScrapingFields.JOBS + ) + assert result == expected + + def test_invalid_names_ignored(self): + result = parse_company_sections("posts,bogus") + assert result == CompanyScrapingFields.ABOUT | CompanyScrapingFields.POSTS + + def test_whitespace_and_case_handling(self): + result = parse_company_sections(" Posts , JOBS ") + expected = ( + CompanyScrapingFields.ABOUT + | CompanyScrapingFields.POSTS + | CompanyScrapingFields.JOBS + ) + assert result == expected diff --git a/tests/test_scraping.py b/tests/test_scraping.py new file mode 100644 index 00000000..1194117f --- /dev/null +++ b/tests/test_scraping.py @@ 
-0,0 +1,292 @@ +"""Tests for the LinkedInExtractor scraping engine.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from linkedin_mcp_server.scraping.extractor import LinkedInExtractor +from linkedin_mcp_server.scraping.fields import ( + CompanyScrapingFields, + PersonScrapingFields, +) + + +@pytest.fixture +def mock_page(): + """Create a mock Patchright page.""" + page = MagicMock() + page.goto = AsyncMock() + page.wait_for_selector = AsyncMock() + page.evaluate = AsyncMock(return_value="Sample page text") + page.url = "https://www.linkedin.com/in/testuser/" + page.locator = MagicMock() + # Default: no modals, no CAPTCHA + mock_locator = MagicMock() + mock_locator.count = AsyncMock(return_value=0) + mock_locator.is_visible = AsyncMock(return_value=False) + mock_locator.first = mock_locator + mock_locator.inner_text = AsyncMock(return_value="normal page content") + page.locator.return_value = mock_locator + return page + + +class TestExtractPage: + async def test_extract_page_returns_text(self, mock_page): + mock_page.evaluate = AsyncMock( + side_effect=[ + "Sample profile text", # main.innerText + 100, # scrollHeight (first check) + None, # scrollTo + 100, # scrollHeight (unchanged = stop) + ] + ) + extractor = LinkedInExtractor(mock_page) + # Patch scroll_to_bottom and detect_rate_limit to avoid complex mock chains + with ( + patch( + "linkedin_mcp_server.scraping.extractor.scroll_to_bottom", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.detect_rate_limit", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.handle_modal_close", + new_callable=AsyncMock, + return_value=False, + ), + ): + result = await extractor.extract_page( + "https://www.linkedin.com/in/testuser/" + ) + + assert result == "Sample profile text" + mock_page.goto.assert_awaited_once() + + async def test_extract_page_returns_empty_on_failure(self, mock_page): + mock_page.goto = 
AsyncMock(side_effect=Exception("Network error")) + extractor = LinkedInExtractor(mock_page) + + result = await extractor.extract_page("https://www.linkedin.com/in/bad/") + assert result == "" + + async def test_rate_limit_detected(self, mock_page): + from linkedin_mcp_server.core.exceptions import RateLimitError + + extractor = LinkedInExtractor(mock_page) + with patch( + "linkedin_mcp_server.scraping.extractor.detect_rate_limit", + new_callable=AsyncMock, + side_effect=RateLimitError("Rate limited", suggested_wait_time=3600), + ): + # extract_page catches all exceptions and returns "" + result = await extractor.extract_page( + "https://www.linkedin.com/in/testuser/" + ) + assert result == "" + + +class TestScrapePersonUrls: + """Test that scrape_person visits the correct URLs per field combination.""" + + async def test_basic_info_only_visits_main_profile(self, mock_page): + extractor = LinkedInExtractor(mock_page) + with ( + patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value="profile text", + ), + patch.object( + extractor, + "_extract_overlay", + new_callable=AsyncMock, + return_value="", + ), + ): + result = await extractor.scrape_person( + "testuser", PersonScrapingFields.BASIC_INFO + ) + + assert len(result["pages_visited"]) == 1 + assert "https://www.linkedin.com/in/testuser/" in result["pages_visited"] + assert result["sections_requested"] == ["main_profile"] + + async def test_experience_education_visits_three_pages(self, mock_page): + extractor = LinkedInExtractor(mock_page) + fields = ( + PersonScrapingFields.BASIC_INFO + | PersonScrapingFields.EXPERIENCE + | PersonScrapingFields.EDUCATION + ) + with ( + patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value="text", + ), + patch.object( + extractor, + "_extract_overlay", + new_callable=AsyncMock, + return_value="", + ), + ): + result = await extractor.scrape_person("testuser", fields) + + urls = result["pages_visited"] + assert len(urls) 
== 3 + assert any("/in/testuser/" in u for u in urls) + assert any("/details/experience/" in u for u in urls) + assert any("/details/education/" in u for u in urls) + assert result["sections_requested"] == [ + "main_profile", + "experience", + "education", + ] + + async def test_all_flags_visit_all_pages(self, mock_page): + extractor = LinkedInExtractor(mock_page) + fields = ( + PersonScrapingFields.BASIC_INFO + | PersonScrapingFields.EXPERIENCE + | PersonScrapingFields.EDUCATION + | PersonScrapingFields.INTERESTS + | PersonScrapingFields.ACCOMPLISHMENTS + | PersonScrapingFields.CONTACTS + ) + with ( + patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value="text", + ), + patch.object( + extractor, + "_extract_overlay", + new_callable=AsyncMock, + return_value="contact text", + ), + ): + result = await extractor.scrape_person("testuser", fields) + + urls = result["pages_visited"] + # main_profile, experience, education, interests, honors, languages, contacts + assert len(urls) == 7 + assert result["sections_requested"] == [ + "main_profile", + "experience", + "education", + "interests", + "accomplishments", + "contacts", + ] + + async def test_error_isolation(self, mock_page): + """One section failing doesn't block others.""" + call_count = 0 + + async def extract_with_failure(url): + nonlocal call_count + call_count += 1 + if "experience" in url: + raise Exception("Simulated failure") + return f"text for {url}" + + extractor = LinkedInExtractor(mock_page) + fields = ( + PersonScrapingFields.BASIC_INFO + | PersonScrapingFields.EXPERIENCE + | PersonScrapingFields.EDUCATION + ) + with ( + patch.object( + extractor, + "extract_page", + side_effect=extract_with_failure, + ), + patch.object( + extractor, + "_extract_overlay", + new_callable=AsyncMock, + return_value="", + ), + ): + result = await extractor.scrape_person("testuser", fields) + + # All 3 pages should be visited even though experience failed + assert 
len(result["pages_visited"]) == 3 + # main_profile and education should have sections, experience should not + assert "main_profile" in result["sections"] + assert "education" in result["sections"] + + +class TestScrapeCompany: + async def test_about_only_visits_about(self, mock_page): + extractor = LinkedInExtractor(mock_page) + with patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value="about text", + ): + result = await extractor.scrape_company( + "testcorp", CompanyScrapingFields.ABOUT + ) + + assert len(result["pages_visited"]) == 1 + assert any("/about/" in u for u in result["pages_visited"]) + assert result["sections_requested"] == ["about"] + + async def test_all_flags_visit_about_posts_jobs(self, mock_page): + extractor = LinkedInExtractor(mock_page) + fields = ( + CompanyScrapingFields.ABOUT + | CompanyScrapingFields.POSTS + | CompanyScrapingFields.JOBS + ) + with patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value="text", + ): + result = await extractor.scrape_company("testcorp", fields) + + assert len(result["pages_visited"]) == 3 + assert result["sections_requested"] == ["about", "posts", "jobs"] + + +class TestScrapeJob: + async def test_scrape_job(self, mock_page): + extractor = LinkedInExtractor(mock_page) + with patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value="Job: Software Engineer", + ): + result = await extractor.scrape_job("12345") + + assert result["url"] == "https://www.linkedin.com/jobs/view/12345/" + assert "job_posting" in result["sections"] + assert result["sections_requested"] == ["job_posting"] + + async def test_search_jobs(self, mock_page): + extractor = LinkedInExtractor(mock_page) + with patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value="Job 1\nJob 2", + ): + result = await extractor.search_jobs("python", "Remote") + + assert "keywords=python" in result["url"] + assert "location=Remote" in 
result["url"] + assert "search_results" in result["sections"] + assert result["sections_requested"] == ["search_results"] diff --git a/tests/test_tools.py b/tests/test_tools.py index fe9a3166..8ee0d046 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -33,17 +33,31 @@ def patch_tool_deps(monkeypatch): return mock_browser +def _make_mock_extractor(scrape_result: dict) -> MagicMock: + """Create a mock LinkedInExtractor that returns the given result.""" + mock = MagicMock() + mock.scrape_person = AsyncMock(return_value=scrape_result) + mock.scrape_company = AsyncMock(return_value=scrape_result) + mock.scrape_job = AsyncMock(return_value=scrape_result) + mock.search_jobs = AsyncMock(return_value=scrape_result) + mock.extract_page = AsyncMock(return_value="some text") + return mock + + class TestPersonTool: async def test_get_person_profile_success( self, mock_context, patch_tool_deps, monkeypatch ): - mock_person = MagicMock() - mock_person.to_dict.return_value = {"full_name": "Test User"} - mock_scraper = MagicMock() - mock_scraper.scrape = AsyncMock(return_value=mock_person) + expected = { + "url": "https://www.linkedin.com/in/test-user/", + "sections": {"main_profile": "John Doe\nSoftware Engineer"}, + "pages_visited": ["https://www.linkedin.com/in/test-user/"], + "sections_requested": ["main_profile"], + } + mock_extractor = _make_mock_extractor(expected) monkeypatch.setattr( - "linkedin_mcp_server.tools.person.PersonScraper", - lambda *a, **kw: mock_scraper, + "linkedin_mcp_server.tools.person.LinkedInExtractor", + lambda *a, **kw: mock_extractor, ) from linkedin_mcp_server.tools.person import register_person_tools @@ -53,7 +67,49 @@ async def test_get_person_profile_success( tool_fn = await get_tool_fn(mcp, "get_person_profile") result = await tool_fn("test-user", mock_context) - assert result["full_name"] == "Test User" + assert result["url"] == "https://www.linkedin.com/in/test-user/" + assert "main_profile" in result["sections"] + assert 
result["sections_requested"] == ["main_profile"] + + async def test_get_person_profile_with_sections( + self, mock_context, patch_tool_deps, monkeypatch + ): + """Verify sections parameter is passed through.""" + expected = { + "url": "https://www.linkedin.com/in/test-user/", + "sections": { + "main_profile": "John Doe", + "experience": "Work history", + "contacts": "Email: test@test.com", + }, + "pages_visited": [ + "https://www.linkedin.com/in/test-user/", + "https://www.linkedin.com/in/test-user/details/experience/", + "https://www.linkedin.com/in/test-user/overlay/contact-info/", + ], + "sections_requested": ["main_profile", "experience", "contacts"], + } + mock_extractor = _make_mock_extractor(expected) + monkeypatch.setattr( + "linkedin_mcp_server.tools.person.LinkedInExtractor", + lambda *a, **kw: mock_extractor, + ) + + from linkedin_mcp_server.tools.person import register_person_tools + + mcp = FastMCP("test") + register_person_tools(mcp) + + tool_fn = await get_tool_fn(mcp, "get_person_profile") + result = await tool_fn( + "test-user", mock_context, sections="experience,contacts" + ) + assert result["sections_requested"] == [ + "main_profile", + "experience", + "contacts", + ] + mock_extractor.scrape_person.assert_awaited_once() async def test_get_person_profile_error(self, mock_context, monkeypatch): from linkedin_mcp_server.exceptions import SessionExpiredError @@ -77,13 +133,16 @@ class TestCompanyTools: async def test_get_company_profile( self, mock_context, patch_tool_deps, monkeypatch ): - mock_company = MagicMock() - mock_company.to_dict.return_value = {"name": "Test Corp"} - mock_scraper = MagicMock() - mock_scraper.scrape = AsyncMock(return_value=mock_company) + expected = { + "url": "https://www.linkedin.com/company/testcorp/", + "sections": {"about": "TestCorp\nWe build things"}, + "pages_visited": ["https://www.linkedin.com/company/testcorp/about/"], + "sections_requested": ["about"], + } + mock_extractor = _make_mock_extractor(expected) 
monkeypatch.setattr( - "linkedin_mcp_server.tools.company.CompanyScraper", - lambda *a, **kw: mock_scraper, + "linkedin_mcp_server.tools.company.LinkedInExtractor", + lambda *a, **kw: mock_extractor, ) from linkedin_mcp_server.tools.company import register_company_tools @@ -93,16 +152,14 @@ async def test_get_company_profile( tool_fn = await get_tool_fn(mcp, "get_company_profile") result = await tool_fn("testcorp", mock_context) - assert result["name"] == "Test Corp" + assert "about" in result["sections"] async def test_get_company_posts(self, mock_context, patch_tool_deps, monkeypatch): - mock_post = MagicMock() - mock_post.to_dict.return_value = {"text": "Hello world"} - mock_scraper = MagicMock() - mock_scraper.scrape = AsyncMock(return_value=[mock_post]) + mock_extractor = MagicMock() + mock_extractor.extract_page = AsyncMock(return_value="Post 1\nPost 2") monkeypatch.setattr( - "linkedin_mcp_server.tools.company.CompanyPostsScraper", - lambda *a, **kw: mock_scraper, + "linkedin_mcp_server.tools.company.LinkedInExtractor", + lambda *a, **kw: mock_extractor, ) from linkedin_mcp_server.tools.company import register_company_tools @@ -111,19 +168,24 @@ async def test_get_company_posts(self, mock_context, patch_tool_deps, monkeypatc register_company_tools(mcp) tool_fn = await get_tool_fn(mcp, "get_company_posts") - result = await tool_fn("testcorp", mock_context, limit=5) - assert result["count"] == 1 - assert result["posts"][0]["text"] == "Hello world" + result = await tool_fn("testcorp", mock_context) + assert "posts" in result["sections"] + assert result["sections"]["posts"] == "Post 1\nPost 2" + assert result["sections_requested"] == ["posts"] class TestJobTools: async def test_get_job_details(self, mock_context, patch_tool_deps, monkeypatch): - mock_job = MagicMock() - mock_job.to_dict.return_value = {"title": "Engineer"} - mock_scraper = MagicMock() - mock_scraper.scrape = AsyncMock(return_value=mock_job) + expected = { + "url": 
"https://www.linkedin.com/jobs/view/12345/", + "sections": {"job_posting": "Software Engineer\nGreat opportunity"}, + "pages_visited": ["https://www.linkedin.com/jobs/view/12345/"], + "sections_requested": ["job_posting"], + } + mock_extractor = _make_mock_extractor(expected) monkeypatch.setattr( - "linkedin_mcp_server.tools.job.JobScraper", lambda *a, **kw: mock_scraper + "linkedin_mcp_server.tools.job.LinkedInExtractor", + lambda *a, **kw: mock_extractor, ) from linkedin_mcp_server.tools.job import register_job_tools @@ -133,14 +195,19 @@ async def test_get_job_details(self, mock_context, patch_tool_deps, monkeypatch) tool_fn = await get_tool_fn(mcp, "get_job_details") result = await tool_fn("12345", mock_context) - assert result["title"] == "Engineer" + assert "job_posting" in result["sections"] async def test_search_jobs(self, mock_context, patch_tool_deps, monkeypatch): - mock_scraper = MagicMock() - mock_scraper.search = AsyncMock(return_value=["url1", "url2"]) + expected = { + "url": "https://www.linkedin.com/jobs/search/?keywords=python", + "sections": {"search_results": "Job 1\nJob 2"}, + "pages_visited": ["https://www.linkedin.com/jobs/search/?keywords=python"], + "sections_requested": ["search_results"], + } + mock_extractor = _make_mock_extractor(expected) monkeypatch.setattr( - "linkedin_mcp_server.tools.job.JobSearchScraper", - lambda *a, **kw: mock_scraper, + "linkedin_mcp_server.tools.job.LinkedInExtractor", + lambda *a, **kw: mock_extractor, ) from linkedin_mcp_server.tools.job import register_job_tools @@ -149,6 +216,5 @@ async def test_search_jobs(self, mock_context, patch_tool_deps, monkeypatch): register_job_tools(mcp) tool_fn = await get_tool_fn(mcp, "search_jobs") - result = await tool_fn("python", mock_context, location="Remote", limit=10) - assert result["count"] == 2 - assert "url1" in result["job_urls"] + result = await tool_fn("python", mock_context, location="Remote") + assert "search_results" in result["sections"] diff --git a/uv.lock 
b/uv.lock index caa3c38c..ba807cb0 100644 --- a/uv.lock +++ b/uv.lock @@ -2,15 +2,6 @@ version = 1 revision = 2 requires-python = ">=3.12" -[[package]] -name = "aiofiles" -version = "25.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/41/c3/534eac40372d8ee36ef40df62ec129bee4fdb5ad9706e58a29be53b2c970/aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2", size = 46354, upload-time = "2025-10-09T20:51:04.358Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size = 14668, upload-time = "2025-10-09T20:51:03.174Z" }, -] - [[package]] name = "aiohappyeyeballs" version = "2.6.1" @@ -1011,12 +1002,11 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "3.0.4" +version = "4.0.0" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, { name = "inquirer" }, - { name = "linkedin-scraper-patchright" }, { name = "patchright" }, { name = "pyperclip" }, { name = "python-dotenv" }, @@ -1038,7 +1028,6 @@ dev = [ requires-dist = [ { name = "fastmcp", specifier = ">=2.14.0" }, { name = "inquirer", specifier = ">=3.4.0" }, - { name = "linkedin-scraper-patchright", specifier = ">=3.1.4" }, { name = "patchright", specifier = ">=1.40.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, { name = "python-dotenv", specifier = ">=1.1.1" }, @@ -1056,23 +1045,6 @@ dev = [ { name = "ty", specifier = ">=0.0.1a12" }, ] -[[package]] -name = "linkedin-scraper-patchright" -version = "3.1.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "aiofiles" }, - { name = "lxml" }, - { name = "patchright" }, - { name = "pydantic" }, - { name = "python-dotenv" }, - { name = "requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e7/39/9455b68ee039bbbeafeddc4f48d95c337d29a92d620891efcf661a294744/linkedin_scraper_patchright-3.1.4.tar.gz", hash = "sha256:e460fe79db266fd4d1dae66c42cc0fa7b13400672c9ee996f45aa624232d0fa0", size = 47736, upload-time = "2026-02-13T16:44:39.564Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/f8/61d90fdb7aaf8b63557e5bbf907b4b274ed28f646925933798f49294877e/linkedin_scraper_patchright-3.1.4-py3-none-any.whl", hash = "sha256:c5b9e76ea6b2fb01d21e2c2ad840fd4ce447a09ead552c8a5be2ab54b9ad7149", size = 54228, upload-time = "2026-02-13T16:44:37.447Z" }, -] - [[package]] name = "lupa" version = "2.6" @@ -1125,86 +1097,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7d/5e/db903ce9cf82c48d6b91bf6d63ae4c8d0d17958939a4e04ba6b9f38b8643/lupa-2.6-cp314-cp314t-win_amd64.whl", hash = "sha256:fc1498d1a4fc028bc521c26d0fad4ca00ed63b952e32fb95949bda76a04bad52", size = 1913818, upload-time = "2025-10-24T07:19:36.039Z" }, ] -[[package]] -name = "lxml" -version = "6.0.2" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887, upload-time = "2025-09-22T04:01:17.265Z" }, - { url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818, upload-time = "2025-09-22T04:01:19.688Z" }, - { url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807, upload-time = "2025-09-22T04:01:21.487Z" }, - { url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179, upload-time = "2025-09-22T04:01:23.32Z" }, - { url = "https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044, upload-time = "2025-09-22T04:01:25.118Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685, upload-time = "2025-09-22T04:01:27.398Z" }, - { url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127, upload-time = "2025-09-22T04:01:29.629Z" }, - { url = "https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958, upload-time = "2025-09-22T04:01:31.535Z" }, - { url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541, upload-time = "2025-09-22T04:01:33.801Z" }, - { url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426, upload-time = "2025-09-22T04:01:35.639Z" }, - { url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917, upload-time = "2025-09-22T04:01:37.448Z" }, - { url = 
"https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795, upload-time = "2025-09-22T04:01:39.165Z" }, - { url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759, upload-time = "2025-09-22T04:01:41.506Z" }, - { url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666, upload-time = "2025-09-22T04:01:43.363Z" }, - { url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989, upload-time = "2025-09-22T04:01:45.215Z" }, - { url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456, upload-time = "2025-09-22T04:01:48.243Z" }, - { url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793, upload-time = "2025-09-22T04:01:50.042Z" }, - { url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = 
"sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836, upload-time = "2025-09-22T04:01:52.145Z" }, - { url = "https://files.pythonhosted.org/packages/53/fd/4e8f0540608977aea078bf6d79f128e0e2c2bba8af1acf775c30baa70460/lxml-6.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9b33d21594afab46f37ae58dfadd06636f154923c4e8a4d754b0127554eb2e77", size = 8648494, upload-time = "2025-09-22T04:01:54.242Z" }, - { url = "https://files.pythonhosted.org/packages/5d/f4/2a94a3d3dfd6c6b433501b8d470a1960a20ecce93245cf2db1706adf6c19/lxml-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c8963287d7a4c5c9a432ff487c52e9c5618667179c18a204bdedb27310f022f", size = 4661146, upload-time = "2025-09-22T04:01:56.282Z" }, - { url = "https://files.pythonhosted.org/packages/25/2e/4efa677fa6b322013035d38016f6ae859d06cac67437ca7dc708a6af7028/lxml-6.0.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1941354d92699fb5ffe6ed7b32f9649e43c2feb4b97205f75866f7d21aa91452", size = 4946932, upload-time = "2025-09-22T04:01:58.989Z" }, - { url = "https://files.pythonhosted.org/packages/ce/0f/526e78a6d38d109fdbaa5049c62e1d32fdd70c75fb61c4eadf3045d3d124/lxml-6.0.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb2f6ca0ae2d983ded09357b84af659c954722bbf04dea98030064996d156048", size = 5100060, upload-time = "2025-09-22T04:02:00.812Z" }, - { url = "https://files.pythonhosted.org/packages/81/76/99de58d81fa702cc0ea7edae4f4640416c2062813a00ff24bd70ac1d9c9b/lxml-6.0.2-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb2a12d704f180a902d7fa778c6d71f36ceb7b0d317f34cdc76a5d05aa1dd1df", size = 5019000, upload-time = "2025-09-22T04:02:02.671Z" }, - { url = "https://files.pythonhosted.org/packages/b5/35/9e57d25482bc9a9882cb0037fdb9cc18f4b79d85df94fa9d2a89562f1d25/lxml-6.0.2-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = 
"sha256:6ec0e3f745021bfed19c456647f0298d60a24c9ff86d9d051f52b509663feeb1", size = 5348496, upload-time = "2025-09-22T04:02:04.904Z" }, - { url = "https://files.pythonhosted.org/packages/a6/8e/cb99bd0b83ccc3e8f0f528e9aa1f7a9965dfec08c617070c5db8d63a87ce/lxml-6.0.2-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:846ae9a12d54e368933b9759052d6206a9e8b250291109c48e350c1f1f49d916", size = 5643779, upload-time = "2025-09-22T04:02:06.689Z" }, - { url = "https://files.pythonhosted.org/packages/d0/34/9e591954939276bb679b73773836c6684c22e56d05980e31d52a9a8deb18/lxml-6.0.2-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef9266d2aa545d7374938fb5c484531ef5a2ec7f2d573e62f8ce722c735685fd", size = 5244072, upload-time = "2025-09-22T04:02:08.587Z" }, - { url = "https://files.pythonhosted.org/packages/8d/27/b29ff065f9aaca443ee377aff699714fcbffb371b4fce5ac4ca759e436d5/lxml-6.0.2-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:4077b7c79f31755df33b795dc12119cb557a0106bfdab0d2c2d97bd3cf3dffa6", size = 4718675, upload-time = "2025-09-22T04:02:10.783Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/f756f9c2cd27caa1a6ef8c32ae47aadea697f5c2c6d07b0dae133c244fbe/lxml-6.0.2-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a7c5d5e5f1081955358533be077166ee97ed2571d6a66bdba6ec2f609a715d1a", size = 5255171, upload-time = "2025-09-22T04:02:12.631Z" }, - { url = "https://files.pythonhosted.org/packages/61/46/bb85ea42d2cb1bd8395484fd72f38e3389611aa496ac7772da9205bbda0e/lxml-6.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8f8d0cbd0674ee89863a523e6994ac25fd5be9c8486acfc3e5ccea679bad2679", size = 5057175, upload-time = "2025-09-22T04:02:14.718Z" }, - { url = "https://files.pythonhosted.org/packages/95/0c/443fc476dcc8e41577f0af70458c50fe299a97bb6b7505bb1ae09aa7f9ac/lxml-6.0.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = 
"sha256:2cbcbf6d6e924c28f04a43f3b6f6e272312a090f269eff68a2982e13e5d57659", size = 4785688, upload-time = "2025-09-22T04:02:16.957Z" }, - { url = "https://files.pythonhosted.org/packages/48/78/6ef0b359d45bb9697bc5a626e1992fa5d27aa3f8004b137b2314793b50a0/lxml-6.0.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dfb874cfa53340009af6bdd7e54ebc0d21012a60a4e65d927c2e477112e63484", size = 5660655, upload-time = "2025-09-22T04:02:18.815Z" }, - { url = "https://files.pythonhosted.org/packages/ff/ea/e1d33808f386bc1339d08c0dcada6e4712d4ed8e93fcad5f057070b7988a/lxml-6.0.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:fb8dae0b6b8b7f9e96c26fdd8121522ce5de9bb5538010870bd538683d30e9a2", size = 5247695, upload-time = "2025-09-22T04:02:20.593Z" }, - { url = "https://files.pythonhosted.org/packages/4f/47/eba75dfd8183673725255247a603b4ad606f4ae657b60c6c145b381697da/lxml-6.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:358d9adae670b63e95bc59747c72f4dc97c9ec58881d4627fe0120da0f90d314", size = 5269841, upload-time = "2025-09-22T04:02:22.489Z" }, - { url = "https://files.pythonhosted.org/packages/76/04/5c5e2b8577bc936e219becb2e98cdb1aca14a4921a12995b9d0c523502ae/lxml-6.0.2-cp313-cp313-win32.whl", hash = "sha256:e8cd2415f372e7e5a789d743d133ae474290a90b9023197fd78f32e2dc6873e2", size = 3610700, upload-time = "2025-09-22T04:02:24.465Z" }, - { url = "https://files.pythonhosted.org/packages/fe/0a/4643ccc6bb8b143e9f9640aa54e38255f9d3b45feb2cbe7ae2ca47e8782e/lxml-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:b30d46379644fbfc3ab81f8f82ae4de55179414651f110a1514f0b1f8f6cb2d7", size = 4010347, upload-time = "2025-09-22T04:02:26.286Z" }, - { url = "https://files.pythonhosted.org/packages/31/ef/dcf1d29c3f530577f61e5fe2f1bd72929acf779953668a8a47a479ae6f26/lxml-6.0.2-cp313-cp313-win_arm64.whl", hash = "sha256:13dcecc9946dca97b11b7c40d29fba63b55ab4170d3c0cf8c0c164343b9bfdcf", size = 3671248, upload-time = "2025-09-22T04:02:27.918Z" }, - { url = 
"https://files.pythonhosted.org/packages/03/15/d4a377b385ab693ce97b472fe0c77c2b16ec79590e688b3ccc71fba19884/lxml-6.0.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:b0c732aa23de8f8aec23f4b580d1e52905ef468afb4abeafd3fec77042abb6fe", size = 8659801, upload-time = "2025-09-22T04:02:30.113Z" }, - { url = "https://files.pythonhosted.org/packages/c8/e8/c128e37589463668794d503afaeb003987373c5f94d667124ffd8078bbd9/lxml-6.0.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4468e3b83e10e0317a89a33d28f7aeba1caa4d1a6fd457d115dd4ffe90c5931d", size = 4659403, upload-time = "2025-09-22T04:02:32.119Z" }, - { url = "https://files.pythonhosted.org/packages/00/ce/74903904339decdf7da7847bb5741fc98a5451b42fc419a86c0c13d26fe2/lxml-6.0.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:abd44571493973bad4598a3be7e1d807ed45aa2adaf7ab92ab7c62609569b17d", size = 4966974, upload-time = "2025-09-22T04:02:34.155Z" }, - { url = "https://files.pythonhosted.org/packages/1f/d3/131dec79ce61c5567fecf82515bd9bc36395df42501b50f7f7f3bd065df0/lxml-6.0.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:370cd78d5855cfbffd57c422851f7d3864e6ae72d0da615fca4dad8c45d375a5", size = 5102953, upload-time = "2025-09-22T04:02:36.054Z" }, - { url = "https://files.pythonhosted.org/packages/3a/ea/a43ba9bb750d4ffdd885f2cd333572f5bb900cd2408b67fdda07e85978a0/lxml-6.0.2-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:901e3b4219fa04ef766885fb40fa516a71662a4c61b80c94d25336b4934b71c0", size = 5055054, upload-time = "2025-09-22T04:02:38.154Z" }, - { url = "https://files.pythonhosted.org/packages/60/23/6885b451636ae286c34628f70a7ed1fcc759f8d9ad382d132e1c8d3d9bfd/lxml-6.0.2-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:a4bf42d2e4cf52c28cc1812d62426b9503cdb0c87a6de81442626aa7d69707ba", size = 5352421, upload-time = "2025-09-22T04:02:40.413Z" }, - { url = 
"https://files.pythonhosted.org/packages/48/5b/fc2ddfc94ddbe3eebb8e9af6e3fd65e2feba4967f6a4e9683875c394c2d8/lxml-6.0.2-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2c7fdaa4d7c3d886a42534adec7cfac73860b89b4e5298752f60aa5984641a0", size = 5673684, upload-time = "2025-09-22T04:02:42.288Z" }, - { url = "https://files.pythonhosted.org/packages/29/9c/47293c58cc91769130fbf85531280e8cc7868f7fbb6d92f4670071b9cb3e/lxml-6.0.2-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98a5e1660dc7de2200b00d53fa00bcd3c35a3608c305d45a7bbcaf29fa16e83d", size = 5252463, upload-time = "2025-09-22T04:02:44.165Z" }, - { url = "https://files.pythonhosted.org/packages/9b/da/ba6eceb830c762b48e711ded880d7e3e89fc6c7323e587c36540b6b23c6b/lxml-6.0.2-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:dc051506c30b609238d79eda75ee9cab3e520570ec8219844a72a46020901e37", size = 4698437, upload-time = "2025-09-22T04:02:46.524Z" }, - { url = "https://files.pythonhosted.org/packages/a5/24/7be3f82cb7990b89118d944b619e53c656c97dc89c28cfb143fdb7cd6f4d/lxml-6.0.2-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8799481bbdd212470d17513a54d568f44416db01250f49449647b5ab5b5dccb9", size = 5269890, upload-time = "2025-09-22T04:02:48.812Z" }, - { url = "https://files.pythonhosted.org/packages/1b/bd/dcfb9ea1e16c665efd7538fc5d5c34071276ce9220e234217682e7d2c4a5/lxml-6.0.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9261bb77c2dab42f3ecd9103951aeca2c40277701eb7e912c545c1b16e0e4917", size = 5097185, upload-time = "2025-09-22T04:02:50.746Z" }, - { url = "https://files.pythonhosted.org/packages/21/04/a60b0ff9314736316f28316b694bccbbabe100f8483ad83852d77fc7468e/lxml-6.0.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:65ac4a01aba353cfa6d5725b95d7aed6356ddc0a3cd734de00124d285b04b64f", size = 4745895, upload-time = "2025-09-22T04:02:52.968Z" }, - { url = 
"https://files.pythonhosted.org/packages/d6/bd/7d54bd1846e5a310d9c715921c5faa71cf5c0853372adf78aee70c8d7aa2/lxml-6.0.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b22a07cbb82fea98f8a2fd814f3d1811ff9ed76d0fc6abc84eb21527596e7cc8", size = 5695246, upload-time = "2025-09-22T04:02:54.798Z" }, - { url = "https://files.pythonhosted.org/packages/fd/32/5643d6ab947bc371da21323acb2a6e603cedbe71cb4c99c8254289ab6f4e/lxml-6.0.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d759cdd7f3e055d6bc8d9bec3ad905227b2e4c785dc16c372eb5b5e83123f48a", size = 5260797, upload-time = "2025-09-22T04:02:57.058Z" }, - { url = "https://files.pythonhosted.org/packages/33/da/34c1ec4cff1eea7d0b4cd44af8411806ed943141804ac9c5d565302afb78/lxml-6.0.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:945da35a48d193d27c188037a05fec5492937f66fb1958c24fc761fb9d40d43c", size = 5277404, upload-time = "2025-09-22T04:02:58.966Z" }, - { url = "https://files.pythonhosted.org/packages/82/57/4eca3e31e54dc89e2c3507e1cd411074a17565fa5ffc437c4ae0a00d439e/lxml-6.0.2-cp314-cp314-win32.whl", hash = "sha256:be3aaa60da67e6153eb15715cc2e19091af5dc75faef8b8a585aea372507384b", size = 3670072, upload-time = "2025-09-22T04:03:38.05Z" }, - { url = "https://files.pythonhosted.org/packages/e3/e0/c96cf13eccd20c9421ba910304dae0f619724dcf1702864fd59dd386404d/lxml-6.0.2-cp314-cp314-win_amd64.whl", hash = "sha256:fa25afbadead523f7001caf0c2382afd272c315a033a7b06336da2637d92d6ed", size = 4080617, upload-time = "2025-09-22T04:03:39.835Z" }, - { url = "https://files.pythonhosted.org/packages/d5/5d/b3f03e22b3d38d6f188ef044900a9b29b2fe0aebb94625ce9fe244011d34/lxml-6.0.2-cp314-cp314-win_arm64.whl", hash = "sha256:063eccf89df5b24e361b123e257e437f9e9878f425ee9aae3144c77faf6da6d8", size = 3754930, upload-time = "2025-09-22T04:03:41.565Z" }, - { url = "https://files.pythonhosted.org/packages/5e/5c/42c2c4c03554580708fc738d13414801f340c04c3eff90d8d2d227145275/lxml-6.0.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = 
"sha256:6162a86d86893d63084faaf4ff937b3daea233e3682fb4474db07395794fa80d", size = 8910380, upload-time = "2025-09-22T04:03:01.645Z" }, - { url = "https://files.pythonhosted.org/packages/bf/4f/12df843e3e10d18d468a7557058f8d3733e8b6e12401f30b1ef29360740f/lxml-6.0.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:414aaa94e974e23a3e92e7ca5b97d10c0cf37b6481f50911032c69eeb3991bba", size = 4775632, upload-time = "2025-09-22T04:03:03.814Z" }, - { url = "https://files.pythonhosted.org/packages/e4/0c/9dc31e6c2d0d418483cbcb469d1f5a582a1cd00a1f4081953d44051f3c50/lxml-6.0.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48461bd21625458dd01e14e2c38dd0aea69addc3c4f960c30d9f59d7f93be601", size = 4975171, upload-time = "2025-09-22T04:03:05.651Z" }, - { url = "https://files.pythonhosted.org/packages/e7/2b/9b870c6ca24c841bdd887504808f0417aa9d8d564114689266f19ddf29c8/lxml-6.0.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:25fcc59afc57d527cfc78a58f40ab4c9b8fd096a9a3f964d2781ffb6eb33f4ed", size = 5110109, upload-time = "2025-09-22T04:03:07.452Z" }, - { url = "https://files.pythonhosted.org/packages/bf/0c/4f5f2a4dd319a178912751564471355d9019e220c20d7db3fb8307ed8582/lxml-6.0.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5179c60288204e6ddde3f774a93350177e08876eaf3ab78aa3a3649d43eb7d37", size = 5041061, upload-time = "2025-09-22T04:03:09.297Z" }, - { url = "https://files.pythonhosted.org/packages/12/64/554eed290365267671fe001a20d72d14f468ae4e6acef1e179b039436967/lxml-6.0.2-cp314-cp314t-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:967aab75434de148ec80597b75062d8123cadf2943fb4281f385141e18b21338", size = 5306233, upload-time = "2025-09-22T04:03:11.651Z" }, - { url = "https://files.pythonhosted.org/packages/7a/31/1d748aa275e71802ad9722df32a7a35034246b42c0ecdd8235412c3396ef/lxml-6.0.2-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:d100fcc8930d697c6561156c6810ab4a508fb264c8b6779e6e61e2ed5e7558f9", size = 5604739, upload-time = "2025-09-22T04:03:13.592Z" }, - { url = "https://files.pythonhosted.org/packages/8f/41/2c11916bcac09ed561adccacceaedd2bf0e0b25b297ea92aab99fd03d0fa/lxml-6.0.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ca59e7e13e5981175b8b3e4ab84d7da57993eeff53c07764dcebda0d0e64ecd", size = 5225119, upload-time = "2025-09-22T04:03:15.408Z" }, - { url = "https://files.pythonhosted.org/packages/99/05/4e5c2873d8f17aa018e6afde417c80cc5d0c33be4854cce3ef5670c49367/lxml-6.0.2-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:957448ac63a42e2e49531b9d6c0fa449a1970dbc32467aaad46f11545be9af1d", size = 4633665, upload-time = "2025-09-22T04:03:17.262Z" }, - { url = "https://files.pythonhosted.org/packages/0f/c9/dcc2da1bebd6275cdc723b515f93edf548b82f36a5458cca3578bc899332/lxml-6.0.2-cp314-cp314t-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b7fc49c37f1786284b12af63152fe1d0990722497e2d5817acfe7a877522f9a9", size = 5234997, upload-time = "2025-09-22T04:03:19.14Z" }, - { url = "https://files.pythonhosted.org/packages/9c/e2/5172e4e7468afca64a37b81dba152fc5d90e30f9c83c7c3213d6a02a5ce4/lxml-6.0.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e19e0643cc936a22e837f79d01a550678da8377d7d801a14487c10c34ee49c7e", size = 5090957, upload-time = "2025-09-22T04:03:21.436Z" }, - { url = "https://files.pythonhosted.org/packages/a5/b3/15461fd3e5cd4ddcb7938b87fc20b14ab113b92312fc97afe65cd7c85de1/lxml-6.0.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:1db01e5cf14345628e0cbe71067204db658e2fb8e51e7f33631f5f4735fefd8d", size = 4764372, upload-time = "2025-09-22T04:03:23.27Z" }, - { url = "https://files.pythonhosted.org/packages/05/33/f310b987c8bf9e61c4dd8e8035c416bd3230098f5e3cfa69fc4232de7059/lxml-6.0.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:875c6b5ab39ad5291588aed6925fac99d0097af0dd62f33c7b43736043d4a2ec", size = 
5634653, upload-time = "2025-09-22T04:03:25.767Z" }, - { url = "https://files.pythonhosted.org/packages/70/ff/51c80e75e0bc9382158133bdcf4e339b5886c6ee2418b5199b3f1a61ed6d/lxml-6.0.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:cdcbed9ad19da81c480dfd6dd161886db6096083c9938ead313d94b30aadf272", size = 5233795, upload-time = "2025-09-22T04:03:27.62Z" }, - { url = "https://files.pythonhosted.org/packages/56/4d/4856e897df0d588789dd844dbed9d91782c4ef0b327f96ce53c807e13128/lxml-6.0.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:80dadc234ebc532e09be1975ff538d154a7fa61ea5031c03d25178855544728f", size = 5257023, upload-time = "2025-09-22T04:03:30.056Z" }, - { url = "https://files.pythonhosted.org/packages/0f/85/86766dfebfa87bea0ab78e9ff7a4b4b45225df4b4d3b8cc3c03c5cd68464/lxml-6.0.2-cp314-cp314t-win32.whl", hash = "sha256:da08e7bb297b04e893d91087df19638dc7a6bb858a954b0cc2b9f5053c922312", size = 3911420, upload-time = "2025-09-22T04:03:32.198Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1a/b248b355834c8e32614650b8008c69ffeb0ceb149c793961dd8c0b991bb3/lxml-6.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:252a22982dca42f6155125ac76d3432e548a7625d56f5a273ee78a5057216eca", size = 4406837, upload-time = "2025-09-22T04:03:34.027Z" }, - { url = "https://files.pythonhosted.org/packages/92/aa/df863bcc39c5e0946263454aba394de8a9084dbaff8ad143846b0d844739/lxml-6.0.2-cp314-cp314t-win_arm64.whl", hash = "sha256:bb4c1847b303835d89d785a18801a883436cdfd5dc3d62947f9c49e24f0f5a2c", size = 3822205, upload-time = "2025-09-22T04:03:36.249Z" }, -] - [[package]] name = "markdown-it-py" version = "4.0.0" From 0f156bab4781095f9f36a2fe9eb2de2f8f1c8661 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 16 Feb 2026 11:37:45 +0100 Subject: [PATCH 378/565] refactor(cli): update command from --get-session to --login across documentation and codebase Replaced instances of --get-session with --login in .env.example, AGENTS.md, README.md, and various Python files to 
reflect the new command structure for creating a browser profile. Updated related documentation to ensure consistency and clarity regarding authentication processes. --- .env.example | 2 +- AGENTS.md | 4 +- README.md | 55 ++++++++++++-------------- docs/docker-hub.md | 4 +- linkedin_mcp_server/authentication.py | 4 +- linkedin_mcp_server/cli_main.py | 18 ++++----- linkedin_mcp_server/config/loaders.py | 18 ++++----- linkedin_mcp_server/config/schema.py | 6 +-- linkedin_mcp_server/drivers/browser.py | 2 +- linkedin_mcp_server/error_handler.py | 6 +-- linkedin_mcp_server/exceptions.py | 2 +- 11 files changed, 59 insertions(+), 62 deletions(-) diff --git a/.env.example b/.env.example index cb7141ea..8b17ca37 100644 --- a/.env.example +++ b/.env.example @@ -2,7 +2,7 @@ # Copy this file to .env and fill in your values # Persistent browser profile directory (default: ~/.linkedin-mcp/profile) -# Run with --get-session to create a profile via browser login +# Run with --login to create a profile via browser login USER_DATA_DIR=~/.linkedin-mcp/profile # Browser mode (default: true) diff --git a/AGENTS.md b/AGENTS.md index bc74e18e..dbe8e3f6 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -25,7 +25,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co **Docker Commands:** - Build: `docker build -t linkedin-mcp-server .` -- Get session: Use uvx locally first: `uvx linkedin-scraper-mcp --get-session` +- Login: Use uvx locally first: `uvx linkedin-scraper-mcp --login` ## Architecture Overview @@ -81,7 +81,7 @@ All scraping tools return: `{url, sections: {name: raw_text}, pages_visited, sec **Authentication Flow:** - Uses persistent browser profile at `~/.linkedin-mcp/profile/` -- Run with `--get-session` to create a profile via browser login +- Run with `--login` to create a profile via browser login **Transport Modes:** diff --git a/README.md b/README.md index 5240b87f..06b1f0b8 100644 --- a/README.md +++ b/README.md @@ -47,11 +47,8 @@ What has 
Anthropic been posting about recently? https://www.linkedin.com/company | `get_job_details` | Get detailed information about a specific job posting | Working | | `close_session` | Close browser session and clean up resources | Working | -> [!WARNING] -> The browser profile at `~/.linkedin-mcp/profile/` contains sensitive authentication data. Keep it secure and do not share it. - > [!IMPORTANT] -> **Breaking change:** LinkedIn recently made some changes to prevent scraping. The newest version uses [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python) with persistent browser profiles instead of Playwright with session files. Old `session.json` files and `LINKEDIN_COOKIE` env vars are no longer supported. Run `--get-session` again to create a new profile + cookie file that can be mounted in docker. 02/2026 +> **Breaking change:** LinkedIn recently made some changes to prevent scraping. The newest version uses [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python) with persistent browser profiles instead of Playwright with session files. Old `session.json` files and `LINKEDIN_COOKIE` env vars are no longer supported. Run `--login` again to create a new profile + cookie file that can be mounted in docker. 02/2026

@@ -65,7 +62,7 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company **Step 1: Create a session (first time only)** ```bash -uvx linkedin-scraper-mcp --get-session +uvx linkedin-scraper-mcp --login ``` This opens a browser for you to log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile is saved to `~/.linkedin-mcp/profile/`. @@ -77,7 +74,7 @@ uvx linkedin-scraper-mcp ``` > [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --get-session` again. +> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --login` again. ### uvx Setup Help @@ -104,14 +101,14 @@ uvx linkedin-scraper-mcp **CLI Options:** -- `--get-session` - Open browser to log in and save persistent profile +- `--login` - Open browser to log in and save persistent profile - `--no-headless` - Show browser window (useful for debugging scraping issues) - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) - `--transport {stdio,streamable-http}` - Set transport mode - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) -- `--clear-session` - Clear stored LinkedIn browser profile +- `--logout` - Clear stored LinkedIn browser profile - `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) - `--user-data-dir PATH` - Path to persistent browser profile directory (default: ~/.linkedin-mcp/profile) - `--chrome-path PATH` - Path to Chrome/Chromium executable (for custom browser installations) @@ -120,7 +117,7 @@ uvx linkedin-scraper-mcp ```bash # Create a session interactively -uvx linkedin-scraper-mcp --get-session +uvx linkedin-scraper-mcp --login # Run with debug logging uvx linkedin-scraper-mcp --log-level DEBUG @@ -158,8 +155,8 @@ uvx linkedin-scraper-mcp --transport streamable-http --host 
127.0.0.1 --port 808 **Login issues:** -- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` -- You might get a captcha challenge if you logged in frequently. Run `uvx linkedin-scraper-mcp --get-session` which opens a browser where you can solve it manually. +- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--login` +- You might get a captcha challenge if you logged in frequently. Run `uvx linkedin-scraper-mcp --login` which opens a browser where you can solve it manually. **Timeout issues:** @@ -188,7 +185,7 @@ Docker runs headless (no browser window), so you need to create a browser profil **Step 1: Create profile using uvx (one-time setup)** ```bash -uvx linkedin-scraper-mcp --get-session +uvx linkedin-scraper-mcp --login ``` This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile is saved to `~/.linkedin-mcp/profile/`. @@ -211,10 +208,10 @@ This opens a browser window where you log in manually (5 minute timeout for 2FA, ``` > [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --get-session` again locally. +> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --login` again locally. > [!NOTE] -> **Why can't I run `--get-session` in Docker?** Docker containers don't have a display server. Create a profile on your host using the [uvx setup](#-uvx-setup-recommended---universal) and mount it into Docker. +> **Why can't I run `--login` in Docker?** Docker containers don't have a display server. Create a profile on your host using the [uvx setup](#-uvx-setup-recommended---universal) and mount it into Docker. 
### Docker Setup Help @@ -233,13 +230,13 @@ This opens a browser window where you log in manually (5 minute timeout for 2FA, - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) -- `--clear-session` - Clear stored LinkedIn browser profile +- `--logout` - Clear stored LinkedIn browser profile - `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) - `--user-data-dir PATH` - Path to persistent browser profile directory (default: ~/.linkedin-mcp/profile) - `--chrome-path PATH` - Path to Chrome/Chromium executable (rarely needed in Docker) > [!NOTE] -> `--get-session` and `--no-headless` are not available in Docker (no display server). Use the [uvx setup](#-uvx-setup-recommended---universal) to create profiles. +> `--login` and `--no-headless` are not available in Docker (no display server). Use the [uvx setup](#-uvx-setup-recommended---universal) to create profiles. **HTTP Mode Example (for web-based MCP clients):** @@ -273,8 +270,8 @@ docker run -it --rm \ **Login issues:** - Make sure you have only one active LinkedIn session at a time -- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` -- You might get a captcha challenge if you logged in frequently. Run `uvx linkedin-scraper-mcp --get-session` which opens a browser where you can solve captchas manually. See the [uvx setup](#-uvx-setup-recommended---universal) for prerequisites. +- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--login` +- You might get a captcha challenge if you logged in frequently. Run `uvx linkedin-scraper-mcp --login` which opens a browser where you can solve captchas manually. See the [uvx setup](#-uvx-setup-recommended---universal) for prerequisites. **Timeout issues:** @@ -300,10 +297,10 @@ docker run -it --rm \ 1. 
Download the [DXT extension](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest) 2. Double-click to install into Claude Desktop -3. Create a session: `uvx linkedin-scraper-mcp --get-session` +3. Create a session: `uvx linkedin-scraper-mcp --login` > [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --get-session` again. +> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --login` again. ### DXT Extension Setup Help @@ -330,8 +327,8 @@ docker run -it --rm \ **Login issues:** - Make sure you have only one active LinkedIn session at a time -- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` -- You might get a captcha challenge if you logged in frequently. Run `uvx linkedin-scraper-mcp --get-session` which opens a browser where you can solve captchas manually. See the [uvx setup](#-uvx-setup-recommended---universal) for prerequisites. +- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--login` +- You might get a captcha challenge if you logged in frequently. Run `uvx linkedin-scraper-mcp --login` which opens a browser where you can solve captchas manually. See the [uvx setup](#-uvx-setup-recommended---universal) for prerequisites. **Timeout issues:** @@ -369,7 +366,7 @@ uv run patchright install chromium uv run pre-commit install # 6. Create a session (first time only) -uv run -m linkedin_mcp_server --get-session +uv run -m linkedin_mcp_server --login # 7. 
Start the server uv run -m linkedin_mcp_server @@ -382,16 +379,16 @@ uv run -m linkedin_mcp_server **CLI Options:** -- `--get-session` - Open browser to log in and save persistent profile +- `--login` - Open browser to log in and save persistent profile - `--no-headless` - Show browser window (useful for debugging scraping issues) - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) - `--transport {stdio,streamable-http}` - Set transport mode - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) -- `--clear-session` - Clear stored LinkedIn browser profile +- `--logout` - Clear stored LinkedIn browser profile - `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) -- `--session-info` - Check if current session is valid and exit +- `--status` - Check if current session is valid and exit - `--user-data-dir PATH` - Path to persistent browser profile directory (default: ~/.linkedin-mcp/profile) - `--slow-mo MS` - Delay between browser actions in milliseconds (default: 0, useful for debugging) - `--user-agent STRING` - Custom browser user agent @@ -428,8 +425,8 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por **Login issues:** - Make sure you have only one active LinkedIn session at a time -- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--get-session` -- You might get a captcha challenge if you logged in frequently. The `--get-session` command opens a browser where you can solve it manually. +- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--login` +- You might get a captcha challenge if you logged in frequently. The `--login` command opens a browser where you can solve it manually. 
**Scraping issues:** @@ -439,7 +436,7 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por **Session issues:** - Browser profile is stored at `~/.linkedin-mcp/profile/` -- Use `--clear-session` to clear the profile and start fresh +- Use `--logout` to clear the profile and start fresh **Python/Patchright issues:** diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 6f9cea6c..bfe90568 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -17,7 +17,7 @@ Create a browser profile locally, then mount it into Docker. **Step 1: Create profile using uvx (one-time setup)** ```bash -uvx linkedin-scraper-mcp --get-session +uvx linkedin-scraper-mcp --login ``` **Step 2: Configure Claude Desktop with Docker** @@ -37,7 +37,7 @@ uvx linkedin-scraper-mcp --get-session } ``` -> **Note:** Docker containers don't have a display server, so you can't use the `--get-session` command in Docker. Create a profile on your host first. +> **Note:** Docker containers don't have a display server, so you can't use the `--login` command in Docker. Create a profile on your host first. ## Environment Variables diff --git a/linkedin_mcp_server/authentication.py b/linkedin_mcp_server/authentication.py index 0f79c80c..9027d2dd 100644 --- a/linkedin_mcp_server/authentication.py +++ b/linkedin_mcp_server/authentication.py @@ -35,10 +35,10 @@ def get_authentication_source() -> bool: raise CredentialsNotFoundError( "No LinkedIn authentication found.\n\n" "Options:\n" - " 1. Run with --get-session to create a browser profile (recommended)\n" + " 1. Run with --login to create a browser profile (recommended)\n" " 2. 
Run with --no-headless to login interactively\n\n" "For Docker users:\n" - " Create profile on host first: uvx linkedin-scraper-mcp --get-session\n" + " Create profile on host first: uvx linkedin-scraper-mcp --login\n" " Then mount into Docker: -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp" ) diff --git a/linkedin_mcp_server/cli_main.py b/linkedin_mcp_server/cli_main.py index cc687693..6d7bded6 100644 --- a/linkedin_mcp_server/cli_main.py +++ b/linkedin_mcp_server/cli_main.py @@ -133,7 +133,7 @@ def profile_info_and_exit() -> None: profile_dir = get_profile_dir() if not profile_exists(profile_dir): print(f"โŒ No browser profile found at {profile_dir}") - print(" Run with --get-session to create a profile") + print(" Run with --login to create a profile") sys.exit(1) # Check if session is valid by testing login status @@ -163,7 +163,7 @@ async def check_session() -> bool: sys.exit(0) else: print(f"โŒ Session expired or invalid (profile: {profile_dir})") - print(" Run with --get-session to re-authenticate") + print(" Run with --login to re-authenticate") sys.exit(1) @@ -192,7 +192,7 @@ def ensure_authentication_ready() -> None: raise CredentialsNotFoundError( "No LinkedIn profile found.\n" "Options:\n" - " 1. Run with --get-session to create a profile\n" + " 1. Run with --login to create a profile\n" " 2. 
Run with --no-headless to login interactively" ) @@ -244,16 +244,16 @@ def main() -> None: # Set headless mode from config set_headless(config.browser.headless) - # Handle --clear-session flag - if config.server.clear_session: + # Handle --logout flag + if config.server.logout: clear_profile_and_exit() - # Handle --get-session flag - if config.server.get_session: + # Handle --login flag + if config.server.login: get_profile_and_exit() - # Handle --session-info flag - if config.server.session_info: + # Handle --status flag + if config.server.status: profile_info_and_exit() logger.debug(f"Server configuration: {config}") diff --git a/linkedin_mcp_server/config/loaders.py b/linkedin_mcp_server/config/loaders.py index 2680a59d..e3c86133 100644 --- a/linkedin_mcp_server/config/loaders.py +++ b/linkedin_mcp_server/config/loaders.py @@ -238,19 +238,19 @@ def load_from_args(config: AppConfig) -> AppConfig: # Session management parser.add_argument( - "--get-session", + "--login", action="store_true", help="Login interactively via browser and save persistent profile", ) parser.add_argument( - "--session-info", + "--status", action="store_true", help="Check if current session is valid and exit", ) parser.add_argument( - "--clear-session", + "--logout", action="store_true", help="Clear stored LinkedIn browser profile", ) @@ -310,14 +310,14 @@ def load_from_args(config: AppConfig) -> AppConfig: config.browser.chrome_path = args.chrome_path # Session management - if args.get_session: - config.server.get_session = True + if args.login: + config.server.login = True - if args.session_info: - config.server.session_info = True + if args.status: + config.server.status = True - if args.clear_session: - config.server.clear_session = True + if args.logout: + config.server.logout = True if args.user_data_dir: config.browser.user_data_dir = args.user_data_dir diff --git a/linkedin_mcp_server/config/schema.py b/linkedin_mcp_server/config/schema.py index d7d2c5f2..01fd37cf 100644 --- 
a/linkedin_mcp_server/config/schema.py +++ b/linkedin_mcp_server/config/schema.py @@ -60,9 +60,9 @@ class ServerConfig: transport: Literal["stdio", "streamable-http"] = "stdio" transport_explicitly_set: bool = False log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR"] = "WARNING" - get_session: bool = False - session_info: bool = False # Check session validity and exit - clear_session: bool = False + login: bool = False + status: bool = False # Check session validity and exit + logout: bool = False # HTTP transport configuration host: str = "127.0.0.1" port: int = 8000 diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index cbb8c9fb..d7849bd8 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -144,7 +144,7 @@ async def get_or_create_browser( # Auth failed โ€” clean up and fail fast await browser.close() raise AuthenticationError( - "No authentication found. Run with --get-session to create a profile." + "No authentication found. Run with --login to create a profile." 
) diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index ba682ffc..a6fbb581 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -58,21 +58,21 @@ def convert_exception_to_response( return { "error": "authentication_not_found", "message": str(exception), - "resolution": "Run with --get-session to create a browser profile.", + "resolution": "Run with --login to create a browser profile.", } elif isinstance(exception, SessionExpiredError): return { "error": "session_expired", "message": str(exception), - "resolution": "Run with --get-session to create a new browser profile.", + "resolution": "Run with --login to create a new browser profile.", } elif isinstance(exception, AuthenticationError): return { "error": "authentication_failed", "message": str(exception), - "resolution": "Run with --get-session to re-authenticate.", + "resolution": "Run with --login to re-authenticate.", } elif isinstance(exception, RateLimitError): diff --git a/linkedin_mcp_server/exceptions.py b/linkedin_mcp_server/exceptions.py index 25c38ca2..b06b49d8 100644 --- a/linkedin_mcp_server/exceptions.py +++ b/linkedin_mcp_server/exceptions.py @@ -26,6 +26,6 @@ def __init__(self, message: str | None = None): default_msg = ( "LinkedIn session has expired.\n\n" "To fix this:\n" - " Run with --get-session to create a new session" + " Run with --login to create a new session" ) super().__init__(message or default_msg) From 02004a60cc16556560756a861e8b193d02d36d67 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 16 Feb 2026 12:45:23 +0100 Subject: [PATCH 379/565] feat(scraper): enhance LinkedIn scraping functionality and error handling Updated the LinkedIn scraping tools to improve noise filtering and retry logic for rate-limited pages. Introduced a new function to strip LinkedIn page noise, ensuring cleaner content extraction. 
Enhanced error logging for unknown section names in scraping fields and adjusted the default user data directory for browser management. Updated tests to cover new functionality and ensure robust error handling. --- .gitignore | 3 + AGENTS.md | 3 +- linkedin_mcp_server/callbacks.py | 4 +- linkedin_mcp_server/core/__init__.py | 2 +- linkedin_mcp_server/core/auth.py | 4 +- linkedin_mcp_server/core/browser.py | 6 +- linkedin_mcp_server/scraping/extractor.py | 153 ++++++++++++++++------ linkedin_mcp_server/scraping/fields.py | 31 ++++- linkedin_mcp_server/tools/company.py | 6 +- linkedin_mcp_server/tools/job.py | 6 +- linkedin_mcp_server/tools/person.py | 9 +- scripts/dump_snapshots.py | 87 ++++++++++++ tests/test_fields.py | 23 ++-- tests/test_scraping.py | 153 ++++++++++++++++++++-- tests/test_tools.py | 8 +- 15 files changed, 413 insertions(+), 85 deletions(-) create mode 100644 scripts/dump_snapshots.py diff --git a/.gitignore b/.gitignore index 7559fe2e..d8eea1ea 100644 --- a/.gitignore +++ b/.gitignore @@ -202,3 +202,6 @@ cython_debug/ # Portable cookie file (contains session data) cookies.json + +# Local snapshot dumps (contain scraped LinkedIn data) +scripts/snapshot_dumps/ diff --git a/AGENTS.md b/AGENTS.md index dbe8e3f6..e54ded43 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -55,7 +55,7 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis | Tool | Description | |------|-------------| -| `get_person_profile` | Get profile with explicit `sections` selection (experience, education, interests, accomplishments, contacts) | +| `get_person_profile` | Get profile with explicit `sections` selection (experience, education, interests, honors, languages, contact_info) | | `get_company_profile` | Get company info with explicit `sections` selection (posts, jobs) | | `get_company_posts` | Get recent posts from company feed | | `get_job_details` | Get job posting details | @@ -70,6 +70,7 @@ All scraping tools return: `{url, sections: {name: 
raw_text}, pages_visited, sec - `fields.py` - `PersonScrapingFields` and `CompanyScrapingFields` Flag enums - `extractor.py` - `LinkedInExtractor` class using navigate-scroll-innerText pattern +- **One flag = one navigation.** Each `PersonScrapingFields` / `CompanyScrapingFields` flag must map to exactly one page navigation. Never combine multiple URLs behind a single flag. **Core Subpackage (`core/`):** diff --git a/linkedin_mcp_server/callbacks.py b/linkedin_mcp_server/callbacks.py index 31405b87..71e26197 100644 --- a/linkedin_mcp_server/callbacks.py +++ b/linkedin_mcp_server/callbacks.py @@ -1,8 +1,8 @@ """ Progress callbacks for MCP tools. -Provides callback implementations that log progress for LinkedIn scraping operations -and report progress to MCP clients via FastMCP Context. +Provides callback implementations that report progress for LinkedIn scraping +operations to MCP clients via FastMCP Context. """ from typing import Any diff --git a/linkedin_mcp_server/core/__init__.py b/linkedin_mcp_server/core/__init__.py index 782d76f8..a0a9a326 100644 --- a/linkedin_mcp_server/core/__init__.py +++ b/linkedin_mcp_server/core/__init__.py @@ -1,4 +1,4 @@ -"""Core utilities inlined from linkedin_scraper.""" +"""Core browser management, authentication, and scraping utilities.""" from .auth import is_logged_in, wait_for_manual_login, warm_up_browser from .browser import BrowserManager diff --git a/linkedin_mcp_server/core/auth.py b/linkedin_mcp_server/core/auth.py index feecb8e0..8a5b5043 100644 --- a/linkedin_mcp_server/core/auth.py +++ b/linkedin_mcp_server/core/auth.py @@ -79,8 +79,8 @@ async def is_logged_in(page: Page) -> bool: except PlaywrightTimeoutError: return False except Exception: - logger.warning("Unexpected error checking login status", exc_info=True) - return False + logger.error("Unexpected error checking login status", exc_info=True) + raise async def wait_for_manual_login(page: Page, timeout: int = 300000) -> None: diff --git 
a/linkedin_mcp_server/core/browser.py b/linkedin_mcp_server/core/browser.py index 6282bd10..9c4ba754 100644 --- a/linkedin_mcp_server/core/browser.py +++ b/linkedin_mcp_server/core/browser.py @@ -16,7 +16,7 @@ logger = logging.getLogger(__name__) -_DEFAULT_USER_DATA_DIR = Path.home() / ".linkedin_scraper" / "browser_data" +_DEFAULT_USER_DATA_DIR = Path.home() / ".linkedin-mcp" / "profile" class BrowserManager: @@ -59,6 +59,8 @@ async def __aexit__( async def start(self) -> None: """Start Patchright and launch persistent browser context.""" + if self._context is not None: + raise RuntimeError("Browser already started. Call close() first.") try: self._playwright = await async_playwright().start() @@ -94,7 +96,7 @@ async def start(self) -> None: except Exception as e: await self.close() - raise NetworkError(f"Failed to start browser: {e}") + raise NetworkError(f"Failed to start browser: {e}") from e async def close(self) -> None: """Close persistent context and cleanup resources.""" diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 89d020b7..3389e1fe 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -2,11 +2,13 @@ import asyncio import logging +import re from typing import Any from urllib.parse import quote_plus from patchright.async_api import Page, TimeoutError as PlaywrightTimeoutError +from linkedin_mcp_server.core.exceptions import LinkedInScraperException from linkedin_mcp_server.core.utils import ( detect_rate_limit, handle_modal_close, @@ -23,7 +25,40 @@ logger = logging.getLogger(__name__) # Delay between page navigations to avoid rate limiting -_NAV_DELAY = 1.0 +_NAV_DELAY = 2.0 + +# Backoff before retrying a rate-limited page +_RATE_LIMIT_RETRY_DELAY = 5.0 + +# Returned as section text when LinkedIn rate-limits the page +_RATE_LIMITED_MSG = "[Rate limited] LinkedIn blocked this section. Try again later or request fewer sections." 
+ +# Patterns that mark the start of LinkedIn page chrome (sidebar/footer). +# Everything from the earliest match onwards is stripped. +_NOISE_MARKERS: list[re.Pattern[str]] = [ + # Footer nav links: "About" immediately followed by "Accessibility" or "Talent Solutions" + re.compile(r"^About\n+(?:Accessibility|Talent Solutions)", re.MULTILINE), + # Sidebar profile recommendations + re.compile(r"^More profiles for you$", re.MULTILINE), + # Sidebar premium upsell + re.compile(r"^Explore premium profiles$", re.MULTILINE), + # InMail upsell in contact info overlay + re.compile(r"^Get up to .+ replies when you message with InMail$", re.MULTILINE), +] + + +def strip_linkedin_noise(text: str) -> str: + """Remove LinkedIn page chrome (footer, sidebar recommendations) from innerText. + + Finds the earliest occurrence of any known noise marker and truncates there. + """ + earliest = len(text) + for pattern in _NOISE_MARKERS: + match = pattern.search(text) + if match and match.start() < earliest: + earliest = match.start() + + return text[:earliest].strip() class LinkedInExtractor: @@ -35,69 +70,109 @@ def __init__(self, page: Page): async def extract_page(self, url: str) -> str: """Navigate to a URL, scroll to load lazy content, and extract innerText. + Retries once after a backoff when the page returns only LinkedIn chrome + (sidebar/footer noise with no actual content), which indicates a soft + rate limit. + Returns empty string on failure (error isolation per section). """ try: - await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) - await detect_rate_limit(self._page) + result = await self._extract_page_once(url) + if result != _RATE_LIMITED_MSG: + return result - # Wait for main content to render - try: - await self._page.wait_for_selector("main", timeout=5000) - except PlaywrightTimeoutError: - logger.debug("No
element found on %s", url) + # Retry once after backoff + logger.info("Retrying %s after %.0fs backoff", url, _RATE_LIMIT_RETRY_DELAY) + await asyncio.sleep(_RATE_LIMIT_RETRY_DELAY) + return await self._extract_page_once(url) + + except LinkedInScraperException: + raise + except Exception as e: + logger.warning("Failed to extract page %s: %s", url, e) + return "" - # Dismiss any modals blocking content - await handle_modal_close(self._page) + async def _extract_page_once(self, url: str) -> str: + """Single attempt to navigate, scroll, and extract innerText.""" + await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + await detect_rate_limit(self._page) - # Scroll to trigger lazy loading - await scroll_to_bottom(self._page, pause_time=0.5, max_scrolls=5) + # Wait for main content to render + try: + await self._page.wait_for_selector("main", timeout=5000) + except PlaywrightTimeoutError: + logger.debug("No
element found on %s", url) - # Extract text from main content area - text = await self._page.evaluate( - """() => { - const main = document.querySelector('main'); - return main ? main.innerText : document.body.innerText; - }""" - ) + # Dismiss any modals blocking content + await handle_modal_close(self._page) - return text.strip() if text else "" + # Scroll to trigger lazy loading + await scroll_to_bottom(self._page, pause_time=0.5, max_scrolls=5) - except Exception as e: - logger.warning("Failed to extract page %s: %s", url, e) + # Extract text from main content area + raw = await self._page.evaluate( + """() => { + const main = document.querySelector('main'); + return main ? main.innerText : document.body.innerText; + }""" + ) + + if not raw: return "" + cleaned = strip_linkedin_noise(raw) + if not cleaned and raw.strip(): + logger.warning( + "Page %s returned only LinkedIn chrome (likely rate-limited)", url + ) + return _RATE_LIMITED_MSG + return cleaned async def _extract_overlay(self, url: str) -> str: """Extract content from an overlay/modal page (e.g. contact info). - Falls back to `.artdeco-modal__content` if `
` is empty. + LinkedIn renders contact info as a native element. + Falls back to `
` if no dialog is found. """ try: await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) await detect_rate_limit(self._page) - # Wait for modal content + # Wait for the dialog/modal to render (LinkedIn uses native ) try: await self._page.wait_for_selector( - "main, .artdeco-modal__content", timeout=5000 + "dialog[open], .artdeco-modal__content", timeout=5000 ) except PlaywrightTimeoutError: - logger.debug("No overlay content found on %s", url) + logger.debug("No modal overlay found on %s, falling back to main", url) - await handle_modal_close(self._page) + # NOTE: Do NOT call handle_modal_close() here โ€” the contact-info + # overlay *is* a dialog/modal. Dismissing it would destroy the + # content before the JS evaluation below can read it. - text = await self._page.evaluate( + raw = await self._page.evaluate( """() => { - const main = document.querySelector('main'); - const mainText = main ? main.innerText.trim() : ''; - if (mainText) return mainText; + const dialog = document.querySelector('dialog[open]'); + if (dialog) return dialog.innerText.trim(); const modal = document.querySelector('.artdeco-modal__content'); - return modal ? modal.innerText.trim() : document.body.innerText.trim(); + if (modal) return modal.innerText.trim(); + const main = document.querySelector('main'); + return main ? 
main.innerText.trim() : document.body.innerText.trim(); }""" ) - return text.strip() if text else "" + if not raw: + return "" + cleaned = strip_linkedin_noise(raw) + if not cleaned and raw.strip(): + logger.warning( + "Overlay %s returned only LinkedIn chrome (likely rate-limited)", + url, + ) + return _RATE_LIMITED_MSG + return cleaned + except LinkedInScraperException: + raise except Exception as e: logger.warning("Failed to extract overlay %s: %s", url, e) return "" @@ -136,20 +211,20 @@ async def scrape_person( False, ), ( - PersonScrapingFields.ACCOMPLISHMENTS, + PersonScrapingFields.HONORS, "honors", "/details/honors/", False, ), ( - PersonScrapingFields.ACCOMPLISHMENTS, + PersonScrapingFields.LANGUAGES, "languages", "/details/languages/", False, ), ( - PersonScrapingFields.CONTACTS, - "contacts", + PersonScrapingFields.CONTACT_INFO, + "contact_info", "/overlay/contact-info/", True, ), @@ -169,6 +244,8 @@ async def scrape_person( if text: sections[section_name] = text pages_visited.append(url) + except LinkedInScraperException: + raise except Exception as e: logger.warning("Error scraping section %s: %s", section_name, e) pages_visited.append(url) @@ -218,6 +295,8 @@ async def scrape_company( if text: sections[section_name] = text pages_visited.append(url) + except LinkedInScraperException: + raise except Exception as e: logger.warning("Error scraping section %s: %s", section_name, e) pages_visited.append(url) diff --git a/linkedin_mcp_server/scraping/fields.py b/linkedin_mcp_server/scraping/fields.py index 4c0b2a25..f54fe6db 100644 --- a/linkedin_mcp_server/scraping/fields.py +++ b/linkedin_mcp_server/scraping/fields.py @@ -1,7 +1,10 @@ """Flag enums controlling which LinkedIn pages are visited during scraping.""" +import logging from enum import Flag, auto +logger = logging.getLogger(__name__) + class PersonScrapingFields(Flag): """Controls which pages are visited when scraping a person profile.""" @@ -10,8 +13,9 @@ class PersonScrapingFields(Flag): 
EXPERIENCE = auto() # /in/{username}/details/experience/ EDUCATION = auto() # /in/{username}/details/education/ INTERESTS = auto() # /in/{username}/details/interests/ - ACCOMPLISHMENTS = auto() # /in/{username}/details/honors/ + /details/languages/ - CONTACTS = auto() # /in/{username}/overlay/contact-info/ + HONORS = auto() # /in/{username}/details/honors/ + LANGUAGES = auto() # /in/{username}/details/languages/ + CONTACT_INFO = auto() # /in/{username}/overlay/contact-info/ class CompanyScrapingFields(Flag): @@ -27,8 +31,9 @@ class CompanyScrapingFields(Flag): "experience": PersonScrapingFields.EXPERIENCE, "education": PersonScrapingFields.EDUCATION, "interests": PersonScrapingFields.INTERESTS, - "accomplishments": PersonScrapingFields.ACCOMPLISHMENTS, - "contacts": PersonScrapingFields.CONTACTS, + "honors": PersonScrapingFields.HONORS, + "languages": PersonScrapingFields.LANGUAGES, + "contact_info": PersonScrapingFields.CONTACT_INFO, } COMPANY_SECTION_MAP: dict[str, CompanyScrapingFields] = { @@ -41,14 +46,23 @@ def parse_person_sections(sections: str | None) -> PersonScrapingFields: """Parse comma-separated section names into PersonScrapingFields. BASIC_INFO is always included. Empty/None returns BASIC_INFO only. + Unknown section names are logged as warnings. """ flags = PersonScrapingFields.BASIC_INFO if not sections: return flags for name in sections.split(","): name = name.strip().lower() + if not name: + continue if name in PERSON_SECTION_MAP: flags |= PERSON_SECTION_MAP[name] + else: + logger.warning( + "Unknown person section %r ignored. Valid: %s", + name, + ", ".join(sorted(PERSON_SECTION_MAP)), + ) return flags @@ -56,12 +70,21 @@ def parse_company_sections(sections: str | None) -> CompanyScrapingFields: """Parse comma-separated section names into CompanyScrapingFields. ABOUT is always included. Empty/None returns ABOUT only. + Unknown section names are logged as warnings. 
""" flags = CompanyScrapingFields.ABOUT if not sections: return flags for name in sections.split(","): name = name.strip().lower() + if not name: + continue if name in COMPANY_SECTION_MAP: flags |= COMPANY_SECTION_MAP[name] + else: + logger.warning( + "Unknown company section %r ignored. Valid: %s", + name, + ", ".join(sorted(COMPANY_SECTION_MAP)), + ) return flags diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 92bf810f..faea87e2 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -6,7 +6,7 @@ """ import logging -from typing import Any, Dict +from typing import Any from fastmcp import Context, FastMCP from mcp.types import ToolAnnotations @@ -36,7 +36,7 @@ async def get_company_profile( company_name: str, ctx: Context, sections: str | None = None, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """ Get a specific company's LinkedIn profile. @@ -91,7 +91,7 @@ async def get_company_profile( async def get_company_posts( company_name: str, ctx: Context, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """ Get recent posts from a company's LinkedIn feed. diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index c3bbf62c..3eadf552 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -5,7 +5,7 @@ """ import logging -from typing import Any, Dict +from typing import Any from fastmcp import Context, FastMCP from mcp.types import ToolAnnotations @@ -31,7 +31,7 @@ def register_job_tools(mcp: FastMCP) -> None: openWorldHint=True, ) ) - async def get_job_details(job_id: str, ctx: Context) -> Dict[str, Any]: + async def get_job_details(job_id: str, ctx: Context) -> dict[str, Any]: """ Get job details for a specific job posting on LinkedIn. @@ -76,7 +76,7 @@ async def search_jobs( keywords: str, ctx: Context, location: str | None = None, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """ Search for jobs on LinkedIn. 
diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 5eef3ca0..473dce23 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -6,7 +6,7 @@ """ import logging -from typing import Any, Dict +from typing import Any from fastmcp import Context, FastMCP from mcp.types import ToolAnnotations @@ -36,7 +36,7 @@ async def get_person_profile( linkedin_username: str, ctx: Context, sections: str | None = None, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """ Get a specific person's LinkedIn profile. @@ -45,12 +45,13 @@ async def get_person_profile( ctx: FastMCP context for progress reporting sections: Comma-separated list of extra sections to scrape. The main profile page is always included. - Available sections: experience, education, interests, accomplishments, contacts - Examples: "experience,education", "contacts", "experience,contacts" + Available sections: experience, education, interests, honors, languages, contact_info + Examples: "experience,education", "contact_info", "honors,languages" Default (None) scrapes only the main profile page. Returns: Dict with url, sections (name -> raw text), pages_visited, and sections_requested. + Sections may be absent if extraction yielded no content for that page. The LLM should parse the raw text in each section. """ try: diff --git a/scripts/dump_snapshots.py b/scripts/dump_snapshots.py new file mode 100644 index 00000000..69773943 --- /dev/null +++ b/scripts/dump_snapshots.py @@ -0,0 +1,87 @@ +"""Dump LinkedIn scraper output as timestamped local snapshots. + +Uses the same code paths as production (parse_person_sections / parse_company_sections). 
+ +Run: uv run python scripts/dump_snapshots.py +""" + +import asyncio +import json +import sys +from datetime import datetime +from pathlib import Path + +# Add project root to path +sys.path.insert(0, str(Path(__file__).parent.parent)) + +from linkedin_mcp_server.drivers.browser import ( + close_browser, + ensure_authenticated, + get_or_create_browser, + set_headless, +) +from linkedin_mcp_server.scraping import ( + LinkedInExtractor, + parse_company_sections, + parse_person_sections, +) + +OUTPUT_DIR = Path(__file__).parent / "snapshot_dumps" + +# Targets using the same section strings as prod tool calls +PERSON_TARGETS: list[tuple[str, str]] = [ + ("williamhgates", "experience,education,interests,honors,languages,contact_info"), + ("anistji", "experience,education,honors,languages,contact_info"), +] + +COMPANY_TARGETS: list[tuple[str, str]] = [ + ("anthropicresearch", "posts,jobs"), +] + + +async def main(): + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + run_dir = OUTPUT_DIR / timestamp + run_dir.mkdir(parents=True, exist_ok=True) + + set_headless(True) + + try: + await ensure_authenticated() + browser = await get_or_create_browser() + extractor = LinkedInExtractor(browser.page) + + for username, sections_str in PERSON_TARGETS: + print(f"\n--- Scraping person: {username} (sections: {sections_str}) ---") + fields = parse_person_sections(sections_str) + result = await extractor.scrape_person(username, fields) + + dump_path = run_dir / f"person_{username}.json" + dump_path.write_text(json.dumps(result, indent=2, ensure_ascii=False)) + + for section_name, text in result["sections"].items(): + txt_path = run_dir / f"person_{username}_{section_name}.txt" + txt_path.write_text(text) + print(f" {section_name}: {len(text)} chars") + + for company, sections_str in COMPANY_TARGETS: + print(f"\n--- Scraping company: {company} (sections: {sections_str}) ---") + fields = parse_company_sections(sections_str) + result = await extractor.scrape_company(company, fields) 
+ + dump_path = run_dir / f"company_{company}.json" + dump_path.write_text(json.dumps(result, indent=2, ensure_ascii=False)) + + for section_name, text in result["sections"].items(): + txt_path = run_dir / f"company_{company}_{section_name}.txt" + txt_path.write_text(text) + print(f" {section_name}: {len(text)} chars") + + finally: + await close_browser() + + print(f"\nโœ… Snapshots saved to {run_dir}/") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/tests/test_fields.py b/tests/test_fields.py index 276af54e..cf0a961f 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -15,17 +15,18 @@ def test_atomic_flags_are_distinct(self): PersonScrapingFields.EXPERIENCE, PersonScrapingFields.EDUCATION, PersonScrapingFields.INTERESTS, - PersonScrapingFields.ACCOMPLISHMENTS, - PersonScrapingFields.CONTACTS, + PersonScrapingFields.HONORS, + PersonScrapingFields.LANGUAGES, + PersonScrapingFields.CONTACT_INFO, ] for i, a in enumerate(flags): for b in flags[i + 1 :]: assert a & b == PersonScrapingFields(0) def test_flag_bitwise_or(self): - combined = PersonScrapingFields.BASIC_INFO | PersonScrapingFields.CONTACTS + combined = PersonScrapingFields.BASIC_INFO | PersonScrapingFields.CONTACT_INFO assert PersonScrapingFields.BASIC_INFO in combined - assert PersonScrapingFields.CONTACTS in combined + assert PersonScrapingFields.CONTACT_INFO in combined assert PersonScrapingFields.EXPERIENCE not in combined @@ -49,8 +50,11 @@ def test_empty_string_returns_basic_info_only(self): assert parse_person_sections("") == PersonScrapingFields.BASIC_INFO def test_single_section(self): - result = parse_person_sections("contacts") - assert result == PersonScrapingFields.BASIC_INFO | PersonScrapingFields.CONTACTS + result = parse_person_sections("contact_info") + assert ( + result + == PersonScrapingFields.BASIC_INFO | PersonScrapingFields.CONTACT_INFO + ) def test_multiple_sections(self): result = parse_person_sections("experience,education") @@ -81,15 +85,16 @@ def 
test_whitespace_and_case_handling(self): def test_all_sections(self): result = parse_person_sections( - "experience,education,interests,accomplishments,contacts" + "experience,education,interests,honors,languages,contact_info" ) expected = ( PersonScrapingFields.BASIC_INFO | PersonScrapingFields.EXPERIENCE | PersonScrapingFields.EDUCATION | PersonScrapingFields.INTERESTS - | PersonScrapingFields.ACCOMPLISHMENTS - | PersonScrapingFields.CONTACTS + | PersonScrapingFields.HONORS + | PersonScrapingFields.LANGUAGES + | PersonScrapingFields.CONTACT_INFO ) assert result == expected diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 1194117f..7fb97562 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -4,7 +4,11 @@ import pytest -from linkedin_mcp_server.scraping.extractor import LinkedInExtractor +from linkedin_mcp_server.scraping.extractor import ( + LinkedInExtractor, + _RATE_LIMITED_MSG, + strip_linkedin_noise, +) from linkedin_mcp_server.scraping.fields import ( CompanyScrapingFields, PersonScrapingFields, @@ -75,16 +79,92 @@ async def test_rate_limit_detected(self, mock_page): from linkedin_mcp_server.core.exceptions import RateLimitError extractor = LinkedInExtractor(mock_page) - with patch( - "linkedin_mcp_server.scraping.extractor.detect_rate_limit", - new_callable=AsyncMock, - side_effect=RateLimitError("Rate limited", suggested_wait_time=3600), + with ( + patch( + "linkedin_mcp_server.scraping.extractor.detect_rate_limit", + new_callable=AsyncMock, + side_effect=RateLimitError("Rate limited", suggested_wait_time=3600), + ), + pytest.raises(RateLimitError), + ): + await extractor.extract_page("https://www.linkedin.com/in/testuser/") + + async def test_returns_rate_limited_msg_after_retry(self, mock_page): + """When both attempts return only noise, surface rate limit message.""" + noise_only = ( + "More profiles for you\n\n" + "You've approached your profile search limit\n\n" + "About\nAccessibility\nTalent Solutions" + ) + 
mock_page.evaluate = AsyncMock(return_value=noise_only) + extractor = LinkedInExtractor(mock_page) + with ( + patch( + "linkedin_mcp_server.scraping.extractor.scroll_to_bottom", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.detect_rate_limit", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.handle_modal_close", + new_callable=AsyncMock, + return_value=False, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), ): - # extract_page catches all exceptions and returns "" result = await extractor.extract_page( - "https://www.linkedin.com/in/testuser/" + "https://www.linkedin.com/in/testuser/details/experience/" ) - assert result == "" + + assert result == _RATE_LIMITED_MSG + # goto called twice (initial + retry) + assert mock_page.goto.await_count == 2 + + async def test_retry_succeeds_after_rate_limit(self, mock_page): + """When first attempt is rate-limited but retry succeeds, return content.""" + noise_only = "More profiles for you\n\nAbout\nAccessibility\nTalent Solutions" + call_count = 0 + + async def evaluate_side_effect(*args, **kwargs): + nonlocal call_count + call_count += 1 + # First two calls are from first attempt (goto triggers evaluate via + # _extract_page_once), return noise. Third+ calls return real content. 
+ if call_count <= 1: + return noise_only + return "Education\nHarvard University\n1973 โ€“ 1975" + + mock_page.evaluate = AsyncMock(side_effect=evaluate_side_effect) + extractor = LinkedInExtractor(mock_page) + with ( + patch( + "linkedin_mcp_server.scraping.extractor.scroll_to_bottom", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.detect_rate_limit", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.handle_modal_close", + new_callable=AsyncMock, + return_value=False, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.extract_page( + "https://www.linkedin.com/in/testuser/details/education/" + ) + + assert result == "Education\nHarvard University\n1973 โ€“ 1975" class TestScrapePersonUrls: @@ -155,8 +235,9 @@ async def test_all_flags_visit_all_pages(self, mock_page): | PersonScrapingFields.EXPERIENCE | PersonScrapingFields.EDUCATION | PersonScrapingFields.INTERESTS - | PersonScrapingFields.ACCOMPLISHMENTS - | PersonScrapingFields.CONTACTS + | PersonScrapingFields.HONORS + | PersonScrapingFields.LANGUAGES + | PersonScrapingFields.CONTACT_INFO ) with ( patch.object( @@ -175,15 +256,16 @@ async def test_all_flags_visit_all_pages(self, mock_page): result = await extractor.scrape_person("testuser", fields) urls = result["pages_visited"] - # main_profile, experience, education, interests, honors, languages, contacts + # main_profile, experience, education, interests, honors, languages, contact_info assert len(urls) == 7 assert result["sections_requested"] == [ "main_profile", "experience", "education", "interests", - "accomplishments", - "contacts", + "honors", + "languages", + "contact_info", ] async def test_error_isolation(self, mock_page): @@ -290,3 +372,48 @@ async def test_search_jobs(self, mock_page): assert "location=Remote" in result["url"] assert "search_results" in result["sections"] assert 
result["sections_requested"] == ["search_results"] + + +class TestStripLinkedInNoise: + def test_strips_footer(self): + text = "Bill Gates\nChair, Gates Foundation\n\nAbout\nAccessibility\nTalent Solutions\nCareers" + assert strip_linkedin_noise(text) == "Bill Gates\nChair, Gates Foundation" + + def test_strips_footer_with_talent_solutions_variant(self): + text = "Profile content here\n\nAbout\nTalent Solutions\nMore footer" + assert strip_linkedin_noise(text) == "Profile content here" + + def test_strips_sidebar_recommendations(self): + text = "Experience\nCo-chair\nGates Foundation\n\nMore profiles for you\nSundar Pichai\nCEO at Google" + assert strip_linkedin_noise(text) == "Experience\nCo-chair\nGates Foundation" + + def test_strips_premium_upsell(self): + text = "Education\nHarvard University\n\nExplore premium profiles\nRandom Person\nSoftware Engineer" + assert strip_linkedin_noise(text) == "Education\nHarvard University" + + def test_picks_earliest_marker(self): + text = "Content\n\nExplore premium profiles\nStuff\n\nMore profiles for you\nMore stuff\n\nAbout\nAccessibility" + assert strip_linkedin_noise(text) == "Content" + + def test_no_noise_returns_unchanged(self): + text = "Clean content with no LinkedIn chrome" + assert strip_linkedin_noise(text) == "Clean content with no LinkedIn chrome" + + def test_empty_string(self): + assert strip_linkedin_noise("") == "" + + def test_about_in_profile_content_not_stripped(self): + """'About' followed by actual content (not 'Accessibility') should be preserved.""" + text = "About\nChair of the Gates Foundation.\n\nFeatured\nPost" + assert ( + strip_linkedin_noise(text) + == "About\nChair of the Gates Foundation.\n\nFeatured\nPost" + ) + + def test_real_footer_with_languages(self): + text = ( + "Company info\n\n" + "About\nAccessibility\nTalent Solutions\nCareers\n" + "Select language\nEnglish (English)\nDeutsch (German)" + ) + assert strip_linkedin_noise(text) == "Company info" diff --git a/tests/test_tools.py 
b/tests/test_tools.py index 8ee0d046..6d9ee3aa 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -80,14 +80,14 @@ async def test_get_person_profile_with_sections( "sections": { "main_profile": "John Doe", "experience": "Work history", - "contacts": "Email: test@test.com", + "contact_info": "Email: test@test.com", }, "pages_visited": [ "https://www.linkedin.com/in/test-user/", "https://www.linkedin.com/in/test-user/details/experience/", "https://www.linkedin.com/in/test-user/overlay/contact-info/", ], - "sections_requested": ["main_profile", "experience", "contacts"], + "sections_requested": ["main_profile", "experience", "contact_info"], } mock_extractor = _make_mock_extractor(expected) monkeypatch.setattr( @@ -102,12 +102,12 @@ async def test_get_person_profile_with_sections( tool_fn = await get_tool_fn(mcp, "get_person_profile") result = await tool_fn( - "test-user", mock_context, sections="experience,contacts" + "test-user", mock_context, sections="experience,contact_info" ) assert result["sections_requested"] == [ "main_profile", "experience", - "contacts", + "contact_info", ] mock_extractor.scrape_person.assert_awaited_once() From bd8d455fc40da2733b0d9a48af4a0a6b63dcf8be Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 16 Feb 2026 14:39:18 +0100 Subject: [PATCH 380/565] feat(scraper): enhance section parsing and error reporting in LinkedIn tools Updated the section parsing functions for person and company profiles to return unknown section names alongside the parsed flags. Improved error handling in various components, including enhanced logging for specific exceptions and better reporting of browser warm-up failures. Adjusted the README to reflect changes in the `get_person_profile` tool description. Updated tests to ensure coverage of new functionality and error handling improvements. 
--- README.md | 2 +- linkedin_mcp_server/callbacks.py | 4 + linkedin_mcp_server/core/auth.py | 11 +- linkedin_mcp_server/core/browser.py | 7 +- linkedin_mcp_server/core/utils.py | 25 +++- linkedin_mcp_server/error_handler.py | 16 ++- linkedin_mcp_server/scraping/extractor.py | 6 +- linkedin_mcp_server/scraping/fields.py | 30 +++-- linkedin_mcp_server/tools/company.py | 5 +- linkedin_mcp_server/tools/person.py | 5 +- scripts/dump_snapshots.py | 4 +- tests/test_core_utils.py | 147 ++++++++++++++++++++++ tests/test_fields.py | 95 ++++++++++---- tests/test_scraping.py | 42 +++++++ 14 files changed, 352 insertions(+), 47 deletions(-) create mode 100644 tests/test_core_utils.py diff --git a/README.md b/README.md index 06b1f0b8..cbe41a6d 100644 --- a/README.md +++ b/README.md @@ -40,7 +40,7 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company | Tool | Description | Status | |------|-------------|--------| -| `get_person_profile` | Get profile info with explicit section selection (experience, education, interests, accomplishments, contacts) | Working | +| `get_person_profile` | Get profile info with explicit section selection (experience, education, interests, honors, languages, contact_info) | Working | | `get_company_profile` | Extract company information with explicit section selection (posts, jobs) | Working | | `get_company_posts` | Get recent posts from a company's LinkedIn feed | Working | | `search_jobs` | Search for jobs with keywords and location filters | Working | diff --git a/linkedin_mcp_server/callbacks.py b/linkedin_mcp_server/callbacks.py index 71e26197..be087a85 100644 --- a/linkedin_mcp_server/callbacks.py +++ b/linkedin_mcp_server/callbacks.py @@ -45,3 +45,7 @@ async def on_progress(self, message: str, percent: int) -> None: async def on_complete(self, scraper_type: str, result: Any) -> None: """Report completion to MCP client.""" await self.ctx.report_progress(progress=100, total=100, message="Complete") + + async def 
on_error(self, error: Exception) -> None: + """Report error to MCP client.""" + await self.ctx.report_progress(progress=0, total=100, message=f"Error: {error}") diff --git a/linkedin_mcp_server/core/auth.py b/linkedin_mcp_server/core/auth.py index 8a5b5043..a4fb20b1 100644 --- a/linkedin_mcp_server/core/auth.py +++ b/linkedin_mcp_server/core/auth.py @@ -20,16 +20,21 @@ async def warm_up_browser(page: Page) -> None: logger.info("Warming up browser by visiting normal sites...") + failures = 0 for site in sites: try: await page.goto(site, wait_until="domcontentloaded", timeout=10000) await asyncio.sleep(1) logger.debug("Visited %s", site) except Exception as e: + failures += 1 logger.debug("Could not visit %s: %s", site, e) continue - logger.info("Browser warm-up complete") + if failures == len(sites): + logger.warning("Browser warm-up failed: none of %d sites reachable", len(sites)) + else: + logger.info("Browser warm-up complete") async def is_logged_in(page: Page) -> bool: @@ -77,6 +82,10 @@ async def is_logged_in(page: Page) -> bool: return has_nav_elements or is_authenticated_page except PlaywrightTimeoutError: + logger.warning( + "Timeout checking login status on %s โ€” treating as not logged in", + page.url, + ) return False except Exception: logger.error("Unexpected error checking login status", exc_info=True) diff --git a/linkedin_mcp_server/core/browser.py b/linkedin_mcp_server/core/browser.py index 9c4ba754..f6778586 100644 --- a/linkedin_mcp_server/core/browser.py +++ b/linkedin_mcp_server/core/browser.py @@ -187,7 +187,12 @@ async def export_cookies(self, cookie_path: str | Path | None = None) -> bool: _AUTH_COOKIE_NAMES = frozenset({"li_at", "li_rm"}) async def import_cookies(self, cookie_path: str | Path | None = None) -> bool: - """Import auth cookies (li_at, li_rm) from a portable JSON file.""" + """Import auth cookies (li_at, li_rm) from a portable JSON file. 
+ + Clears all existing browser cookies before importing to avoid + undecryptable cookie conflicts in the persistent store. + Only li_at and li_rm cookies are imported; others are ignored. + """ if not self._context: logger.warning("Cannot import cookies: no browser context") return False diff --git a/linkedin_mcp_server/core/utils.py b/linkedin_mcp_server/core/utils.py index 50997692..eb52e623 100644 --- a/linkedin_mcp_server/core/utils.py +++ b/linkedin_mcp_server/core/utils.py @@ -11,10 +11,20 @@ async def detect_rate_limit(page: Page) -> None: - """Detect if LinkedIn has rate limited the session. + """Detect if LinkedIn has rate-limited or security-challenged the session. + + Checks (in order): + 1. URL contains /checkpoint or /authwall (security challenge) + 2. Page contains CAPTCHA iframe (bot detection) + 3. Body text contains rate-limit phrases on error-shaped pages (throttling) + + The body-text heuristic only runs on pages without a ``
`` element + and with short body text (<2000 chars), since real rate-limit pages are + minimal error pages. This avoids false positives from profile content + that happens to contain phrases like "slow down" or "try again later". Raises: - RateLimitError: If rate limiting is detected + RateLimitError: If any rate-limiting or security challenge is detected """ # Check URL for security challenges current_url = page.url @@ -42,10 +52,17 @@ async def detect_rate_limit(page: Page) -> None: except Exception as e: logger.debug("Error checking for CAPTCHA: %s", e) - # Check for rate limit messages + # Check for rate limit messages โ€” only on error-shaped pages. + # Real rate-limit pages have no
element and short body text. + # Normal LinkedIn pages (profiles, jobs) have
and long content + # that may incidentally contain phrases like "slow down". try: + has_main = await page.locator("main").count() > 0 + if has_main: + return # Normal page with content, skip body text heuristic + body_text = await page.locator("body").inner_text(timeout=1000) - if body_text: + if body_text and len(body_text) < 2000: body_lower = body_text.lower() if any( phrase in body_lower diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index a6fbb581..2db417e7 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -55,6 +55,7 @@ def convert_exception_to_response( Structured error response dictionary """ if isinstance(exception, CredentialsNotFoundError): + logger.warning("Credentials not found in %s: %s", context, exception) return { "error": "authentication_not_found", "message": str(exception), @@ -62,6 +63,7 @@ def convert_exception_to_response( } elif isinstance(exception, SessionExpiredError): + logger.warning("Session expired in %s: %s", context, exception) return { "error": "session_expired", "message": str(exception), @@ -69,6 +71,7 @@ def convert_exception_to_response( } elif isinstance(exception, AuthenticationError): + logger.warning("Authentication failed in %s: %s", context, exception) return { "error": "authentication_failed", "message": str(exception), @@ -77,6 +80,7 @@ def convert_exception_to_response( elif isinstance(exception, RateLimitError): wait_time = getattr(exception, "suggested_wait_time", 300) + logger.warning("Rate limit in %s: %s (wait=%ds)", context, exception, wait_time) return { "error": "rate_limit", "message": str(exception), @@ -85,6 +89,7 @@ def convert_exception_to_response( } elif isinstance(exception, ProfileNotFoundError): + logger.warning("Profile not found in %s: %s", context, exception) return { "error": "profile_not_found", "message": str(exception), @@ -92,6 +97,7 @@ def convert_exception_to_response( } elif isinstance(exception, 
ElementNotFoundError): + logger.warning("Element not found in %s: %s", context, exception) return { "error": "element_not_found", "message": str(exception), @@ -99,6 +105,7 @@ def convert_exception_to_response( } elif isinstance(exception, NetworkError): + logger.warning("Network error in %s: %s", context, exception) return { "error": "network_error", "message": str(exception), @@ -106,6 +113,7 @@ def convert_exception_to_response( } elif isinstance(exception, ScrapingError): + logger.warning("Scraping error in %s: %s", context, exception) return { "error": "scraping_error", "message": str(exception), @@ -113,21 +121,25 @@ def convert_exception_to_response( } elif isinstance(exception, LinkedInScraperException): + logger.warning("Scraper error in %s: %s", context, exception) return { "error": "linkedin_scraper_error", "message": str(exception), } elif isinstance(exception, LinkedInMCPError): + logger.warning("MCP error in %s: %s", context, exception) return { "error": "linkedin_mcp_error", "message": str(exception), } else: - # Generic error handling with structured logging logger.error( - f"Error in {context}: {exception}", + "Unexpected error in %s: %s", + context, + exception, + exc_info=True, extra={ "context": context, "exception_type": type(exception).__name__, diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 3389e1fe..9dd6e253 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -74,7 +74,9 @@ async def extract_page(self, url: str) -> str: (sidebar/footer noise with no actual content), which indicates a soft rate limit. - Returns empty string on failure (error isolation per section). + Raises LinkedInScraperException subclasses (rate limit, auth, etc.). + Returns _RATE_LIMITED_MSG sentinel when soft-rate-limited after retry. + Returns empty string for unexpected non-domain failures (error isolation). 
""" try: result = await self._extract_page_once(url) @@ -185,6 +187,7 @@ async def scrape_person( Returns: {url, sections: {name: text}, pages_visited, sections_requested} """ + fields |= PersonScrapingFields.BASIC_INFO base_url = f"https://www.linkedin.com/in/{username}" sections: dict[str, str] = {} pages_visited: list[str] = [] @@ -275,6 +278,7 @@ async def scrape_company( Returns: {url, sections: {name: text}, pages_visited, sections_requested} """ + fields |= CompanyScrapingFields.ABOUT base_url = f"https://www.linkedin.com/company/{company_name}" sections: dict[str, str] = {} pages_visited: list[str] = [] diff --git a/linkedin_mcp_server/scraping/fields.py b/linkedin_mcp_server/scraping/fields.py index f54fe6db..0315dd00 100644 --- a/linkedin_mcp_server/scraping/fields.py +++ b/linkedin_mcp_server/scraping/fields.py @@ -42,15 +42,21 @@ class CompanyScrapingFields(Flag): } -def parse_person_sections(sections: str | None) -> PersonScrapingFields: +def parse_person_sections( + sections: str | None, +) -> tuple[PersonScrapingFields, list[str]]: """Parse comma-separated section names into PersonScrapingFields. BASIC_INFO is always included. Empty/None returns BASIC_INFO only. - Unknown section names are logged as warnings. + Unknown section names are logged as warnings and returned. + + Returns: + Tuple of (flags, unknown_section_names). """ flags = PersonScrapingFields.BASIC_INFO + unknown: list[str] = [] if not sections: - return flags + return flags, unknown for name in sections.split(","): name = name.strip().lower() if not name: @@ -58,23 +64,30 @@ def parse_person_sections(sections: str | None) -> PersonScrapingFields: if name in PERSON_SECTION_MAP: flags |= PERSON_SECTION_MAP[name] else: + unknown.append(name) logger.warning( "Unknown person section %r ignored. 
Valid: %s", name, ", ".join(sorted(PERSON_SECTION_MAP)), ) - return flags + return flags, unknown -def parse_company_sections(sections: str | None) -> CompanyScrapingFields: +def parse_company_sections( + sections: str | None, +) -> tuple[CompanyScrapingFields, list[str]]: """Parse comma-separated section names into CompanyScrapingFields. ABOUT is always included. Empty/None returns ABOUT only. - Unknown section names are logged as warnings. + Unknown section names are logged as warnings and returned. + + Returns: + Tuple of (flags, unknown_section_names). """ flags = CompanyScrapingFields.ABOUT + unknown: list[str] = [] if not sections: - return flags + return flags, unknown for name in sections.split(","): name = name.strip().lower() if not name: @@ -82,9 +95,10 @@ def parse_company_sections(sections: str | None) -> CompanyScrapingFields: if name in COMPANY_SECTION_MAP: flags |= COMPANY_SECTION_MAP[name] else: + unknown.append(name) logger.warning( "Unknown company section %r ignored. 
Valid: %s", name, ", ".join(sorted(COMPANY_SECTION_MAP)), ) - return flags + return flags, unknown diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index faea87e2..42aa4241 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -56,7 +56,7 @@ async def get_company_profile( try: await ensure_authenticated() - fields = parse_company_sections(sections) + fields, unknown = parse_company_sections(sections) logger.info( "Scraping company: %s (sections=%s)", @@ -73,6 +73,9 @@ async def get_company_profile( result = await extractor.scrape_company(company_name, fields) + if unknown: + result["unknown_sections"] = unknown + await ctx.report_progress(progress=100, total=100, message="Complete") return result diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 473dce23..f0caf4d6 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -57,7 +57,7 @@ async def get_person_profile( try: await ensure_authenticated() - fields = parse_person_sections(sections) + fields, unknown = parse_person_sections(sections) logger.info( "Scraping profile: %s (sections=%s)", @@ -74,6 +74,9 @@ async def get_person_profile( result = await extractor.scrape_person(linkedin_username, fields) + if unknown: + result["unknown_sections"] = unknown + await ctx.report_progress(progress=100, total=100, message="Complete") return result diff --git a/scripts/dump_snapshots.py b/scripts/dump_snapshots.py index 69773943..fce54cfc 100644 --- a/scripts/dump_snapshots.py +++ b/scripts/dump_snapshots.py @@ -53,7 +53,7 @@ async def main(): for username, sections_str in PERSON_TARGETS: print(f"\n--- Scraping person: {username} (sections: {sections_str}) ---") - fields = parse_person_sections(sections_str) + fields, _ = parse_person_sections(sections_str) result = await extractor.scrape_person(username, fields) dump_path = run_dir / f"person_{username}.json" @@ 
-66,7 +66,7 @@ async def main(): for company, sections_str in COMPANY_TARGETS: print(f"\n--- Scraping company: {company} (sections: {sections_str}) ---") - fields = parse_company_sections(sections_str) + fields, _ = parse_company_sections(sections_str) result = await extractor.scrape_company(company, fields) dump_path = run_dir / f"company_{company}.json" diff --git a/tests/test_core_utils.py b/tests/test_core_utils.py new file mode 100644 index 00000000..be87f8ee --- /dev/null +++ b/tests/test_core_utils.py @@ -0,0 +1,147 @@ +"""Tests for core utility functions (rate-limit detection, scrolling, modals).""" + +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from linkedin_mcp_server.core.exceptions import RateLimitError +from linkedin_mcp_server.core.utils import detect_rate_limit + + +@pytest.fixture +def mock_page(): + """Create a mock Patchright page for rate-limit tests.""" + page = MagicMock() + page.url = "https://www.linkedin.com/in/testuser/details/experience/" + + mock_locator = MagicMock() + mock_locator.count = AsyncMock(return_value=0) + mock_locator.inner_text = AsyncMock(return_value="") + page.locator = MagicMock(return_value=mock_locator) + return page + + +class TestDetectRateLimit: + async def test_checkpoint_url_raises(self, mock_page): + mock_page.url = "https://www.linkedin.com/checkpoint/challenge/123" + with pytest.raises(RateLimitError, match="security checkpoint"): + await detect_rate_limit(mock_page) + + async def test_authwall_url_raises(self, mock_page): + mock_page.url = "https://www.linkedin.com/authwall?trk=login" + with pytest.raises(RateLimitError, match="security checkpoint"): + await detect_rate_limit(mock_page) + + async def test_captcha_iframe_raises(self, mock_page): + captcha_locator = MagicMock() + captcha_locator.count = AsyncMock(return_value=1) + + main_locator = MagicMock() + main_locator.count = AsyncMock(return_value=0) + + def locator_side_effect(selector): + if "captcha" in selector: + return 
captcha_locator + return main_locator + + mock_page.locator = MagicMock(side_effect=locator_side_effect) + with pytest.raises(RateLimitError, match="CAPTCHA"): + await detect_rate_limit(mock_page) + + async def test_normal_page_with_main_skips_body_heuristic(self, mock_page): + """A normal page with
should NOT trigger body text checks.""" + main_locator = MagicMock() + main_locator.count = AsyncMock(return_value=1) + + captcha_locator = MagicMock() + captcha_locator.count = AsyncMock(return_value=0) + + body_locator = MagicMock() + # Body contains a phrase that would false-positive + body_locator.inner_text = AsyncMock( + return_value="Helping SaaS teams slow down churn with data-driven retention" + ) + + def locator_side_effect(selector): + if "captcha" in selector: + return captcha_locator + if selector == "main": + return main_locator + if selector == "body": + return body_locator + return MagicMock(count=AsyncMock(return_value=0)) + + mock_page.locator = MagicMock(side_effect=locator_side_effect) + # Should NOT raise โ€” the page has
, so body heuristic is skipped + await detect_rate_limit(mock_page) + + async def test_error_page_without_main_triggers_heuristic(self, mock_page): + """A short error page without
with rate-limit text should raise.""" + main_locator = MagicMock() + main_locator.count = AsyncMock(return_value=0) + + captcha_locator = MagicMock() + captcha_locator.count = AsyncMock(return_value=0) + + body_locator = MagicMock() + body_locator.inner_text = AsyncMock( + return_value="Too many requests. Slow down." + ) + + def locator_side_effect(selector): + if "captcha" in selector: + return captcha_locator + if selector == "main": + return main_locator + if selector == "body": + return body_locator + return MagicMock(count=AsyncMock(return_value=0)) + + mock_page.locator = MagicMock(side_effect=locator_side_effect) + with pytest.raises(RateLimitError, match="Rate limit message"): + await detect_rate_limit(mock_page) + + async def test_long_body_without_main_does_not_trigger(self, mock_page): + """A page without
but with long body text (>2000 chars) is not an error page.""" + main_locator = MagicMock() + main_locator.count = AsyncMock(return_value=0) + + captcha_locator = MagicMock() + captcha_locator.count = AsyncMock(return_value=0) + + body_locator = MagicMock() + # Long body with a matching phrase buried in content + body_locator.inner_text = AsyncMock( + return_value="x" * 2000 + " try again later" + ) + + def locator_side_effect(selector): + if "captcha" in selector: + return captcha_locator + if selector == "main": + return main_locator + if selector == "body": + return body_locator + return MagicMock(count=AsyncMock(return_value=0)) + + mock_page.locator = MagicMock(side_effect=locator_side_effect) + # Should NOT raise โ€” body is too long to be an error page + await detect_rate_limit(mock_page) + + async def test_normal_url_no_captcha_no_error_passes(self, mock_page): + """A clean normal page passes all checks without raising.""" + main_locator = MagicMock() + main_locator.count = AsyncMock(return_value=1) + + captcha_locator = MagicMock() + captcha_locator.count = AsyncMock(return_value=0) + + def locator_side_effect(selector): + if "captcha" in selector: + return captcha_locator + if selector == "main": + return main_locator + return MagicMock(count=AsyncMock(return_value=0)) + + mock_page.locator = MagicMock(side_effect=locator_side_effect) + await detect_rate_limit(mock_page) diff --git a/tests/test_fields.py b/tests/test_fields.py index cf0a961f..f7d9977b 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -1,6 +1,8 @@ """Tests for scraping field flag enums and section parsers.""" from linkedin_mcp_server.scraping.fields import ( + COMPANY_SECTION_MAP, + PERSON_SECTION_MAP, CompanyScrapingFields, PersonScrapingFields, parse_company_sections, @@ -44,47 +46,61 @@ def test_atomic_flags_are_distinct(self): class TestParsePersonSections: def test_none_returns_basic_info_only(self): - assert parse_person_sections(None) == PersonScrapingFields.BASIC_INFO 
+ flags, unknown = parse_person_sections(None) + assert flags == PersonScrapingFields.BASIC_INFO + assert unknown == [] def test_empty_string_returns_basic_info_only(self): - assert parse_person_sections("") == PersonScrapingFields.BASIC_INFO + flags, unknown = parse_person_sections("") + assert flags == PersonScrapingFields.BASIC_INFO + assert unknown == [] def test_single_section(self): - result = parse_person_sections("contact_info") + flags, unknown = parse_person_sections("contact_info") assert ( - result - == PersonScrapingFields.BASIC_INFO | PersonScrapingFields.CONTACT_INFO + flags == PersonScrapingFields.BASIC_INFO | PersonScrapingFields.CONTACT_INFO ) + assert unknown == [] def test_multiple_sections(self): - result = parse_person_sections("experience,education") + flags, unknown = parse_person_sections("experience,education") expected = ( PersonScrapingFields.BASIC_INFO | PersonScrapingFields.EXPERIENCE | PersonScrapingFields.EDUCATION ) - assert result == expected + assert flags == expected + assert unknown == [] - def test_invalid_names_ignored(self): - result = parse_person_sections("experience,bogus,education") + def test_invalid_names_returned(self): + flags, unknown = parse_person_sections("experience,bogus,education") expected = ( PersonScrapingFields.BASIC_INFO | PersonScrapingFields.EXPERIENCE | PersonScrapingFields.EDUCATION ) - assert result == expected + assert flags == expected + assert unknown == ["bogus"] + + def test_multiple_invalid_names(self): + flags, unknown = parse_person_sections("experience,foo,bar") + assert ( + flags == PersonScrapingFields.BASIC_INFO | PersonScrapingFields.EXPERIENCE + ) + assert unknown == ["foo", "bar"] def test_whitespace_and_case_handling(self): - result = parse_person_sections(" Experience , EDUCATION ") + flags, unknown = parse_person_sections(" Experience , EDUCATION ") expected = ( PersonScrapingFields.BASIC_INFO | PersonScrapingFields.EXPERIENCE | PersonScrapingFields.EDUCATION ) - assert result == 
expected + assert flags == expected + assert unknown == [] def test_all_sections(self): - result = parse_person_sections( + flags, unknown = parse_person_sections( "experience,education,interests,honors,languages,contact_info" ) expected = ( @@ -96,38 +112,67 @@ def test_all_sections(self): | PersonScrapingFields.LANGUAGES | PersonScrapingFields.CONTACT_INFO ) - assert result == expected + assert flags == expected + assert unknown == [] class TestParseCompanySections: def test_none_returns_about_only(self): - assert parse_company_sections(None) == CompanyScrapingFields.ABOUT + flags, unknown = parse_company_sections(None) + assert flags == CompanyScrapingFields.ABOUT + assert unknown == [] def test_empty_string_returns_about_only(self): - assert parse_company_sections("") == CompanyScrapingFields.ABOUT + flags, unknown = parse_company_sections("") + assert flags == CompanyScrapingFields.ABOUT + assert unknown == [] def test_single_section(self): - result = parse_company_sections("posts") - assert result == CompanyScrapingFields.ABOUT | CompanyScrapingFields.POSTS + flags, unknown = parse_company_sections("posts") + assert flags == CompanyScrapingFields.ABOUT | CompanyScrapingFields.POSTS + assert unknown == [] def test_multiple_sections(self): - result = parse_company_sections("posts,jobs") + flags, unknown = parse_company_sections("posts,jobs") expected = ( CompanyScrapingFields.ABOUT | CompanyScrapingFields.POSTS | CompanyScrapingFields.JOBS ) - assert result == expected + assert flags == expected + assert unknown == [] - def test_invalid_names_ignored(self): - result = parse_company_sections("posts,bogus") - assert result == CompanyScrapingFields.ABOUT | CompanyScrapingFields.POSTS + def test_invalid_names_returned(self): + flags, unknown = parse_company_sections("posts,bogus") + assert flags == CompanyScrapingFields.ABOUT | CompanyScrapingFields.POSTS + assert unknown == ["bogus"] def test_whitespace_and_case_handling(self): - result = parse_company_sections(" 
Posts , JOBS ") + flags, unknown = parse_company_sections(" Posts , JOBS ") expected = ( CompanyScrapingFields.ABOUT | CompanyScrapingFields.POSTS | CompanyScrapingFields.JOBS ) - assert result == expected + assert flags == expected + assert unknown == [] + + +class TestSectionMapCoverage: + """Ensure every non-baseline flag has a section map entry (drift risk).""" + + def test_person_section_map_covers_all_flags(self): + baseline = PersonScrapingFields.BASIC_INFO + mapped_flags = set(PERSON_SECTION_MAP.values()) + for flag in PersonScrapingFields: + if flag is baseline: + continue + assert flag in mapped_flags, f"{flag.name} missing from PERSON_SECTION_MAP" + + def test_company_section_map_covers_all_flags(self): + baseline = CompanyScrapingFields.ABOUT + mapped_flags = set(COMPANY_SECTION_MAP.values()) + for flag in CompanyScrapingFields: + if flag is baseline: + continue + assert flag in mapped_flags, f"{flag.name} missing from COMPANY_SECTION_MAP" diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 7fb97562..7493e153 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -170,6 +170,31 @@ async def evaluate_side_effect(*args, **kwargs): class TestScrapePersonUrls: """Test that scrape_person visits the correct URLs per field combination.""" + async def test_baseline_always_included(self, mock_page): + """Passing EXPERIENCE without BASIC_INFO still visits main profile.""" + extractor = LinkedInExtractor(mock_page) + with ( + patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value="text", + ), + patch.object( + extractor, + "_extract_overlay", + new_callable=AsyncMock, + return_value="", + ), + ): + result = await extractor.scrape_person( + "testuser", PersonScrapingFields.EXPERIENCE + ) + + urls = result["pages_visited"] + assert any("/in/testuser/" in u for u in urls), "main profile should be visited" + assert any("/details/experience/" in u for u in urls) + async def 
test_basic_info_only_visits_main_profile(self, mock_page): extractor = LinkedInExtractor(mock_page) with ( @@ -308,6 +333,23 @@ async def extract_with_failure(url): class TestScrapeCompany: + async def test_company_baseline_always_included(self, mock_page): + """Passing POSTS without ABOUT still visits about page.""" + extractor = LinkedInExtractor(mock_page) + with patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value="text", + ): + result = await extractor.scrape_company( + "testcorp", CompanyScrapingFields.POSTS + ) + + urls = result["pages_visited"] + assert any("/about/" in u for u in urls), "about page should be visited" + assert any("/posts/" in u for u in urls) + async def test_about_only_visits_about(self, mock_page): extractor = LinkedInExtractor(mock_page) with patch.object( From e17334b2eeba649fe9be6115a7d942f8f7bc0d12 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 16 Feb 2026 13:41:32 +0000 Subject: [PATCH 381/565] chore(deps): update ci dependencies --- .github/workflows/claude.yml | 2 +- .github/workflows/release.yml | 2 +- Dockerfile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index 752ccaaf..c704cda5 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -32,7 +32,7 @@ jobs: - name: Run Claude Code id: claude - uses: anthropics/claude-code-action@6c61301d8e1ee91bef7b65172f93462bbb216394 # v1 + uses: anthropics/claude-code-action@68cfeead1890300cc87935dbe2c023825be87b8a # v1 with: claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a963d597..0af0a9ec 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -116,7 +116,7 @@ jobs: password: ${{ secrets.DOCKER_PASSWORD }} - name: Build and push Docker images - uses: 
docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6 + uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6 with: context: . push: true diff --git a/Dockerfile b/Dockerfile index 323e0ef9..89412a74 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,7 @@ FROM python:3.14-slim-bookworm@sha256:f0540d0436a220db0a576ccfe75631ab072391e43a24b88972ef9833f699095f # Install uv package manager -COPY --from=ghcr.io/astral-sh/uv:latest@sha256:78a7ff97cd27b7124a5f3c2aefe146170793c56a1e03321dd31a289f6d82a04f /uv /uvx /bin/ +COPY --from=ghcr.io/astral-sh/uv:latest@sha256:7a88d4c4e6f44200575000638453a5a381db0ae31ad5c3a51b14f8687c9d93a3 /uv /uvx /bin/ # Create non-root user first (matching original pwuser from Playwright image) RUN useradd -m -s /bin/bash pwuser From f950882498e92822a6052f846faaaa00c8d16f26 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 16 Feb 2026 14:51:46 +0100 Subject: [PATCH 382/565] feat(scraper): improve overlay extraction and retry logic in LinkedInExtractor Refactored the overlay extraction process to include a dedicated method for a single extraction attempt, enhancing clarity and maintainability. Implemented a retry mechanism with backoff for handling rate-limited responses, ensuring more robust content retrieval from LinkedIn overlays. Updated logging to provide better insights during the extraction process. --- linkedin_mcp_server/scraping/extractor.py | 85 ++++++++++++++--------- 1 file changed, 51 insertions(+), 34 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 9dd6e253..10998b88 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -134,44 +134,22 @@ async def _extract_overlay(self, url: str) -> str: LinkedIn renders contact info as a native element. Falls back to `
` if no dialog is found. + + Retries once after a backoff when the overlay returns only LinkedIn + chrome (noise), mirroring `extract_page` behavior. """ try: - await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) - await detect_rate_limit(self._page) + result = await self._extract_overlay_once(url) + if result != _RATE_LIMITED_MSG: + return result - # Wait for the dialog/modal to render (LinkedIn uses native ) - try: - await self._page.wait_for_selector( - "dialog[open], .artdeco-modal__content", timeout=5000 - ) - except PlaywrightTimeoutError: - logger.debug("No modal overlay found on %s, falling back to main", url) - - # NOTE: Do NOT call handle_modal_close() here โ€” the contact-info - # overlay *is* a dialog/modal. Dismissing it would destroy the - # content before the JS evaluation below can read it. - - raw = await self._page.evaluate( - """() => { - const dialog = document.querySelector('dialog[open]'); - if (dialog) return dialog.innerText.trim(); - const modal = document.querySelector('.artdeco-modal__content'); - if (modal) return modal.innerText.trim(); - const main = document.querySelector('main'); - return main ? 
main.innerText.trim() : document.body.innerText.trim(); - }""" + logger.info( + "Retrying overlay %s after %.0fs backoff", + url, + _RATE_LIMIT_RETRY_DELAY, ) - - if not raw: - return "" - cleaned = strip_linkedin_noise(raw) - if not cleaned and raw.strip(): - logger.warning( - "Overlay %s returned only LinkedIn chrome (likely rate-limited)", - url, - ) - return _RATE_LIMITED_MSG - return cleaned + await asyncio.sleep(_RATE_LIMIT_RETRY_DELAY) + return await self._extract_overlay_once(url) except LinkedInScraperException: raise @@ -179,6 +157,45 @@ async def _extract_overlay(self, url: str) -> str: logger.warning("Failed to extract overlay %s: %s", url, e) return "" + async def _extract_overlay_once(self, url: str) -> str: + """Single attempt to extract content from an overlay/modal page.""" + await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + await detect_rate_limit(self._page) + + # Wait for the dialog/modal to render (LinkedIn uses native ) + try: + await self._page.wait_for_selector( + "dialog[open], .artdeco-modal__content", timeout=5000 + ) + except PlaywrightTimeoutError: + logger.debug("No modal overlay found on %s, falling back to main", url) + + # NOTE: Do NOT call handle_modal_close() here โ€” the contact-info + # overlay *is* a dialog/modal. Dismissing it would destroy the + # content before the JS evaluation below can read it. + + raw = await self._page.evaluate( + """() => { + const dialog = document.querySelector('dialog[open]'); + if (dialog) return dialog.innerText.trim(); + const modal = document.querySelector('.artdeco-modal__content'); + if (modal) return modal.innerText.trim(); + const main = document.querySelector('main'); + return main ? 
main.innerText.trim() : document.body.innerText.trim(); + }""" + ) + + if not raw: + return "" + cleaned = strip_linkedin_noise(raw) + if not cleaned and raw.strip(): + logger.warning( + "Overlay %s returned only LinkedIn chrome (likely rate-limited)", + url, + ) + return _RATE_LIMITED_MSG + return cleaned + async def scrape_person( self, username: str, fields: PersonScrapingFields ) -> dict[str, Any]: From e307176ee4d946fb704b523a7f358dd5dd388187 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 16 Feb 2026 13:54:23 +0000 Subject: [PATCH 383/565] chore: update manifest.json and docker-compose.yml to v4.0.0 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 5d48b5d5..b86fd519 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:3.0.4 + image: stickerdaniel/linkedin-mcp-server:4.0.0 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index 34723b09..977cc65e 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "3.0.4", + "version": "4.0.0", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:3.0.4", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.0.0", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:3.0.4" + "stickerdaniel/linkedin-mcp-server:4.0.0" ] } }, From 47a7d1a8e7e788091f7e9b347b30bba9a99c6584 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Mon, 16 Feb 2026 15:16:40 +0100 Subject: [PATCH 384/565] Fix Anthropic LinkedIn link in README Updated the LinkedIn link for Anthropic to point to the research page. 
--- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index cbe41a6d..e1af4afc 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ Suggest improvements for my CV to target this job posting https://www.linkedin.c ``` ``` -What has Anthropic been posting about recently? https://www.linkedin.com/company/anthropic/ +What has Anthropic been posting about recently? https://www.linkedin.com/company/anthropicresearch/ ``` ## Features & Tool Status From 1317f292a0b301a7f57f72b2a91f1c5a1be9e88f Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Mon, 16 Feb 2026 22:59:07 +0100 Subject: [PATCH 385/565] docs(README): Revise client configuration and add transport modes Updated client configuration instructions and added transport modes section. --- README.md | 23 ++++++++--------------- 1 file changed, 8 insertions(+), 15 deletions(-) diff --git a/README.md b/README.md index e1af4afc..8ca1e703 100644 --- a/README.md +++ b/README.md @@ -67,21 +67,7 @@ uvx linkedin-scraper-mcp --login This opens a browser for you to log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile is saved to `~/.linkedin-mcp/profile/`. -**Step 2: Run the server** - -```bash -uvx linkedin-scraper-mcp -``` - -> [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --login` again. - -### uvx Setup Help - -
-๐Ÿ”ง Configuration - -**Client Configuration:** +**Step 2: Client Configuration:** ```json { @@ -93,7 +79,14 @@ uvx linkedin-scraper-mcp } } ``` +> [!NOTE] +> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --login` again +### uvx Setup Help + +
+๐Ÿ”ง Configuration + **Transport Modes:** - **Default (stdio)**: Standard communication for local MCP servers From 5b17c955718f16305496d4b429712edc9a45d3d9 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 16 Feb 2026 23:18:16 +0100 Subject: [PATCH 386/565] docs(README): Clarify uvx setup instructions and Docker profile mounting Updated the prerequisites section for uvx setup to simplify instructions and clarified the process for mounting the browser profile into the Docker container. --- README.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 8ca1e703..08b7d5a7 100644 --- a/README.md +++ b/README.md @@ -55,7 +55,7 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company ## ๐Ÿš€ uvx Setup (Recommended - Universal) -**Prerequisites:** Make sure you have [uv](https://docs.astral.sh/uv/) and Patchright `uvx patchright install chromium` installed. +**Prerequisites:** Install uv and run `uvx patchright install chromium` to set up the browser. ### Installation @@ -79,6 +79,7 @@ This opens a browser for you to log in manually (5 minute timeout for 2FA, captc } } ``` + > [!NOTE] > Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --login` again @@ -86,7 +87,7 @@ This opens a browser for you to log in manually (5 minute timeout for 2FA, captc
๐Ÿ”ง Configuration - + **Transport Modes:** - **Default (stdio)**: Standard communication for local MCP servers @@ -173,7 +174,7 @@ uvx linkedin-scraper-mcp --transport streamable-http --host 127.0.0.1 --port 808 ### Authentication -Docker runs headless (no browser window), so you need to create a browser profile locally first and mount it into Docker. +Docker runs headless (no browser window), so you need to create a browser profile locally first and mount it into the container. **Step 1: Create profile using uvx (one-time setup)** From 0687d1f420458c9cb188cbf398307046e7b621de Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 17 Feb 2026 20:15:46 +0100 Subject: [PATCH 387/565] fix(cli): polish stdio startup flow --- README.md | 20 +++- docs/docker-hub.md | 2 + linkedin_mcp_server/__init__.py | 2 +- linkedin_mcp_server/cli.py | 85 ---------------- linkedin_mcp_server/cli_main.py | 51 +++++----- pyproject.toml | 1 - tests/test_cli_main.py | 168 ++++++++++++++++++++++++++++++++ uv.lock | 4 +- 8 files changed, 214 insertions(+), 119 deletions(-) delete mode 100644 linkedin_mcp_server/cli.py create mode 100644 tests/test_cli_main.py diff --git a/README.md b/README.md index 08b7d5a7..55c77a68 100644 --- a/README.md +++ b/README.md @@ -80,6 +80,10 @@ This opens a browser for you to log in manually (5 minute timeout for 2FA, captc } ``` +> [!NOTE] +> `stdio` is the default transport, so no `--transport` flag is required in MCP client configs. +> If you run the command directly in an interactive terminal and no transport is set, you'll be prompted to choose one. + > [!NOTE] > Sessions may expire over time. 
If you encounter authentication issues, run `uvx linkedin-scraper-mcp --login` again @@ -92,13 +96,15 @@ This opens a browser for you to log in manually (5 minute timeout for 2FA, captc - **Default (stdio)**: Standard communication for local MCP servers - **Streamable HTTP**: For web-based MCP server +- If no transport is specified, the server defaults to `stdio` +- Interactive terminal runs without explicit transport show a chooser prompt **CLI Options:** - `--login` - Open browser to log in and save persistent profile - `--no-headless` - Show browser window (useful for debugging scraping issues) - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) -- `--transport {stdio,streamable-http}` - Set transport mode +- `--transport {stdio,streamable-http}` - Optional: force transport mode (default: stdio) - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) @@ -123,6 +129,8 @@ uvx linkedin-scraper-mcp --log-level DEBUG uvx linkedin-scraper-mcp --transport streamable-http --host 127.0.0.1 --port 8080 --path /mcp ``` +Runtime server logs are emitted by FastMCP/Uvicorn. + **Test with mcp inspector:** 1. 
Install and run mcp inspector ```bunx @modelcontextprotocol/inspector``` @@ -216,11 +224,13 @@ This opens a browser window where you log in manually (5 minute timeout for 2FA, - **Default (stdio)**: Standard communication for local MCP servers - **Streamable HTTP**: For a web-based MCP server +- If no transport is specified, the server defaults to `stdio` +- Interactive terminal runs without explicit transport show a chooser prompt **CLI Options:** - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) -- `--transport {stdio,streamable-http}` - Set transport mode +- `--transport {stdio,streamable-http}` - Optional: force transport mode (default: stdio) - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) @@ -242,6 +252,8 @@ docker run -it --rm \ --transport streamable-http --host 0.0.0.0 --port 8080 --path /mcp ``` +Runtime server logs are emitted by FastMCP/Uvicorn. + **Test with mcp inspector:** 1. Install and run mcp inspector ```bunx @modelcontextprotocol/inspector``` @@ -376,7 +388,7 @@ uv run -m linkedin_mcp_server - `--login` - Open browser to log in and save persistent profile - `--no-headless` - Show browser window (useful for debugging scraping issues) - `--log-level {DEBUG,INFO,WARNING,ERROR}` - Set logging level (default: WARNING) -- `--transport {stdio,streamable-http}` - Set transport mode +- `--transport {stdio,streamable-http}` - Optional: force transport mode (default: stdio) - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) @@ -411,6 +423,8 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por } ``` +`stdio` is used by default for this config. +
diff --git a/docs/docker-hub.md b/docs/docker-hub.md index bfe90568..51c3fe01 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -38,6 +38,8 @@ uvx linkedin-scraper-mcp --login ``` > **Note:** Docker containers don't have a display server, so you can't use the `--login` command in Docker. Create a profile on your host first. +> +> **Note:** `stdio` is the default transport. Add `--transport streamable-http` only when you specifically want HTTP mode. ## Environment Variables diff --git a/linkedin_mcp_server/__init__.py b/linkedin_mcp_server/__init__.py index f0950836..99217578 100644 --- a/linkedin_mcp_server/__init__.py +++ b/linkedin_mcp_server/__init__.py @@ -25,6 +25,6 @@ from importlib.metadata import PackageNotFoundError, version try: - __version__ = version("linkedin-mcp-server") + __version__ = version("linkedin-scraper-mcp") except PackageNotFoundError: __version__ = "0.0.0.dev" # Running from source without install diff --git a/linkedin_mcp_server/cli.py b/linkedin_mcp_server/cli.py deleted file mode 100644 index daad2ecd..00000000 --- a/linkedin_mcp_server/cli.py +++ /dev/null @@ -1,85 +0,0 @@ -# src/linkedin_mcp_server/cli.py -""" -CLI utilities for LinkedIn MCP server configuration generation. - -Automatically generates Claude Desktop configuration with proper tool registration, -environment variables, and clipboard integration for seamless setup workflow. -""" - -import json -import logging -import os -import subprocess -from typing import Any, Dict, List - -import pyperclip - -logger = logging.getLogger(__name__) - - -def print_claude_config() -> None: - """ - Print Claude configuration and copy to clipboard. - - This function generates the configuration needed for Claude Desktop - and copies it to the clipboard for easy pasting. 
- """ - current_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) - - # Find the full path to uv executable - try: - uv_path = subprocess.check_output(["which", "uv"], text=True).strip() - print(f"๐Ÿ” Found uv at: {uv_path}") - except subprocess.CalledProcessError: - # Fallback if which uv fails - uv_path = "uv" - print( - "โš ๏ธ Could not find full path to uv, using 'uv' directly. " - "This may not work in Claude Desktop." - ) - - # Include useful command-line arguments in the default args - args: List[str] = [ - "--directory", - current_dir, - "run", - "-m", - "linkedin_mcp_server", - ] - - config_json: Dict[str, Any] = { - "mcpServers": { - "linkedin-scraper": { - "command": uv_path, - "args": args, - "disabled": False, - "requiredTools": [ - "get_person_profile", - "get_company_profile", - "get_job_details", - "search_jobs", - ], - } - } - } - - # Convert to string for clipboard - config_str = json.dumps(config_json, indent=2) - - # Print the final configuration - print("\n๐Ÿ“‹ Your Claude configuration should look like:") - print(config_str) - print( - "\n๐Ÿ”ง Add this to your Claude Desktop configuration in Settings > Developer > Edit Config" - ) - - # Copy to clipboard - try: - pyperclip.copy(config_str) - print("โœ… Claude configuration copied to clipboard!") - except ImportError: - print( - "โš ๏ธ pyperclip not installed. 
To copy configuration automatically, run: uv add pyperclip" - ) - except Exception as e: - print(f"โŒ Could not copy to clipboard: {e}") diff --git a/linkedin_mcp_server/cli_main.py b/linkedin_mcp_server/cli_main.py index 6d7bded6..899841d9 100644 --- a/linkedin_mcp_server/cli_main.py +++ b/linkedin_mcp_server/cli_main.py @@ -7,7 +7,6 @@ """ import asyncio -import io import logging import sys from typing import Literal @@ -20,7 +19,6 @@ clear_profile, get_authentication_source, ) -from linkedin_mcp_server.cli import print_claude_config from linkedin_mcp_server.config import get_config from linkedin_mcp_server.drivers.browser import ( close_browser, @@ -205,7 +203,18 @@ def ensure_authentication_ready() -> None: def get_version() -> str: - """Get version from pyproject.toml.""" + """Get version from installed metadata with a source fallback.""" + try: + from importlib.metadata import PackageNotFoundError, version + + for package_name in ("linkedin-scraper-mcp", "linkedin-mcp-server"): + try: + return version(package_name) + except PackageNotFoundError: + continue + except Exception: + pass + try: import os import tomllib @@ -222,8 +231,6 @@ def get_version() -> str: def main() -> None: """Main application entry point.""" - sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8") - config = get_config() # Configure logging @@ -261,7 +268,8 @@ def main() -> None: # Phase 1: Ensure Authentication is Ready try: ensure_authentication_ready() - print("โœ… Authentication ready") + if config.is_interactive: + print("โœ… Authentication ready") logger.info("Authentication ready") except CredentialsNotFoundError as e: @@ -269,22 +277,23 @@ def main() -> None: if config.is_interactive: print("\nโŒ Authentication required") print(str(e)) - else: - print("\nโŒ Authentication required for non-interactive mode") sys.exit(1) except KeyboardInterrupt: - print("\n\n๐Ÿ‘‹ Setup cancelled by user") + if config.is_interactive: + print("\n\n๐Ÿ‘‹ Setup cancelled by user") 
sys.exit(0) except (AuthenticationError, RateLimitError) as e: logger.error(f"LinkedIn error during setup: {e}") - print(f"\nโŒ {str(e)}") + if config.is_interactive: + print(f"\nโŒ {str(e)}") sys.exit(1) except Exception as e: - logger.error(f"Unexpected error during authentication setup: {e}") - print(f"\nโŒ Setup failed: {e}") + logger.exception(f"Unexpected error during authentication setup: {e}") + if config.is_interactive: + print(f"\nโŒ Setup failed: {e}") sys.exit(1) # Phase 2: Server Runtime @@ -296,18 +305,10 @@ def main() -> None: print("\n๐Ÿš€ Server ready! Choose transport mode:") transport = choose_transport_interactive() - # Print Claude config in interactive stdio mode - if config.is_interactive and transport == "stdio": - print_claude_config() - # Create and run the MCP server mcp = create_mcp_server() - print(f"\n๐Ÿš€ Running LinkedIn MCP server ({transport.upper()} mode)...") if transport == "streamable-http": - print( - f"๐Ÿ“ก HTTP server at http://{config.server.host}:{config.server.port}{config.server.path}" - ) mcp.run( transport=transport, host=config.server.host, @@ -318,18 +319,17 @@ def main() -> None: mcp.run(transport=transport) except KeyboardInterrupt: - print("\nโน๏ธ Server stopped by user") exit_gracefully(0) except Exception as e: - logger.error(f"Server runtime error: {e}") - print(f"\nโŒ Server error: {e}") + logger.exception(f"Server runtime error: {e}") + if config.is_interactive: + print(f"\nโŒ Server error: {e}") exit_gracefully(1) def exit_gracefully(exit_code: int = 0) -> None: """Exit the application gracefully with browser cleanup.""" - print("๐Ÿ‘‹ Shutting down LinkedIn MCP server...") try: asyncio.run(close_browser()) except Exception: @@ -343,9 +343,8 @@ def exit_gracefully(exit_code: int = 0) -> None: except KeyboardInterrupt: exit_gracefully(0) except Exception as e: - logger.error( + logger.exception( f"Error running MCP server: {e}", extra={"exception_type": type(e).__name__, "exception_message": str(e)}, ) 
- print(f"โŒ Error running MCP server: {e}") exit_gracefully(1) diff --git a/pyproject.toml b/pyproject.toml index 6bbce3e0..2e307ee2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,6 @@ dependencies = [ "fastmcp>=2.14.0", "inquirer>=3.4.0", "patchright>=1.40.0", - "pyperclip>=1.9.0", "python-dotenv>=1.1.1", ] diff --git a/tests/test_cli_main.py b/tests/test_cli_main.py new file mode 100644 index 00000000..5965fc62 --- /dev/null +++ b/tests/test_cli_main.py @@ -0,0 +1,168 @@ +"""Tests for CLI startup behavior and transport selection.""" + +import importlib.metadata +from typing import Literal +from unittest.mock import MagicMock + +import pytest + +import linkedin_mcp_server.cli_main as cli_main +from linkedin_mcp_server.config.schema import AppConfig +from linkedin_mcp_server.exceptions import CredentialsNotFoundError + + +def _make_config( + *, + is_interactive: bool, + transport: Literal["stdio", "streamable-http"], + transport_explicitly_set: bool, +) -> AppConfig: + config = AppConfig() + config.is_interactive = is_interactive + config.server.transport = transport + config.server.transport_explicitly_set = transport_explicitly_set + return config + + +def _patch_main_dependencies( + monkeypatch: pytest.MonkeyPatch, config: AppConfig +) -> None: + monkeypatch.setattr("linkedin_mcp_server.cli_main.get_config", lambda: config) + monkeypatch.setattr( + "linkedin_mcp_server.cli_main.configure_logging", lambda **_kwargs: None + ) + monkeypatch.setattr("linkedin_mcp_server.cli_main.get_version", lambda: "4.0.0") + monkeypatch.setattr( + "linkedin_mcp_server.cli_main.ensure_authentication_ready", lambda: None + ) + monkeypatch.setattr("linkedin_mcp_server.cli_main.set_headless", lambda _x: None) + + +def test_main_non_interactive_stdio_has_no_human_stdout( + monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str] +) -> None: + config = _make_config( + is_interactive=False, transport="stdio", transport_explicitly_set=False + ) + 
_patch_main_dependencies(monkeypatch, config) + mcp = MagicMock() + monkeypatch.setattr("linkedin_mcp_server.cli_main.create_mcp_server", lambda: mcp) + + cli_main.main() + + mcp.run.assert_called_once_with(transport="stdio") + captured = capsys.readouterr() + assert captured.out == "" + + +def test_main_interactive_prompts_when_transport_not_explicit( + monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str] +) -> None: + config = _make_config( + is_interactive=True, transport="stdio", transport_explicitly_set=False + ) + _patch_main_dependencies(monkeypatch, config) + choose_transport = MagicMock(return_value="streamable-http") + monkeypatch.setattr( + "linkedin_mcp_server.cli_main.choose_transport_interactive", choose_transport + ) + mcp = MagicMock() + monkeypatch.setattr("linkedin_mcp_server.cli_main.create_mcp_server", lambda: mcp) + + cli_main.main() + + choose_transport.assert_called_once_with() + captured = capsys.readouterr() + assert "Server ready! Choose transport mode:" in captured.out + mcp.run.assert_called_once_with( + transport="streamable-http", + host=config.server.host, + port=config.server.port, + path=config.server.path, + ) + + +def test_main_explicit_transport_skips_prompt( + monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str] +) -> None: + config = _make_config( + is_interactive=True, transport="stdio", transport_explicitly_set=True + ) + _patch_main_dependencies(monkeypatch, config) + choose_transport = MagicMock(return_value="streamable-http") + monkeypatch.setattr( + "linkedin_mcp_server.cli_main.choose_transport_interactive", choose_transport + ) + mcp = MagicMock() + monkeypatch.setattr("linkedin_mcp_server.cli_main.create_mcp_server", lambda: mcp) + + cli_main.main() + + choose_transport.assert_not_called() + captured = capsys.readouterr() + assert "Server ready! 
Choose transport mode:" not in captured.out + mcp.run.assert_called_once_with(transport="stdio") + + +def test_main_streamable_http_passes_host_port_path( + monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str] +) -> None: + config = _make_config( + is_interactive=False, + transport="streamable-http", + transport_explicitly_set=True, + ) + config.server.host = "0.0.0.0" + config.server.port = 8123 + config.server.path = "/custom-mcp" + _patch_main_dependencies(monkeypatch, config) + mcp = MagicMock() + monkeypatch.setattr("linkedin_mcp_server.cli_main.create_mcp_server", lambda: mcp) + + cli_main.main() + + mcp.run.assert_called_once_with( + transport="streamable-http", + host="0.0.0.0", + port=8123, + path="/custom-mcp", + ) + captured = capsys.readouterr() + assert captured.out == "" + + +def test_get_version_prefers_installed_metadata( + monkeypatch: pytest.MonkeyPatch, +) -> None: + calls: list[str] = [] + + def fake_version(package_name: str) -> str: + calls.append(package_name) + if package_name == "linkedin-scraper-mcp": + return "4.2.0" + raise importlib.metadata.PackageNotFoundError(package_name) + + monkeypatch.setattr(importlib.metadata, "version", fake_version) + + assert cli_main.get_version() == "4.2.0" + assert calls == ["linkedin-scraper-mcp"] + + +def test_main_non_interactive_auth_failure_has_no_stdout( + monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str] +) -> None: + config = _make_config( + is_interactive=False, transport="stdio", transport_explicitly_set=False + ) + _patch_main_dependencies(monkeypatch, config) + monkeypatch.setattr( + "linkedin_mcp_server.cli_main.ensure_authentication_ready", + lambda: (_ for _ in ()).throw(CredentialsNotFoundError("missing profile")), + ) + + with pytest.raises(SystemExit) as exit_info: + cli_main.main() + + assert exit_info.value.code == 1 + captured = capsys.readouterr() + assert captured.out == "" diff --git a/uv.lock b/uv.lock index c3c39458..44cf7cc8 100644 --- a/uv.lock 
+++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.12" [[package]] @@ -1037,7 +1037,6 @@ dependencies = [ { name = "fastmcp" }, { name = "inquirer" }, { name = "patchright" }, - { name = "pyperclip" }, { name = "python-dotenv" }, ] @@ -1058,7 +1057,6 @@ requires-dist = [ { name = "fastmcp", specifier = ">=2.14.0" }, { name = "inquirer", specifier = ">=3.4.0" }, { name = "patchright", specifier = ">=1.40.0" }, - { name = "pyperclip", specifier = ">=1.9.0" }, { name = "python-dotenv", specifier = ">=1.1.1" }, ] From 06981e75710ec0d9a268d5ea9985cd300fcc7cfe Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Tue, 17 Feb 2026 20:44:49 +0100 Subject: [PATCH 388/565] Apply suggestions from code review --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 55c77a68..0a9bd123 100644 --- a/README.md +++ b/README.md @@ -97,7 +97,7 @@ This opens a browser for you to log in manually (5 minute timeout for 2FA, captc - **Default (stdio)**: Standard communication for local MCP servers - **Streamable HTTP**: For web-based MCP server - If no transport is specified, the server defaults to `stdio` -- Interactive terminal runs without explicit transport show a chooser prompt +- An interactive terminal without explicit transport shows a chooser prompt **CLI Options:** @@ -225,7 +225,7 @@ This opens a browser window where you log in manually (5 minute timeout for 2FA, - **Default (stdio)**: Standard communication for local MCP servers - **Streamable HTTP**: For a web-based MCP server - If no transport is specified, the server defaults to `stdio` -- Interactive terminal runs without explicit transport show a chooser prompt +- An interactive terminal without explicit transport shows a chooser prompt **CLI Options:** From 18c1a125fd342189cc7da7c9ba2fbc42b714a8a8 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 17 Feb 2026 20:54:12 
+0100 Subject: [PATCH 389/565] docs(README): Remove redundant note on default transport for MCP client configs --- README.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/README.md b/README.md index 0a9bd123..e1cd06b7 100644 --- a/README.md +++ b/README.md @@ -80,10 +80,6 @@ This opens a browser for you to log in manually (5 minute timeout for 2FA, captc } ``` -> [!NOTE] -> `stdio` is the default transport, so no `--transport` flag is required in MCP client configs. -> If you run the command directly in an interactive terminal and no transport is set, you'll be prompted to choose one. - > [!NOTE] > Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --login` again From 794d44c82cf00376e2902e49853b2f45798d391f Mon Sep 17 00:00:00 2001 From: Connor Moss Date: Fri, 20 Feb 2026 11:28:03 -0500 Subject: [PATCH 390/565] feat(tools): add search_people tool --- AGENTS.md | 1 + linkedin_mcp_server/scraping/extractor.py | 28 +++++++++++++ linkedin_mcp_server/tools/person.py | 50 +++++++++++++++++++++++ tests/test_tools.py | 27 ++++++++++++ 4 files changed, 106 insertions(+) diff --git a/AGENTS.md b/AGENTS.md index e54ded43..94d20690 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -61,6 +61,7 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis | `get_job_details` | Get job posting details | | `search_jobs` | Search jobs by keywords and location | | `close_session` | Close browser session and clean up resources | +| `search_people` | Search for people by keywords and location | **Tool Return Format:** diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 10998b88..2a34a397 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -383,3 +383,31 @@ async def search_jobs( "pages_visited": [url], "sections_requested": ["search_results"], } + + async def search_people( + self, + keywords: str, + 
location: str | None = None, + ) -> dict[str, Any]: + """Search for people and extract the results page. + + Returns: + {url, sections: {name: text}, pages_visited, sections_requested} + """ + params = f"keywords={quote_plus(keywords)}" + if location: + params += f"&location={quote_plus(location)}" + + url = f"https://www.linkedin.com/search/results/people/?{params}" + text = await self.extract_page(url) + + sections: dict[str, str] = {} + if text: + sections["search_results"] = text + + return { + "url": url, + "sections": sections, + "pages_visited": [url], + "sections_requested": ["search_results"], + } diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index f0caf4d6..56895f88 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -83,3 +83,53 @@ async def get_person_profile( except Exception as e: return handle_tool_error(e, "get_person_profile") + + @mcp.tool( + annotations=ToolAnnotations( + title="Search People", + readOnlyHint=True, + destructiveHint=False, + openWorldHint=True, + ) + ) + async def search_people( + keywords: str, + ctx: Context, + location: str | None = None, + ) -> dict[str, Any]: + """ + Search for people on LinkedIn. + + Args: + keywords: Search keywords (e.g., "software engineer", "recruiter at Google") + ctx: FastMCP context for progress reporting + location: Optional location filter (e.g., "New York", "Remote") + + Returns: + Dict with url, sections (name -> raw text), pages_visited, and sections_requested. + The LLM should parse the raw text to extract individual people and their profiles. 
+ """ + try: + await ensure_authenticated() + + logger.info( + "Searching people: keywords='%s', location='%s'", + keywords, + location, + ) + + browser = await get_or_create_browser() + extractor = LinkedInExtractor(browser.page) + + await ctx.report_progress( + progress=0, total=100, message="Starting people search" + ) + + result = await extractor.search_people(keywords, location) + + await ctx.report_progress(progress=100, total=100, message="Complete") + + return result + + except Exception as e: + return handle_tool_error(e, "search_people") diff --git a/tests/test_tools.py b/tests/test_tools.py index 6d9ee3aa..f9df4e96 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -40,6 +40,7 @@ def _make_mock_extractor(scrape_result: dict) -> MagicMock: mock.scrape_company = AsyncMock(return_value=scrape_result) mock.scrape_job = AsyncMock(return_value=scrape_result) mock.search_jobs = AsyncMock(return_value=scrape_result) + mock.search_people = AsyncMock(return_value=scrape_result) mock.extract_page = AsyncMock(return_value="some text") return mock @@ -128,6 +129,32 @@ async def test_get_person_profile_error(self, mock_context, monkeypatch): result = await tool_fn("test-user", mock_context) assert result["error"] == "session_expired" + async def test_search_people( + self, mock_context, patch_tool_deps, monkeypatch + ): + expected = { + "url": "https://www.linkedin.com/search/results/people/?keywords=AI+engineer&location=New+York", + "sections": {"search_results": "Jane Doe\nAI Engineer at Acme\nNew York"}, + "pages_visited": [ + "https://www.linkedin.com/search/results/people/?keywords=AI+engineer&location=New+York" + ], + "sections_requested": ["search_results"], + } + mock_extractor = _make_mock_extractor(expected) + monkeypatch.setattr( + "linkedin_mcp_server.tools.person.LinkedInExtractor", + lambda *a, **kw: mock_extractor, + ) + + from linkedin_mcp_server.tools.person import register_person_tools + + mcp = FastMCP("test") + register_person_tools(mcp) 
+ + tool_fn = await get_tool_fn(mcp, "search_people") + result = await tool_fn("AI engineer", mock_context, location="New York") + assert "search_results" in result["sections"] + mock_extractor.search_people.assert_awaited_once_with("AI engineer", "New York") class TestCompanyTools: async def test_get_company_profile( From a76b271f51cf9ecb76c772f38f19db1c62a611d4 Mon Sep 17 00:00:00 2001 From: Connor Moss Date: Fri, 20 Feb 2026 11:47:31 -0500 Subject: [PATCH 391/565] style: fix ruff formatting in test_tools --- tests/test_tools.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/test_tools.py b/tests/test_tools.py index f9df4e96..9f0f1b7f 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -129,9 +129,7 @@ async def test_get_person_profile_error(self, mock_context, monkeypatch): result = await tool_fn("test-user", mock_context) assert result["error"] == "session_expired" - async def test_search_people( - self, mock_context, patch_tool_deps, monkeypatch - ): + async def test_search_people(self, mock_context, patch_tool_deps, monkeypatch): expected = { "url": "https://www.linkedin.com/search/results/people/?keywords=AI+engineer&location=New+York", "sections": {"search_results": "Jane Doe\nAI Engineer at Acme\nNew York"}, @@ -156,6 +154,7 @@ async def test_search_people( assert "search_results" in result["sections"] mock_extractor.search_people.assert_awaited_once_with("AI engineer", "New York") + class TestCompanyTools: async def test_get_company_profile( self, mock_context, patch_tool_deps, monkeypatch From 1e75f3615084d4f629265a83f7ff45264b3fdc97 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Feb 2026 18:11:55 +0100 Subject: [PATCH 392/565] docs(README/Docker): add 'search_people' feature to README and Docker Hub documentation --- README.md | 1 + docs/docker-hub.md | 1 + 2 files changed, 2 insertions(+) diff --git a/README.md b/README.md index e1cd06b7..5ba9e653 100644 --- a/README.md +++ b/README.md @@ -44,6 +44,7 
@@ What has Anthropic been posting about recently? https://www.linkedin.com/company | `get_company_profile` | Extract company information with explicit section selection (posts, jobs) | Working | | `get_company_posts` | Get recent posts from a company's LinkedIn feed | Working | | `search_jobs` | Search for jobs with keywords and location filters | Working | +| `search_people` | Search for people by keywords and location | Working | | `get_job_details` | Get detailed information about a specific job posting | Working | | `close_session` | Close browser session and clean up resources | Working | diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 51c3fe01..e122abc5 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -8,6 +8,7 @@ A Model Context Protocol (MCP) server that connects AI assistants to LinkedIn. A - **Company Profiles**: Extract comprehensive company data - **Job Details**: Retrieve job posting information - **Job Search**: Search for jobs with keywords and location filters +- **People Search**: Search for people by keywords and location - **Company Posts**: Get recent posts from a company's LinkedIn feed ## Quick Start From 178b69cead0bc77045e21b860609c1fb6c9206de Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Feb 2026 18:21:05 +0100 Subject: [PATCH 393/565] chore: bump version to 4.1.0 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2e307ee2..5a5c74f2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "4.0.0" +version = "4.1.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." 
readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 44cf7cc8..47b88709 100644 --- a/uv.lock +++ b/uv.lock @@ -1031,7 +1031,7 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "4.0.0" +version = "4.1.0" source = { editable = "." } dependencies = [ { name = "fastmcp" }, From fc0868231172f0968fa33ea651d5a11f52b64084 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Feb 2026 18:24:27 +0100 Subject: [PATCH 394/565] docs(README): update contributing guidelines to encourage issue discussion before PRs --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 5ba9e653..e423ed8e 100644 --- a/README.md +++ b/README.md @@ -462,7 +462,9 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por
-Feel free to open an [issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) or [PR](https://github.com/stickerdaniel/linkedin-mcp-server/pulls)! +## Contributing + +Contributions are welcome! Please **open an issue first** to discuss the feature or bug fix before submitting a PR. This helps align on the approach before any code is written.

From a35072aa68f93f52d0325cde72eb118db6a96d83 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Feb 2026 18:26:35 +0100 Subject: [PATCH 395/565] docs(README): consolidate contributing guidelines and remove redundant section --- README.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index e423ed8e..6d082455 100644 --- a/README.md +++ b/README.md @@ -346,6 +346,8 @@ Runtime server logs are emitted by FastMCP/Uvicorn. ## ๐Ÿ Local Setup (Develop & Contribute) +Contributions are welcome! Please [open an issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) first to discuss the feature or bug fix before submitting a PR. This helps align on the approach before any code is written. + **Prerequisites:** [Git](https://git-scm.com/downloads) and [uv](https://docs.astral.sh/uv/) installed ### Installation @@ -462,9 +464,6 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por
-## Contributing - -Contributions are welcome! Please **open an issue first** to discuss the feature or bug fix before submitting a PR. This helps align on the approach before any code is written.

From 7fc75648989d8cd31e610db346faee8cecc30546 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Feb 2026 18:39:08 +0100 Subject: [PATCH 396/565] ci(release): bypass branch protection for bot push --- .github/workflows/release.yml | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0af0a9ec..fb4be0db 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -55,6 +55,7 @@ jobs: contents: write packages: write id-token: write # Required for PyPI Trusted Publishing + administration: write # needed to temporarily lift branch protection steps: - name: Checkout code @@ -78,6 +79,13 @@ jobs: sed -i 's/stickerdaniel\/linkedin-mcp-server:[^ ]*/stickerdaniel\/linkedin-mcp-server:'$VERSION'/' docker-compose.yml echo "โœ… Updated manifest.json and docker-compose.yml to version $VERSION" + - name: Remove branch protection (temporary) + run: | + gh api repos/${{ github.repository }}/branches/main/protection \ + --method DELETE + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Commit version updates run: | set -e @@ -92,6 +100,28 @@ jobs: echo "โœ… Committed version updates" fi + - name: Restore branch protection + if: always() + run: | + gh api repos/${{ github.repository }}/branches/main/protection \ + --method PUT \ + --input - <<'EOF' + { + "required_status_checks": { + "strict": true, + "checks": [ + {"context": "lint-and-check", "app_id": 15368}, + {"context": "test", "app_id": 15368} + ] + }, + "enforce_admins": false, + "required_pull_request_reviews": null, + "restrictions": null + } + EOF + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Create release tag run: | set -e From e3a9a0dce148a79a1314e415a9b9a68d71b8a999 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Feb 2026 18:41:57 +0100 Subject: [PATCH 397/565] ci(release): require PRs and enforce admins on main --- .github/workflows/release.yml | 8 ++++++-- 1 file 
changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index fb4be0db..387a8a8f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -114,8 +114,12 @@ jobs: {"context": "test", "app_id": 15368} ] }, - "enforce_admins": false, - "required_pull_request_reviews": null, + "enforce_admins": true, + "required_pull_request_reviews": { + "dismiss_stale_reviews": false, + "require_code_owner_reviews": false, + "required_approving_review_count": 0 + }, "restrictions": null } EOF From a04be8ee733a85b756357da3041cdcf0a3491929 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Feb 2026 18:53:17 +0100 Subject: [PATCH 398/565] chore: bump version to 4.1.1 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5a5c74f2..af1e0ee6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "4.1.0" +version = "4.1.1" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 47b88709..90753233 100644 --- a/uv.lock +++ b/uv.lock @@ -1031,7 +1031,7 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "4.1.0" +version = "4.1.1" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From e4726809bf00873a235de4c850f1cdb046298cf4 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Feb 2026 19:05:09 +0100 Subject: [PATCH 399/565] fix(release): avoid YAML heredoc issue in restore step --- .github/workflows/release.yml | 42 +++++++++++++++++------------------ 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 387a8a8f..d6825cc6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -102,29 +102,29 @@ jobs: - name: Restore branch protection if: always() - run: | - gh api repos/${{ github.repository }}/branches/main/protection \ - --method PUT \ - --input - <<'EOF' - { - "required_status_checks": { - "strict": true, - "checks": [ - {"context": "lint-and-check", "app_id": 15368}, - {"context": "test", "app_id": 15368} - ] - }, - "enforce_admins": true, - "required_pull_request_reviews": { - "dismiss_stale_reviews": false, - "require_code_owner_reviews": false, - "required_approving_review_count": 0 - }, - "restrictions": null - } - EOF env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PAYLOAD: >- + { + "required_status_checks": { + "strict": true, + "checks": [ + {"context": "lint-and-check", "app_id": 15368}, + {"context": "test", "app_id": 15368} + ] + }, + "enforce_admins": true, + "required_pull_request_reviews": { + "dismiss_stale_reviews": false, + "require_code_owner_reviews": false, + "required_approving_review_count": 0 + }, + "restrictions": null + } + run: | + echo "$PAYLOAD" | gh api repos/${{ github.repository }}/branches/main/protection \ + --method PUT \ + --input - - name: Create release tag run: | From 3294888223e81dcb54bfbe47f1e8be59e2ad5085 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Feb 2026 19:15:20 +0100 Subject: [PATCH 400/565] fix(release): use PAT for branch protection API calls Bump version to 4.1.2 to trigger release workflow test. 
--- .github/workflows/release.yml | 5 ++--- pyproject.toml | 2 +- uv.lock | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d6825cc6..fbaca058 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -55,7 +55,6 @@ jobs: contents: write packages: write id-token: write # Required for PyPI Trusted Publishing - administration: write # needed to temporarily lift branch protection steps: - name: Checkout code @@ -84,7 +83,7 @@ jobs: gh api repos/${{ github.repository }}/branches/main/protection \ --method DELETE env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_TOKEN: ${{ secrets.GH_ADMIN_TOKEN }} - name: Commit version updates run: | @@ -103,7 +102,7 @@ jobs: - name: Restore branch protection if: always() env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_TOKEN: ${{ secrets.GH_ADMIN_TOKEN }} PAYLOAD: >- { "required_status_checks": { diff --git a/pyproject.toml b/pyproject.toml index af1e0ee6..7f8cbd7b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "4.1.1" +version = "4.1.2" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 90753233..eab45f07 100644 --- a/uv.lock +++ b/uv.lock @@ -1031,7 +1031,7 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "4.1.1" +version = "4.1.2" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From 3ec7a5b601e147e8f6dfc56f71b6a0a687fa2ed4 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 20 Feb 2026 18:47:02 +0000 Subject: [PATCH 401/565] chore: update manifest.json and docker-compose.yml to v4.1.2 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index b86fd519..5436362c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:4.0.0 + image: stickerdaniel/linkedin-mcp-server:4.1.2 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index 977cc65e..98098193 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "4.0.0", + "version": "4.1.2", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.0.0", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.1.2", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:4.0.0" + "stickerdaniel/linkedin-mcp-server:4.1.2" ] } }, From a8d1fb83d5d6627c5a5a1529ad87fae5c90ac71a Mon Sep 17 00:00:00 2001 From: ConnorMoss02 Date: Thu, 26 Feb 2026 13:48:20 -0500 Subject: [PATCH 402/565] feat(tools): add posts section to get_person_profile --- AGENTS.md | 2 +- linkedin_mcp_server/scraping/extractor.py | 6 ++++++ linkedin_mcp_server/scraping/fields.py | 2 ++ linkedin_mcp_server/tools/person.py | 4 ++-- tests/test_fields.py | 3 ++- tests/test_scraping.py | 18 +++++++++++++++++- 6 files changed, 30 insertions(+), 5 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 94d20690..3c4a1b8a 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -55,7 +55,7 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis | Tool | Description | |------|-------------| -| `get_person_profile` | Get profile with explicit `sections` selection (experience, education, interests, honors, languages, contact_info) | +| `get_person_profile` | Get profile with explicit `sections` selection (experience, education, interests, honors, languages, contact_info, posts) | | `get_company_profile` | Get company info with explicit 
`sections` selection (posts, jobs) | | `get_company_posts` | Get recent posts from company feed | | `get_job_details` | Get job posting details | diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 2a34a397..0c620a85 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -248,6 +248,12 @@ async def scrape_person( "/overlay/contact-info/", True, ), + ( + PersonScrapingFields.POSTS, + "posts", + "/recent-activity/all/", + False, + ), ] for flag, section_name, suffix, is_overlay in page_map: diff --git a/linkedin_mcp_server/scraping/fields.py b/linkedin_mcp_server/scraping/fields.py index 0315dd00..cdee9ff0 100644 --- a/linkedin_mcp_server/scraping/fields.py +++ b/linkedin_mcp_server/scraping/fields.py @@ -16,6 +16,7 @@ class PersonScrapingFields(Flag): HONORS = auto() # /in/{username}/details/honors/ LANGUAGES = auto() # /in/{username}/details/languages/ CONTACT_INFO = auto() # /in/{username}/overlay/contact-info/ + POSTS = auto() # /in/{username}/recent-activity/all/ class CompanyScrapingFields(Flag): @@ -34,6 +35,7 @@ class CompanyScrapingFields(Flag): "honors": PersonScrapingFields.HONORS, "languages": PersonScrapingFields.LANGUAGES, "contact_info": PersonScrapingFields.CONTACT_INFO, + "posts": PersonScrapingFields.POSTS, } COMPANY_SECTION_MAP: dict[str, CompanyScrapingFields] = { diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 56895f88..8ca5c856 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -45,8 +45,8 @@ async def get_person_profile( ctx: FastMCP context for progress reporting sections: Comma-separated list of extra sections to scrape. The main profile page is always included. 
- Available sections: experience, education, interests, honors, languages, contact_info - Examples: "experience,education", "contact_info", "honors,languages" + Available sections: experience, education, interests, honors, languages, contact_info, posts + Examples: "experience,education", "contact_info", "honors,languages", "posts" Default (None) scrapes only the main profile page. Returns: diff --git a/tests/test_fields.py b/tests/test_fields.py index f7d9977b..7574ee6a 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -101,7 +101,7 @@ def test_whitespace_and_case_handling(self): def test_all_sections(self): flags, unknown = parse_person_sections( - "experience,education,interests,honors,languages,contact_info" + "experience,education,interests,honors,languages,contact_info,posts" ) expected = ( PersonScrapingFields.BASIC_INFO @@ -111,6 +111,7 @@ def test_all_sections(self): | PersonScrapingFields.HONORS | PersonScrapingFields.LANGUAGES | PersonScrapingFields.CONTACT_INFO + | PersonScrapingFields.POSTS ) assert flags == expected assert unknown == [] diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 7493e153..abce9284 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -263,6 +263,7 @@ async def test_all_flags_visit_all_pages(self, mock_page): | PersonScrapingFields.HONORS | PersonScrapingFields.LANGUAGES | PersonScrapingFields.CONTACT_INFO + | PersonScrapingFields.POSTS ) with ( patch.object( @@ -282,7 +283,7 @@ async def test_all_flags_visit_all_pages(self, mock_page): urls = result["pages_visited"] # main_profile, experience, education, interests, honors, languages, contact_info - assert len(urls) == 7 + assert len(urls) == 8 assert result["sections_requested"] == [ "main_profile", "experience", @@ -291,8 +292,23 @@ async def test_all_flags_visit_all_pages(self, mock_page): "honors", "languages", "contact_info", + "posts", ] + async def test_posts_visits_recent_activity(self, mock_page): + extractor = 
LinkedInExtractor(mock_page) + with patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value="Post 1\nPost 2", + ): + result = await extractor.scrape_person( + "test-user", PersonScrapingFields.POSTS + ) + assert any("recent-activity" in url for url in result["pages_visited"]) + assert "posts" in result["sections"] + async def test_error_isolation(self, mock_page): """One section failing doesn't block others.""" call_count = 0 From 19eec92f83c87564982641a49ea91d95f85a45da Mon Sep 17 00:00:00 2001 From: ConnorMoss02 Date: Thu, 26 Feb 2026 14:26:55 -0500 Subject: [PATCH 403/565] docs: add posts to get_person_profile README table --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6d082455..a6769991 100644 --- a/README.md +++ b/README.md @@ -40,7 +40,7 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company | Tool | Description | Status | |------|-------------|--------| -| `get_person_profile` | Get profile info with explicit section selection (experience, education, interests, honors, languages, contact_info) | Working | +| `get_person_profile` | Get profile info with explicit section selection (experience, education, interests, honors, languages, contact_info, posts) | Working | | `get_company_profile` | Extract company information with explicit section selection (posts, jobs) | Working | | `get_company_posts` | Get recent posts from a company's LinkedIn feed | Working | | `search_jobs` | Search for jobs with keywords and location filters | Working | From cd53bd84f5dd097584e1125d637b425ef7c28fd7 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 4 Mar 2026 04:56:00 +0000 Subject: [PATCH 404/565] chore(deps): update ci dependencies --- .github/workflows/ci.yml | 4 ++-- .github/workflows/claude.yml | 2 +- .github/workflows/release.yml | 4 ++-- Dockerfile | 4 ++-- 4 files changed, 7 insertions(+), 7 
deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 977d7b9d..0849e5b1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,7 +15,7 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Set up uv - uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7 + uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # v7 with: enable-cache: true @@ -35,7 +35,7 @@ jobs: steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7 + - uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # v7 with: enable-cache: true diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index c704cda5..694fb468 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -32,7 +32,7 @@ jobs: - name: Run Claude Code id: claude - uses: anthropics/claude-code-action@68cfeead1890300cc87935dbe2c023825be87b8a # v1 + uses: anthropics/claude-code-action@5f8e5bfe5b03891348854ae401476fc905a6ff6a # v1 with: claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index fbaca058..9fdf133b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -18,7 +18,7 @@ jobs: fetch-depth: 2 # Need to compare with previous commit - name: Set up uv - uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7 + uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # v7 with: enable-cache: true @@ -63,7 +63,7 @@ jobs: fetch-depth: 0 - name: Set up uv - uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7 + uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # v7 with: enable-cache: true diff --git a/Dockerfile b/Dockerfile index 89412a74..28d0c841 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,9 +1,9 @@ # 
Use slim Python base instead of full Playwright image (saves ~300-400 MB) # Only Chromium is installed, not Firefox/WebKit -FROM python:3.14-slim-bookworm@sha256:f0540d0436a220db0a576ccfe75631ab072391e43a24b88972ef9833f699095f +FROM python:3.14-slim-bookworm@sha256:5404df00cf00e6e7273375f415651837b4d192ac6859c44d3b740888ac798c99 # Install uv package manager -COPY --from=ghcr.io/astral-sh/uv:latest@sha256:7a88d4c4e6f44200575000638453a5a381db0ae31ad5c3a51b14f8687c9d93a3 /uv /uvx /bin/ +COPY --from=ghcr.io/astral-sh/uv:latest@sha256:88234bc9e09c2b2f6d176a3daf411419eb0370d450a08129257410de9cfafd2a /uv /uvx /bin/ # Create non-root user first (matching original pwuser from Playwright image) RUN useradd -m -s /bin/bash pwuser From 0f5a765875ba15f5ef9ce8ff18ce9ca09e3a8e3e Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 09:58:55 +0100 Subject: [PATCH 405/565] docs: add person posts feature to docker-hub docs --- docs/docker-hub.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/docker-hub.md b/docs/docker-hub.md index e122abc5..2a288bb7 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -9,6 +9,7 @@ A Model Context Protocol (MCP) server that connects AI assistants to LinkedIn. 
A - **Job Details**: Retrieve job posting information - **Job Search**: Search for jobs with keywords and location filters - **People Search**: Search for people by keywords and location +- **Person Posts**: Get recent activity/posts from a person's profile - **Company Posts**: Get recent posts from a company's LinkedIn feed ## Quick Start From c0f541d7921e04016dcedac4c16a08f08f8b42c6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 10:02:51 +0100 Subject: [PATCH 406/565] test: add POSTS flag to distinctness test and fix stale comment --- tests/test_fields.py | 1 + tests/test_scraping.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_fields.py b/tests/test_fields.py index 7574ee6a..0e729d3a 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -20,6 +20,7 @@ def test_atomic_flags_are_distinct(self): PersonScrapingFields.HONORS, PersonScrapingFields.LANGUAGES, PersonScrapingFields.CONTACT_INFO, + PersonScrapingFields.POSTS, ] for i, a in enumerate(flags): for b in flags[i + 1 :]: diff --git a/tests/test_scraping.py b/tests/test_scraping.py index abce9284..441d7ce0 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -282,7 +282,7 @@ async def test_all_flags_visit_all_pages(self, mock_page): result = await extractor.scrape_person("testuser", fields) urls = result["pages_visited"] - # main_profile, experience, education, interests, honors, languages, contact_info + # main_profile, experience, education, interests, honors, languages, contact_info, posts assert len(urls) == 8 assert result["sections_requested"] == [ "main_profile", From cf59350ece395207646cdfd306c3dff340d6601d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 10:15:33 +0100 Subject: [PATCH 407/565] docs(contributing): add CONTRIBUTING.md with contributor checklists --- CONTRIBUTING.md | 142 ++++++++++++++++++++++++++++++++++++++++++++++++ README.md | 2 +- 2 files changed, 143 insertions(+), 1 deletion(-) create 
mode 100644 CONTRIBUTING.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..b3dd60cf --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,142 @@ +# Contributing + +Contributions are welcome! Please [open an issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) first to discuss the feature or bug fix before submitting a PR. + +## Development Setup + +See the [README](README.md#-local-setup-develop--contribute) for full setup instructions. + +```bash +git clone https://github.com/stickerdaniel/linkedin-mcp-server +cd linkedin-mcp-server +uv sync # Install dependencies +uv run pre-commit install # Set up pre-commit hooks +uv run patchright install chromium # Install browser +uv run pytest --cov # Run tests with coverage +``` + +## Architecture: One Flag = One Navigation + +The scraping engine is built around a **one-flag-one-navigation** design. Understanding this is key to contributing effectively. + +### Why This Design? + +AI assistants (LLMs) call our MCP tools. Each LinkedIn page navigation takes time and risks rate limits. By mapping each `Flag` to exactly one URL, the LLM can request only the sections it needs โ€” skipping unnecessary navigations while still capturing all available info from each visited page via `innerText` extraction. + +### How It Works + +**Flag enums** (`scraping/fields.py`) define which pages exist: + +```python +class PersonScrapingFields(Flag): + BASIC_INFO = auto() # /in/{username}/ + EXPERIENCE = auto() # /in/{username}/details/experience/ + CONTACT_INFO = auto() # /in/{username}/overlay/contact-info/ + POSTS = auto() # /in/{username}/recent-activity/all/ + # ... +``` + +**Section maps** connect user-facing names to flags: + +```python +PERSON_SECTION_MAP = { + "experience": PersonScrapingFields.EXPERIENCE, + "contact_info": PersonScrapingFields.CONTACT_INFO, + # ... 
+} +``` + +**Page maps** (`scraping/extractor.py`) wire flags to URLs: + +```python +# (flag, section_name, url_suffix, is_overlay) +page_map = [ + (PersonScrapingFields.BASIC_INFO, "main_profile", "/", False), + (PersonScrapingFields.EXPERIENCE, "experience", "/details/experience/", False), + (PersonScrapingFields.CONTACT_INFO, "contact_info", "/overlay/contact-info/", True), + # ... +] +``` + +The `is_overlay` boolean distinguishes modal overlays (like contact info) from full page navigations โ€” overlays use a different extraction method that reads from the `` element. + +**Return format** โ€” all scraping tools return: + +```python +{"url": str, "sections": {name: raw_text}, "pages_visited": list, "sections_requested": list} +``` + +## Checklist: Adding a New Section + +When adding a section to an existing tool (e.g., adding "certifications" to `get_person_profile`): + +### Code + +- [ ] Add flag to `PersonScrapingFields` or `CompanyScrapingFields` with URL comment (`scraping/fields.py`) +- [ ] Add entry to `PERSON_SECTION_MAP` or `COMPANY_SECTION_MAP` (`scraping/fields.py`) +- [ ] Add tuple to `page_map` in `scrape_person()` or `scrape_company()` (`scraping/extractor.py`) +- [ ] Update tool docstring with new section name (`tools/person.py` or `tools/company.py`) + +### Tests + +- [ ] Add flag to `test_atomic_flags_are_distinct` (`tests/test_fields.py`) +- [ ] Add to `test_all_sections` parse test (`tests/test_fields.py`) +- [ ] Update `test_all_flags_visit_all_pages` โ€” add flag, bump count, add to `sections_requested` list, update comment (`tests/test_scraping.py`) +- [ ] Add dedicated navigation test (e.g., `test_posts_visits_recent_activity`) (`tests/test_scraping.py`) + +### Docs + +- [ ] Update tool table in `README.md` +- [ ] Update tool table in `AGENTS.md` +- [ ] Update features list in `docs/docker-hub.md` +- [ ] Update tools array/description in `manifest.json` + +### Verify + +- [ ] `uv run pytest --cov` +- [ ] `uv run ruff check . 
--fix && uv run ruff format .` +- [ ] `uv run pre-commit run --all-files` + +## Checklist: Adding a New Tool + +When adding an entirely new MCP tool (e.g., `search_companies`): + +### Code + +- [ ] Add extractor method to `LinkedInExtractor` if needed (`scraping/extractor.py`) +- [ ] Add or extend tool registration function (`tools/*.py`) +- [ ] Register tools in `create_mcp_server()` if new file (`server.py`) + +### Tests + +- [ ] Add mock method to `_make_mock_extractor` (`tests/test_tools.py`) +- [ ] Add tool-level test class/method (`tests/test_tools.py`) +- [ ] Add extractor-level tests if new method (`tests/test_scraping.py`) + +### Docs + +- [ ] Update tool table in `README.md` +- [ ] Update tool table in `AGENTS.md` +- [ ] Update features list in `docs/docker-hub.md` +- [ ] Add tool to `tools` array in `manifest.json` + +### Verify + +- [ ] `uv run pytest --cov` +- [ ] `uv run ruff check . --fix && uv run ruff format .` +- [ ] `uv run pre-commit run --all-files` + +## Workflow + +1. [Open an issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) describing the feature or bug +2. Create a branch: `feature/-` +3. Implement, test, and update docs (see checklists above) +4. Open a PR โ€” AI agents review first, then manual review +5. Don't squash commits on merge + +## Code Style + +- **Commits:** conventional commits โ€” `type(scope): subject` (see [CLAUDE.md](CLAUDE.md) for details) +- **Lint/format:** `uv run ruff check . --fix && uv run ruff format .` +- **Type check:** `uv run ty check` +- **Tests:** `uv run pytest --cov` diff --git a/README.md b/README.md index 6d082455..901bbf12 100644 --- a/README.md +++ b/README.md @@ -346,7 +346,7 @@ Runtime server logs are emitted by FastMCP/Uvicorn. ## ๐Ÿ Local Setup (Develop & Contribute) -Contributions are welcome! Please [open an issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) first to discuss the feature or bug fix before submitting a PR. 
This helps align on the approach before any code is written. +Contributions are welcome! See [CONTRIBUTING.md](CONTRIBUTING.md) for architecture guidelines and checklists. Please [open an issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) first to discuss the feature or bug fix before submitting a PR. **Prerequisites:** [Git](https://git-scm.com/downloads) and [uv](https://docs.astral.sh/uv/) installed From c7cc59df695b800d936cf600049b9adad748c611 Mon Sep 17 00:00:00 2001 From: ConnorMoss02 Date: Wed, 4 Mar 2026 08:08:25 -0500 Subject: [PATCH 408/565] test: pin full URL path and assert sections_requested --- tests/test_scraping.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 441d7ce0..7a883362 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -282,7 +282,7 @@ async def test_all_flags_visit_all_pages(self, mock_page): result = await extractor.scrape_person("testuser", fields) urls = result["pages_visited"] - # main_profile, experience, education, interests, honors, languages, contact_info, posts + # main_profile, experience, education, interests, honors, languages, contact_info assert len(urls) == 8 assert result["sections_requested"] == [ "main_profile", @@ -306,8 +306,9 @@ async def test_posts_visits_recent_activity(self, mock_page): result = await extractor.scrape_person( "test-user", PersonScrapingFields.POSTS ) - assert any("recent-activity" in url for url in result["pages_visited"]) + assert any("/recent-activity/all/" in url for url in result["pages_visited"]) assert "posts" in result["sections"] + assert result["sections_requested"] == ["main_profile", "posts"] async def test_error_isolation(self, mock_page): """One section failing doesn't block others.""" From 3e3ebcc4ed8094869b2100436275e3cce084f85d Mon Sep 17 00:00:00 2001 From: ConnorMoss02 Date: Wed, 4 Mar 2026 08:10:01 -0500 Subject: [PATCH 409/565] docs: comments now updated --- 
tests/test_scraping.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 7a883362..42673461 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -282,7 +282,7 @@ async def test_all_flags_visit_all_pages(self, mock_page): result = await extractor.scrape_person("testuser", fields) urls = result["pages_visited"] - # main_profile, experience, education, interests, honors, languages, contact_info + # main_profile, experience, education, interests, honors, languages, contact_info, posts assert len(urls) == 8 assert result["sections_requested"] == [ "main_profile", From 0320c06ac53de2e93814ba12e8ded9500f069140 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 14:27:37 +0100 Subject: [PATCH 410/565] docs(contributing): add missing uv sync --group dev step --- CONTRIBUTING.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b3dd60cf..a74adc7b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -10,6 +10,7 @@ See the [README](README.md#-local-setup-develop--contribute) for full setup inst git clone https://github.com/stickerdaniel/linkedin-mcp-server cd linkedin-mcp-server uv sync # Install dependencies +uv sync --group dev # Install dev dependencies uv run pre-commit install # Set up pre-commit hooks uv run patchright install chromium # Install browser uv run pytest --cov # Run tests with coverage From 1e81d4b256ed66c07a73a375ab6a2a4143d63e51 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 14:29:09 +0100 Subject: [PATCH 411/565] docs(contributing): fix examples to match main branch --- CONTRIBUTING.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a74adc7b..46305e2e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -33,7 +33,7 @@ class PersonScrapingFields(Flag): BASIC_INFO = auto() # /in/{username}/ EXPERIENCE = auto() # 
/in/{username}/details/experience/ CONTACT_INFO = auto() # /in/{username}/overlay/contact-info/ - POSTS = auto() # /in/{username}/recent-activity/all/ + LANGUAGES = auto() # /in/{username}/details/languages/ # ... ``` @@ -83,7 +83,7 @@ When adding a section to an existing tool (e.g., adding "certifications" to `get - [ ] Add flag to `test_atomic_flags_are_distinct` (`tests/test_fields.py`) - [ ] Add to `test_all_sections` parse test (`tests/test_fields.py`) - [ ] Update `test_all_flags_visit_all_pages` โ€” add flag, bump count, add to `sections_requested` list, update comment (`tests/test_scraping.py`) -- [ ] Add dedicated navigation test (e.g., `test_posts_visits_recent_activity`) (`tests/test_scraping.py`) +- [ ] Add dedicated navigation test (e.g., `test_certifications_visits_details_page`) (`tests/test_scraping.py`) ### Docs From 23400714997372e0183b6f655edfdbaa5725d9b4 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 14:42:26 +0100 Subject: [PATCH 412/565] docs(contributing): add fix/ branch prefix --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 46305e2e..ee3b1319 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -130,7 +130,7 @@ When adding an entirely new MCP tool (e.g., `search_companies`): ## Workflow 1. [Open an issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) describing the feature or bug -2. Create a branch: `feature/-` +2. Create a branch: `feature/-` or `fix/-` 3. Implement, test, and update docs (see checklists above) 4. Open a PR โ€” AI agents review first, then manual review 5. 
Don't squash commits on merge From 849b594ae00b10fc8dd92b85ebb08d0a7a3a8a11 Mon Sep 17 00:00:00 2001 From: Connor Moss Date: Wed, 4 Mar 2026 08:52:31 -0500 Subject: [PATCH 413/565] docs: sync manifest.json tools and features with current capabilities --- manifest.json | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/manifest.json b/manifest.json index 98098193..313bc072 100644 --- a/manifest.json +++ b/manifest.json @@ -4,7 +4,7 @@ "display_name": "LinkedIn MCP Server", "version": "4.1.2", "description": "Connect Claude to LinkedIn for profiles, companies, and job details", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.1.2", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n- **People Search**: Search for people by keywords and location\n- **Company Posts**: Get recent posts from a company's LinkedIn feed\n- **Person Posts**: Get recent activity and posts from a person's profile\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.1.2", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -33,12 +33,16 @@ "tools": [ { "name": "get_person_profile", - "description": "Get detailed information from a LinkedIn profile including work history, education, skills, and connections" + "description": "Get detailed information from a LinkedIn profile including work history, education, skills, connections, and recent posts" }, { "name": "get_company_profile", "description": "Extract comprehensive company information and details" }, + { + "name": "get_company_posts", + "description": "Get recent posts from a company's LinkedIn feed" + }, { "name": "get_job_details", "description": "Retrieve specific job posting details using LinkedIn job IDs" @@ -47,6 +51,10 @@ "name": "search_jobs", "description": "Search for jobs with filters like keywords and location" }, + { + "name": "search_people", + "description": "Search for people on LinkedIn by keywords and location" + }, { "name": "close_session", "description": "Properly close browser session and clean up resources" From f43770d41936e45a5f170d9481e10e9d64d60626 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 15:29:15 
+0100 Subject: [PATCH 414/565] refactor(scraping): replace Flag enums with config dicts Collapse three parallel data structures (Flag enums, SECTION_MAP dicts, page_map lists) into single PERSON_SECTIONS/COMPANY_SECTIONS config dicts. Remove pages_visited and sections_requested from return format. Resolves: #181 --- AGENTS.md | 6 +- CONTRIBUTING.md | 55 +++--- linkedin_mcp_server/scraping/__init__.py | 8 +- linkedin_mcp_server/scraping/extractor.py | 120 ++---------- linkedin_mcp_server/scraping/fields.py | 95 ++++------ linkedin_mcp_server/tools/company.py | 10 +- linkedin_mcp_server/tools/job.py | 4 +- linkedin_mcp_server/tools/person.py | 8 +- tests/test_fields.py | 211 +++++++++------------ tests/test_scraping.py | 216 +++++++++------------- tests/test_tools.py | 42 ++--- 11 files changed, 282 insertions(+), 493 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 3c4a1b8a..0189918e 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -65,13 +65,13 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis **Tool Return Format:** -All scraping tools return: `{url, sections: {name: raw_text}, pages_visited, sections_requested}` +All scraping tools return: `{url, sections: {name: raw_text}}` **Scraping Architecture (`scraping/`):** -- `fields.py` - `PersonScrapingFields` and `CompanyScrapingFields` Flag enums +- `fields.py` - `PERSON_SECTIONS` and `COMPANY_SECTIONS` config dicts mapping section name to `(url_suffix, is_overlay)` - `extractor.py` - `LinkedInExtractor` class using navigate-scroll-innerText pattern -- **One flag = one navigation.** Each `PersonScrapingFields` / `CompanyScrapingFields` flag must map to exactly one page navigation. Never combine multiple URLs behind a single flag. +- **One section = one navigation.** Each entry in `PERSON_SECTIONS` / `COMPANY_SECTIONS` maps to exactly one page navigation. Never combine multiple URLs behind a single section. 
**Core Subpackage (`core/`):** diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ee3b1319..47cf22a1 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -16,55 +16,44 @@ uv run patchright install chromium # Install browser uv run pytest --cov # Run tests with coverage ``` -## Architecture: One Flag = One Navigation +## Architecture: One Section = One Navigation -The scraping engine is built around a **one-flag-one-navigation** design. Understanding this is key to contributing effectively. +The scraping engine is built around a **one-section-one-navigation** design. Understanding this is key to contributing effectively. ### Why This Design? -AI assistants (LLMs) call our MCP tools. Each LinkedIn page navigation takes time and risks rate limits. By mapping each `Flag` to exactly one URL, the LLM can request only the sections it needs โ€” skipping unnecessary navigations while still capturing all available info from each visited page via `innerText` extraction. +AI assistants (LLMs) call our MCP tools. Each LinkedIn page navigation takes time and risks rate limits. By mapping each section to exactly one URL, the LLM can request only the sections it needs โ€” skipping unnecessary navigations while still capturing all available info from each visited page via `innerText` extraction. ### How It Works -**Flag enums** (`scraping/fields.py`) define which pages exist: +**Section config dicts** (`scraping/fields.py`) define which pages exist: ```python -class PersonScrapingFields(Flag): - BASIC_INFO = auto() # /in/{username}/ - EXPERIENCE = auto() # /in/{username}/details/experience/ - CONTACT_INFO = auto() # /in/{username}/overlay/contact-info/ - LANGUAGES = auto() # /in/{username}/details/languages/ - # ... 
-``` - -**Section maps** connect user-facing names to flags: - -```python -PERSON_SECTION_MAP = { - "experience": PersonScrapingFields.EXPERIENCE, - "contact_info": PersonScrapingFields.CONTACT_INFO, +# Maps section name -> (url_suffix, is_overlay) +PERSON_SECTIONS: dict[str, tuple[str, bool]] = { + "main_profile": ("/", False), + "experience": ("/details/experience/", False), + "contact_info": ("/overlay/contact-info/", True), + "languages": ("/details/languages/", False), # ... } ``` -**Page maps** (`scraping/extractor.py`) wire flags to URLs: +The `is_overlay` boolean distinguishes modal overlays (like contact info) from full page navigations โ€” overlays use a different extraction method that reads from the `` element. + +The extractor iterates the config dict directly, checking which sections the caller requested: ```python -# (flag, section_name, url_suffix, is_overlay) -page_map = [ - (PersonScrapingFields.BASIC_INFO, "main_profile", "/", False), - (PersonScrapingFields.EXPERIENCE, "experience", "/details/experience/", False), - (PersonScrapingFields.CONTACT_INFO, "contact_info", "/overlay/contact-info/", True), - # ... -] +for section_name, (suffix, is_overlay) in PERSON_SECTIONS.items(): + if section_name not in requested: + continue + # navigate and extract... ``` -The `is_overlay` boolean distinguishes modal overlays (like contact info) from full page navigations โ€” overlays use a different extraction method that reads from the `` element. 
- **Return format** โ€” all scraping tools return: ```python -{"url": str, "sections": {name: raw_text}, "pages_visited": list, "sections_requested": list} +{"url": str, "sections": {name: raw_text}} ``` ## Checklist: Adding a New Section @@ -73,16 +62,14 @@ When adding a section to an existing tool (e.g., adding "certifications" to `get ### Code -- [ ] Add flag to `PersonScrapingFields` or `CompanyScrapingFields` with URL comment (`scraping/fields.py`) -- [ ] Add entry to `PERSON_SECTION_MAP` or `COMPANY_SECTION_MAP` (`scraping/fields.py`) -- [ ] Add tuple to `page_map` in `scrape_person()` or `scrape_company()` (`scraping/extractor.py`) +- [ ] Add entry to `PERSON_SECTIONS` or `COMPANY_SECTIONS` with `(url_suffix, is_overlay)` (`scraping/fields.py`) - [ ] Update tool docstring with new section name (`tools/person.py` or `tools/company.py`) ### Tests -- [ ] Add flag to `test_atomic_flags_are_distinct` (`tests/test_fields.py`) +- [ ] Add to `test_expected_keys` (`tests/test_fields.py`) - [ ] Add to `test_all_sections` parse test (`tests/test_fields.py`) -- [ ] Update `test_all_flags_visit_all_pages` โ€” add flag, bump count, add to `sections_requested` list, update comment (`tests/test_scraping.py`) +- [ ] Update `test_all_sections_visit_all_pages` โ€” add section to set, update assertions (`tests/test_scraping.py`) - [ ] Add dedicated navigation test (e.g., `test_certifications_visits_details_page`) (`tests/test_scraping.py`) ### Docs diff --git a/linkedin_mcp_server/scraping/__init__.py b/linkedin_mcp_server/scraping/__init__.py index 25ab79a8..07eb584b 100644 --- a/linkedin_mcp_server/scraping/__init__.py +++ b/linkedin_mcp_server/scraping/__init__.py @@ -2,16 +2,16 @@ from .extractor import LinkedInExtractor from .fields import ( - CompanyScrapingFields, - PersonScrapingFields, + COMPANY_SECTIONS, + PERSON_SECTIONS, parse_company_sections, parse_person_sections, ) __all__ = [ - "CompanyScrapingFields", + "COMPANY_SECTIONS", "LinkedInExtractor", - 
"PersonScrapingFields", + "PERSON_SECTIONS", "parse_company_sections", "parse_person_sections", ] diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 0c620a85..5a99daaf 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -15,12 +15,7 @@ scroll_to_bottom, ) -from .fields import ( - COMPANY_SECTION_MAP, - PERSON_SECTION_MAP, - CompanyScrapingFields, - PersonScrapingFields, -) +from .fields import COMPANY_SECTIONS, PERSON_SECTIONS logger = logging.getLogger(__name__) @@ -196,68 +191,18 @@ async def _extract_overlay_once(self, url: str) -> str: return _RATE_LIMITED_MSG return cleaned - async def scrape_person( - self, username: str, fields: PersonScrapingFields - ) -> dict[str, Any]: + async def scrape_person(self, username: str, requested: set[str]) -> dict[str, Any]: """Scrape a person profile with configurable sections. Returns: - {url, sections: {name: text}, pages_visited, sections_requested} + {url, sections: {name: text}} """ - fields |= PersonScrapingFields.BASIC_INFO + requested = requested | {"main_profile"} base_url = f"https://www.linkedin.com/in/{username}" sections: dict[str, str] = {} - pages_visited: list[str] = [] - - # Map flags to (section_name, url_suffix, is_overlay) - page_map: list[tuple[PersonScrapingFields, str, str, bool]] = [ - (PersonScrapingFields.BASIC_INFO, "main_profile", "/", False), - ( - PersonScrapingFields.EXPERIENCE, - "experience", - "/details/experience/", - False, - ), - ( - PersonScrapingFields.EDUCATION, - "education", - "/details/education/", - False, - ), - ( - PersonScrapingFields.INTERESTS, - "interests", - "/details/interests/", - False, - ), - ( - PersonScrapingFields.HONORS, - "honors", - "/details/honors/", - False, - ), - ( - PersonScrapingFields.LANGUAGES, - "languages", - "/details/languages/", - False, - ), - ( - PersonScrapingFields.CONTACT_INFO, - "contact_info", - "/overlay/contact-info/", - True, - ), - ( 
- PersonScrapingFields.POSTS, - "posts", - "/recent-activity/all/", - False, - ), - ] - - for flag, section_name, suffix, is_overlay in page_map: - if not (flag & fields): + + for section_name, (suffix, is_overlay) in PERSON_SECTIONS.items(): + if section_name not in requested: continue url = base_url + suffix @@ -269,51 +214,33 @@ async def scrape_person( if text: sections[section_name] = text - pages_visited.append(url) except LinkedInScraperException: raise except Exception as e: logger.warning("Error scraping section %s: %s", section_name, e) - pages_visited.append(url) # Delay between navigations await asyncio.sleep(_NAV_DELAY) - # Build sections_requested from flags - requested = ["main_profile"] - reverse_map = {v: k for k, v in PERSON_SECTION_MAP.items()} - for flag in PersonScrapingFields: - if flag in fields and flag in reverse_map: - requested.append(reverse_map[flag]) - return { "url": f"{base_url}/", "sections": sections, - "pages_visited": pages_visited, - "sections_requested": requested, } async def scrape_company( - self, company_name: str, fields: CompanyScrapingFields + self, company_name: str, requested: set[str] ) -> dict[str, Any]: """Scrape a company profile with configurable sections. 
Returns: - {url, sections: {name: text}, pages_visited, sections_requested} + {url, sections: {name: text}} """ - fields |= CompanyScrapingFields.ABOUT + requested = requested | {"about"} base_url = f"https://www.linkedin.com/company/{company_name}" sections: dict[str, str] = {} - pages_visited: list[str] = [] - - page_map: list[tuple[CompanyScrapingFields, str, str]] = [ - (CompanyScrapingFields.ABOUT, "about", "/about/"), - (CompanyScrapingFields.POSTS, "posts", "/posts/"), - (CompanyScrapingFields.JOBS, "jobs", "/jobs/"), - ] - for flag, section_name, suffix in page_map: - if not (flag & fields): + for section_name, (suffix, _is_overlay) in COMPANY_SECTIONS.items(): + if section_name not in requested: continue url = base_url + suffix @@ -321,34 +248,23 @@ async def scrape_company( text = await self.extract_page(url) if text: sections[section_name] = text - pages_visited.append(url) except LinkedInScraperException: raise except Exception as e: logger.warning("Error scraping section %s: %s", section_name, e) - pages_visited.append(url) await asyncio.sleep(_NAV_DELAY) - # Build sections_requested from flags - requested = ["about"] - reverse_map = {v: k for k, v in COMPANY_SECTION_MAP.items()} - for flag in CompanyScrapingFields: - if flag in fields and flag in reverse_map: - requested.append(reverse_map[flag]) - return { "url": f"{base_url}/", "sections": sections, - "pages_visited": pages_visited, - "sections_requested": requested, } async def scrape_job(self, job_id: str) -> dict[str, Any]: """Scrape a single job posting. 
Returns: - {url, sections: {name: text}, pages_visited, sections_requested} + {url, sections: {name: text}} """ url = f"https://www.linkedin.com/jobs/view/{job_id}/" text = await self.extract_page(url) @@ -360,8 +276,6 @@ async def scrape_job(self, job_id: str) -> dict[str, Any]: return { "url": url, "sections": sections, - "pages_visited": [url], - "sections_requested": ["job_posting"], } async def search_jobs( @@ -370,7 +284,7 @@ async def search_jobs( """Search for jobs and extract the results page. Returns: - {url, sections: {name: text}, pages_visited, sections_requested} + {url, sections: {name: text}} """ params = f"keywords={quote_plus(keywords)}" if location: @@ -386,8 +300,6 @@ async def search_jobs( return { "url": url, "sections": sections, - "pages_visited": [url], - "sections_requested": ["search_results"], } async def search_people( @@ -398,7 +310,7 @@ async def search_people( """Search for people and extract the results page. Returns: - {url, sections: {name: text}, pages_visited, sections_requested} + {url, sections: {name: text}} """ params = f"keywords={quote_plus(keywords)}" if location: @@ -414,6 +326,4 @@ async def search_people( return { "url": url, "sections": sections, - "pages_visited": [url], - "sections_requested": ["search_results"], } diff --git a/linkedin_mcp_server/scraping/fields.py b/linkedin_mcp_server/scraping/fields.py index cdee9ff0..3a351b0d 100644 --- a/linkedin_mcp_server/scraping/fields.py +++ b/linkedin_mcp_server/scraping/fields.py @@ -1,106 +1,89 @@ -"""Flag enums controlling which LinkedIn pages are visited during scraping.""" +"""Section config dicts controlling which LinkedIn pages are visited during scraping.""" import logging -from enum import Flag, auto logger = logging.getLogger(__name__) - -class PersonScrapingFields(Flag): - """Controls which pages are visited when scraping a person profile.""" - - BASIC_INFO = auto() # /in/{username}/ - EXPERIENCE = auto() # /in/{username}/details/experience/ - EDUCATION = 
auto() # /in/{username}/details/education/ - INTERESTS = auto() # /in/{username}/details/interests/ - HONORS = auto() # /in/{username}/details/honors/ - LANGUAGES = auto() # /in/{username}/details/languages/ - CONTACT_INFO = auto() # /in/{username}/overlay/contact-info/ - POSTS = auto() # /in/{username}/recent-activity/all/ - - -class CompanyScrapingFields(Flag): - """Controls which pages are visited when scraping a company.""" - - ABOUT = auto() # /company/{name}/about/ - POSTS = auto() # /company/{name}/posts/ - JOBS = auto() # /company/{name}/jobs/ - - -# Section name -> flag mapping -PERSON_SECTION_MAP: dict[str, PersonScrapingFields] = { - "experience": PersonScrapingFields.EXPERIENCE, - "education": PersonScrapingFields.EDUCATION, - "interests": PersonScrapingFields.INTERESTS, - "honors": PersonScrapingFields.HONORS, - "languages": PersonScrapingFields.LANGUAGES, - "contact_info": PersonScrapingFields.CONTACT_INFO, - "posts": PersonScrapingFields.POSTS, +# Maps section name -> (url_suffix, is_overlay) +PERSON_SECTIONS: dict[str, tuple[str, bool]] = { + "main_profile": ("/", False), + "experience": ("/details/experience/", False), + "education": ("/details/education/", False), + "interests": ("/details/interests/", False), + "honors": ("/details/honors/", False), + "languages": ("/details/languages/", False), + "contact_info": ("/overlay/contact-info/", True), + "posts": ("/recent-activity/all/", False), } -COMPANY_SECTION_MAP: dict[str, CompanyScrapingFields] = { - "posts": CompanyScrapingFields.POSTS, - "jobs": CompanyScrapingFields.JOBS, +COMPANY_SECTIONS: dict[str, tuple[str, bool]] = { + "about": ("/about/", False), + "posts": ("/posts/", False), + "jobs": ("/jobs/", False), } +# Optional sections (everything except baselines) +_PERSON_OPTIONAL = set(PERSON_SECTIONS) - {"main_profile"} +_COMPANY_OPTIONAL = set(COMPANY_SECTIONS) - {"about"} + def parse_person_sections( sections: str | None, -) -> tuple[PersonScrapingFields, list[str]]: - """Parse 
comma-separated section names into PersonScrapingFields. +) -> tuple[set[str], list[str]]: + """Parse comma-separated section names into a set of requested sections. - BASIC_INFO is always included. Empty/None returns BASIC_INFO only. + "main_profile" is always included. Empty/None returns {"main_profile"} only. Unknown section names are logged as warnings and returned. Returns: - Tuple of (flags, unknown_section_names). + Tuple of (requested_sections, unknown_section_names). """ - flags = PersonScrapingFields.BASIC_INFO + requested: set[str] = {"main_profile"} unknown: list[str] = [] if not sections: - return flags, unknown + return requested, unknown for name in sections.split(","): name = name.strip().lower() if not name: continue - if name in PERSON_SECTION_MAP: - flags |= PERSON_SECTION_MAP[name] + if name in _PERSON_OPTIONAL: + requested.add(name) else: unknown.append(name) logger.warning( "Unknown person section %r ignored. Valid: %s", name, - ", ".join(sorted(PERSON_SECTION_MAP)), + ", ".join(sorted(_PERSON_OPTIONAL)), ) - return flags, unknown + return requested, unknown def parse_company_sections( sections: str | None, -) -> tuple[CompanyScrapingFields, list[str]]: - """Parse comma-separated section names into CompanyScrapingFields. +) -> tuple[set[str], list[str]]: + """Parse comma-separated section names into a set of requested sections. - ABOUT is always included. Empty/None returns ABOUT only. + "about" is always included. Empty/None returns {"about"} only. Unknown section names are logged as warnings and returned. Returns: - Tuple of (flags, unknown_section_names). + Tuple of (requested_sections, unknown_section_names). 
""" - flags = CompanyScrapingFields.ABOUT + requested: set[str] = {"about"} unknown: list[str] = [] if not sections: - return flags, unknown + return requested, unknown for name in sections.split(","): name = name.strip().lower() if not name: continue - if name in COMPANY_SECTION_MAP: - flags |= COMPANY_SECTION_MAP[name] + if name in _COMPANY_OPTIONAL: + requested.add(name) else: unknown.append(name) logger.warning( "Unknown company section %r ignored. Valid: %s", name, - ", ".join(sorted(COMPANY_SECTION_MAP)), + ", ".join(sorted(_COMPANY_OPTIONAL)), ) - return flags, unknown + return requested, unknown diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 42aa4241..6ca6ea81 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -50,13 +50,13 @@ async def get_company_profile( Default (None) scrapes only the about page. Returns: - Dict with url, sections (name -> raw text), pages_visited, and sections_requested. + Dict with url and sections (name -> raw text). The LLM should parse the raw text in each section. """ try: await ensure_authenticated() - fields, unknown = parse_company_sections(sections) + requested, unknown = parse_company_sections(sections) logger.info( "Scraping company: %s (sections=%s)", @@ -71,7 +71,7 @@ async def get_company_profile( progress=0, total=100, message="Starting company profile scrape" ) - result = await extractor.scrape_company(company_name, fields) + result = await extractor.scrape_company(company_name, requested) if unknown: result["unknown_sections"] = unknown @@ -103,7 +103,7 @@ async def get_company_posts( ctx: FastMCP context for progress reporting Returns: - Dict with url, sections (name -> raw text), pages_visited, and sections_requested. + Dict with url and sections (name -> raw text). The LLM should parse the raw text to extract individual posts. 
""" try: @@ -130,8 +130,6 @@ async def get_company_posts( return { "url": url, "sections": sections, - "pages_visited": [url], - "sections_requested": ["posts"], } except Exception as e: diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 3eadf552..ef4af8a7 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -40,7 +40,7 @@ async def get_job_details(job_id: str, ctx: Context) -> dict[str, Any]: ctx: FastMCP context for progress reporting Returns: - Dict with url, sections (name -> raw text), pages_visited, and sections_requested. + Dict with url and sections (name -> raw text). The LLM should parse the raw text to extract job details. """ try: @@ -86,7 +86,7 @@ async def search_jobs( location: Optional location filter (e.g., "San Francisco", "Remote") Returns: - Dict with url, sections (name -> raw text), pages_visited, and sections_requested. + Dict with url and sections (name -> raw text). The LLM should parse the raw text to extract job listings. """ try: diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 8ca5c856..206ebf5f 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -50,14 +50,14 @@ async def get_person_profile( Default (None) scrapes only the main profile page. Returns: - Dict with url, sections (name -> raw text), pages_visited, and sections_requested. + Dict with url and sections (name -> raw text). Sections may be absent if extraction yielded no content for that page. The LLM should parse the raw text in each section. 
""" try: await ensure_authenticated() - fields, unknown = parse_person_sections(sections) + requested, unknown = parse_person_sections(sections) logger.info( "Scraping profile: %s (sections=%s)", @@ -72,7 +72,7 @@ async def get_person_profile( progress=0, total=100, message="Starting person profile scrape" ) - result = await extractor.scrape_person(linkedin_username, fields) + result = await extractor.scrape_person(linkedin_username, requested) if unknown: result["unknown_sections"] = unknown @@ -106,7 +106,7 @@ async def search_people( location: Optional location filter (e.g., "New York", "Remote") Returns: - Dict with url, sections (name -> raw text), pages_visited, and sections_requested. + Dict with url and sections (name -> raw text). The LLM should parse the raw text to extract individual people and their profiles. """ try: diff --git a/tests/test_fields.py b/tests/test_fields.py index 0e729d3a..c3d2f076 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -1,180 +1,137 @@ -"""Tests for scraping field flag enums and section parsers.""" +"""Tests for scraping section config dicts and section parsers.""" from linkedin_mcp_server.scraping.fields import ( - COMPANY_SECTION_MAP, - PERSON_SECTION_MAP, - CompanyScrapingFields, - PersonScrapingFields, + COMPANY_SECTIONS, + PERSON_SECTIONS, parse_company_sections, parse_person_sections, ) -class TestPersonScrapingFields: - def test_atomic_flags_are_distinct(self): - flags = [ - PersonScrapingFields.BASIC_INFO, - PersonScrapingFields.EXPERIENCE, - PersonScrapingFields.EDUCATION, - PersonScrapingFields.INTERESTS, - PersonScrapingFields.HONORS, - PersonScrapingFields.LANGUAGES, - PersonScrapingFields.CONTACT_INFO, - PersonScrapingFields.POSTS, - ] - for i, a in enumerate(flags): - for b in flags[i + 1 :]: - assert a & b == PersonScrapingFields(0) - - def test_flag_bitwise_or(self): - combined = PersonScrapingFields.BASIC_INFO | PersonScrapingFields.CONTACT_INFO - assert PersonScrapingFields.BASIC_INFO in 
combined - assert PersonScrapingFields.CONTACT_INFO in combined - assert PersonScrapingFields.EXPERIENCE not in combined - - -class TestCompanyScrapingFields: - def test_atomic_flags_are_distinct(self): - flags = [ - CompanyScrapingFields.ABOUT, - CompanyScrapingFields.POSTS, - CompanyScrapingFields.JOBS, - ] - for i, a in enumerate(flags): - for b in flags[i + 1 :]: - assert a & b == CompanyScrapingFields(0) +class TestPersonSections: + def test_expected_keys(self): + expected = { + "main_profile", + "experience", + "education", + "interests", + "honors", + "languages", + "contact_info", + "posts", + } + assert set(PERSON_SECTIONS) == expected + + def test_contact_info_is_overlay(self): + _suffix, is_overlay = PERSON_SECTIONS["contact_info"] + assert is_overlay is True + + def test_non_overlay_sections(self): + for name, (_suffix, is_overlay) in PERSON_SECTIONS.items(): + if name != "contact_info": + assert is_overlay is False, f"{name} should not be an overlay" + + def test_all_suffixes_start_with_slash(self): + for name, (suffix, _) in PERSON_SECTIONS.items(): + assert suffix.startswith("/"), f"{name} suffix should start with /" + + +class TestCompanySections: + def test_expected_keys(self): + assert set(COMPANY_SECTIONS) == {"about", "posts", "jobs"} + + def test_no_overlays(self): + for name, (_suffix, is_overlay) in COMPANY_SECTIONS.items(): + assert is_overlay is False, f"{name} should not be an overlay" class TestParsePersonSections: - def test_none_returns_basic_info_only(self): - flags, unknown = parse_person_sections(None) - assert flags == PersonScrapingFields.BASIC_INFO + def test_none_returns_baseline_only(self): + requested, unknown = parse_person_sections(None) + assert requested == {"main_profile"} assert unknown == [] - def test_empty_string_returns_basic_info_only(self): - flags, unknown = parse_person_sections("") - assert flags == PersonScrapingFields.BASIC_INFO + def test_empty_string_returns_baseline_only(self): + requested, unknown = 
parse_person_sections("") + assert requested == {"main_profile"} assert unknown == [] def test_single_section(self): - flags, unknown = parse_person_sections("contact_info") - assert ( - flags == PersonScrapingFields.BASIC_INFO | PersonScrapingFields.CONTACT_INFO - ) + requested, unknown = parse_person_sections("contact_info") + assert requested == {"main_profile", "contact_info"} assert unknown == [] def test_multiple_sections(self): - flags, unknown = parse_person_sections("experience,education") - expected = ( - PersonScrapingFields.BASIC_INFO - | PersonScrapingFields.EXPERIENCE - | PersonScrapingFields.EDUCATION - ) - assert flags == expected + requested, unknown = parse_person_sections("experience,education") + assert requested == {"main_profile", "experience", "education"} assert unknown == [] def test_invalid_names_returned(self): - flags, unknown = parse_person_sections("experience,bogus,education") - expected = ( - PersonScrapingFields.BASIC_INFO - | PersonScrapingFields.EXPERIENCE - | PersonScrapingFields.EDUCATION - ) - assert flags == expected + requested, unknown = parse_person_sections("experience,bogus,education") + assert requested == {"main_profile", "experience", "education"} assert unknown == ["bogus"] def test_multiple_invalid_names(self): - flags, unknown = parse_person_sections("experience,foo,bar") - assert ( - flags == PersonScrapingFields.BASIC_INFO | PersonScrapingFields.EXPERIENCE - ) + requested, unknown = parse_person_sections("experience,foo,bar") + assert requested == {"main_profile", "experience"} assert unknown == ["foo", "bar"] def test_whitespace_and_case_handling(self): - flags, unknown = parse_person_sections(" Experience , EDUCATION ") - expected = ( - PersonScrapingFields.BASIC_INFO - | PersonScrapingFields.EXPERIENCE - | PersonScrapingFields.EDUCATION - ) - assert flags == expected + requested, unknown = parse_person_sections(" Experience , EDUCATION ") + assert requested == {"main_profile", "experience", "education"} assert 
unknown == [] def test_all_sections(self): - flags, unknown = parse_person_sections( + requested, unknown = parse_person_sections( "experience,education,interests,honors,languages,contact_info,posts" ) - expected = ( - PersonScrapingFields.BASIC_INFO - | PersonScrapingFields.EXPERIENCE - | PersonScrapingFields.EDUCATION - | PersonScrapingFields.INTERESTS - | PersonScrapingFields.HONORS - | PersonScrapingFields.LANGUAGES - | PersonScrapingFields.CONTACT_INFO - | PersonScrapingFields.POSTS - ) - assert flags == expected + assert requested == set(PERSON_SECTIONS) assert unknown == [] class TestParseCompanySections: - def test_none_returns_about_only(self): - flags, unknown = parse_company_sections(None) - assert flags == CompanyScrapingFields.ABOUT + def test_none_returns_baseline_only(self): + requested, unknown = parse_company_sections(None) + assert requested == {"about"} assert unknown == [] - def test_empty_string_returns_about_only(self): - flags, unknown = parse_company_sections("") - assert flags == CompanyScrapingFields.ABOUT + def test_empty_string_returns_baseline_only(self): + requested, unknown = parse_company_sections("") + assert requested == {"about"} assert unknown == [] def test_single_section(self): - flags, unknown = parse_company_sections("posts") - assert flags == CompanyScrapingFields.ABOUT | CompanyScrapingFields.POSTS + requested, unknown = parse_company_sections("posts") + assert requested == {"about", "posts"} assert unknown == [] def test_multiple_sections(self): - flags, unknown = parse_company_sections("posts,jobs") - expected = ( - CompanyScrapingFields.ABOUT - | CompanyScrapingFields.POSTS - | CompanyScrapingFields.JOBS - ) - assert flags == expected + requested, unknown = parse_company_sections("posts,jobs") + assert requested == {"about", "posts", "jobs"} assert unknown == [] def test_invalid_names_returned(self): - flags, unknown = parse_company_sections("posts,bogus") - assert flags == CompanyScrapingFields.ABOUT | 
CompanyScrapingFields.POSTS + requested, unknown = parse_company_sections("posts,bogus") + assert requested == {"about", "posts"} assert unknown == ["bogus"] def test_whitespace_and_case_handling(self): - flags, unknown = parse_company_sections(" Posts , JOBS ") - expected = ( - CompanyScrapingFields.ABOUT - | CompanyScrapingFields.POSTS - | CompanyScrapingFields.JOBS - ) - assert flags == expected + requested, unknown = parse_company_sections(" Posts , JOBS ") + assert requested == {"about", "posts", "jobs"} assert unknown == [] -class TestSectionMapCoverage: - """Ensure every non-baseline flag has a section map entry (drift risk).""" - - def test_person_section_map_covers_all_flags(self): - baseline = PersonScrapingFields.BASIC_INFO - mapped_flags = set(PERSON_SECTION_MAP.values()) - for flag in PersonScrapingFields: - if flag is baseline: - continue - assert flag in mapped_flags, f"{flag.name} missing from PERSON_SECTION_MAP" - - def test_company_section_map_covers_all_flags(self): - baseline = CompanyScrapingFields.ABOUT - mapped_flags = set(COMPANY_SECTION_MAP.values()) - for flag in CompanyScrapingFields: - if flag is baseline: - continue - assert flag in mapped_flags, f"{flag.name} missing from COMPANY_SECTION_MAP" +class TestConfigCompleteness: + """Ensure every config dict section has a valid suffix.""" + + def test_person_sections_all_have_suffixes(self): + for name, (suffix, _) in PERSON_SECTIONS.items(): + assert isinstance(suffix, str) and len(suffix) > 0, ( + f"{name} has empty suffix" + ) + + def test_company_sections_all_have_suffixes(self): + for name, (suffix, _) in COMPANY_SECTIONS.items(): + assert isinstance(suffix, str) and len(suffix) > 0, ( + f"{name} has empty suffix" + ) diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 42673461..d827cc0e 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -9,10 +9,6 @@ _RATE_LIMITED_MSG, strip_linkedin_noise, ) -from linkedin_mcp_server.scraping.fields import ( - 
CompanyScrapingFields, - PersonScrapingFields, -) @pytest.fixture @@ -133,8 +129,6 @@ async def test_retry_succeeds_after_rate_limit(self, mock_page): async def evaluate_side_effect(*args, **kwargs): nonlocal call_count call_count += 1 - # First two calls are from first attempt (goto triggers evaluate via - # _extract_page_once), return noise. Third+ calls return real content. if call_count <= 1: return noise_only return "Education\nHarvard University\n1973 โ€“ 1975" @@ -168,18 +162,15 @@ async def evaluate_side_effect(*args, **kwargs): class TestScrapePersonUrls: - """Test that scrape_person visits the correct URLs per field combination.""" + """Test that scrape_person visits the correct URLs per section set.""" async def test_baseline_always_included(self, mock_page): - """Passing EXPERIENCE without BASIC_INFO still visits main profile.""" + """Passing only experience still visits main profile.""" extractor = LinkedInExtractor(mock_page) with ( patch.object( - extractor, - "extract_page", - new_callable=AsyncMock, - return_value="text", - ), + extractor, "extract_page", new_callable=AsyncMock, return_value="text" + ) as mock_extract, patch.object( extractor, "_extract_overlay", @@ -187,12 +178,11 @@ async def test_baseline_always_included(self, mock_page): return_value="", ), ): - result = await extractor.scrape_person( - "testuser", PersonScrapingFields.EXPERIENCE - ) + result = await extractor.scrape_person("testuser", {"experience"}) - urls = result["pages_visited"] - assert any("/in/testuser/" in u for u in urls), "main profile should be visited" + urls = [call.args[0] for call in mock_extract.call_args_list] + assert "main_profile" in result["sections"] + assert any(u.endswith("/in/testuser/") for u in urls) assert any("/details/experience/" in u for u in urls) async def test_basic_info_only_visits_main_profile(self, mock_page): @@ -203,7 +193,7 @@ async def test_basic_info_only_visits_main_profile(self, mock_page): "extract_page", new_callable=AsyncMock, 
return_value="profile text", - ), + ) as mock_extract, patch.object( extractor, "_extract_overlay", @@ -211,28 +201,19 @@ async def test_basic_info_only_visits_main_profile(self, mock_page): return_value="", ), ): - result = await extractor.scrape_person( - "testuser", PersonScrapingFields.BASIC_INFO - ) + result = await extractor.scrape_person("testuser", {"main_profile"}) - assert len(result["pages_visited"]) == 1 - assert "https://www.linkedin.com/in/testuser/" in result["pages_visited"] - assert result["sections_requested"] == ["main_profile"] + urls = [call.args[0] for call in mock_extract.call_args_list] + assert len(urls) == 1 + assert urls[0].endswith("/in/testuser/") + assert set(result["sections"]) == {"main_profile"} - async def test_experience_education_visits_three_pages(self, mock_page): + async def test_experience_education_visits_correct_urls(self, mock_page): extractor = LinkedInExtractor(mock_page) - fields = ( - PersonScrapingFields.BASIC_INFO - | PersonScrapingFields.EXPERIENCE - | PersonScrapingFields.EDUCATION - ) with ( patch.object( - extractor, - "extract_page", - new_callable=AsyncMock, - return_value="text", - ), + extractor, "extract_page", new_callable=AsyncMock, return_value="text" + ) as mock_extract, patch.object( extractor, "_extract_overlay", @@ -240,60 +221,55 @@ async def test_experience_education_visits_three_pages(self, mock_page): return_value="", ), ): - result = await extractor.scrape_person("testuser", fields) + result = await extractor.scrape_person( + "testuser", {"main_profile", "experience", "education"} + ) - urls = result["pages_visited"] + urls = [call.args[0] for call in mock_extract.call_args_list] assert len(urls) == 3 - assert any("/in/testuser/" in u for u in urls) + assert any(u.endswith("/in/testuser/") for u in urls) assert any("/details/experience/" in u for u in urls) assert any("/details/education/" in u for u in urls) - assert result["sections_requested"] == [ + assert set(result["sections"]) == 
{"main_profile", "experience", "education"} + + async def test_all_sections_visit_all_urls(self, mock_page): + extractor = LinkedInExtractor(mock_page) + all_sections = { "main_profile", "experience", "education", - ] - - async def test_all_flags_visit_all_pages(self, mock_page): - extractor = LinkedInExtractor(mock_page) - fields = ( - PersonScrapingFields.BASIC_INFO - | PersonScrapingFields.EXPERIENCE - | PersonScrapingFields.EDUCATION - | PersonScrapingFields.INTERESTS - | PersonScrapingFields.HONORS - | PersonScrapingFields.LANGUAGES - | PersonScrapingFields.CONTACT_INFO - | PersonScrapingFields.POSTS - ) + "interests", + "honors", + "languages", + "contact_info", + "posts", + } with ( patch.object( - extractor, - "extract_page", - new_callable=AsyncMock, - return_value="text", - ), + extractor, "extract_page", new_callable=AsyncMock, return_value="text" + ) as mock_extract, patch.object( extractor, "_extract_overlay", new_callable=AsyncMock, return_value="contact text", - ), + ) as mock_overlay, ): - result = await extractor.scrape_person("testuser", fields) - - urls = result["pages_visited"] - # main_profile, experience, education, interests, honors, languages, contact_info, posts - assert len(urls) == 8 - assert result["sections_requested"] == [ - "main_profile", - "experience", - "education", - "interests", - "honors", - "languages", - "contact_info", - "posts", - ] + result = await extractor.scrape_person("testuser", all_sections) + + page_urls = [call.args[0] for call in mock_extract.call_args_list] + overlay_urls = [call.args[0] for call in mock_overlay.call_args_list] + all_urls = page_urls + overlay_urls + # Verify each expected suffix was navigated + assert any(u.endswith("/in/testuser/") for u in all_urls) + assert any("/details/experience/" in u for u in all_urls) + assert any("/details/education/" in u for u in all_urls) + assert any("/details/interests/" in u for u in all_urls) + assert any("/details/honors/" in u for u in all_urls) + assert 
any("/details/languages/" in u for u in all_urls) + assert any("/overlay/contact-info/" in u for u in overlay_urls) + assert any("/recent-activity/all/" in u for u in all_urls) + assert set(result["sections"]) == all_sections async def test_posts_visits_recent_activity(self, mock_page): extractor = LinkedInExtractor(mock_page) @@ -302,31 +278,22 @@ async def test_posts_visits_recent_activity(self, mock_page): "extract_page", new_callable=AsyncMock, return_value="Post 1\nPost 2", - ): - result = await extractor.scrape_person( - "test-user", PersonScrapingFields.POSTS - ) - assert any("/recent-activity/all/" in url for url in result["pages_visited"]) + ) as mock_extract: + result = await extractor.scrape_person("test-user", {"posts"}) + + urls = [call.args[0] for call in mock_extract.call_args_list] + assert any("/recent-activity/all/" in url for url in urls) assert "posts" in result["sections"] - assert result["sections_requested"] == ["main_profile", "posts"] async def test_error_isolation(self, mock_page): """One section failing doesn't block others.""" - call_count = 0 async def extract_with_failure(url): - nonlocal call_count - call_count += 1 if "experience" in url: raise Exception("Simulated failure") return f"text for {url}" extractor = LinkedInExtractor(mock_page) - fields = ( - PersonScrapingFields.BASIC_INFO - | PersonScrapingFields.EXPERIENCE - | PersonScrapingFields.EDUCATION - ) with ( patch.object( extractor, @@ -340,32 +307,30 @@ async def extract_with_failure(url): return_value="", ), ): - result = await extractor.scrape_person("testuser", fields) + result = await extractor.scrape_person( + "testuser", {"main_profile", "experience", "education"} + ) - # All 3 pages should be visited even though experience failed - assert len(result["pages_visited"]) == 3 # main_profile and education should have sections, experience should not assert "main_profile" in result["sections"] assert "education" in result["sections"] + assert "experience" not in 
result["sections"] class TestScrapeCompany: async def test_company_baseline_always_included(self, mock_page): - """Passing POSTS without ABOUT still visits about page.""" + """Passing only posts still visits about page.""" extractor = LinkedInExtractor(mock_page) with patch.object( - extractor, - "extract_page", - new_callable=AsyncMock, - return_value="text", - ): - result = await extractor.scrape_company( - "testcorp", CompanyScrapingFields.POSTS - ) + extractor, "extract_page", new_callable=AsyncMock, return_value="text" + ) as mock_extract: + result = await extractor.scrape_company("testcorp", {"posts"}) - urls = result["pages_visited"] - assert any("/about/" in u for u in urls), "about page should be visited" + urls = [call.args[0] for call in mock_extract.call_args_list] + assert any("/about/" in u for u in urls) assert any("/posts/" in u for u in urls) + assert "about" in result["sections"] + assert "posts" in result["sections"] async def test_about_only_visits_about(self, mock_page): extractor = LinkedInExtractor(mock_page) @@ -374,32 +339,29 @@ async def test_about_only_visits_about(self, mock_page): "extract_page", new_callable=AsyncMock, return_value="about text", - ): - result = await extractor.scrape_company( - "testcorp", CompanyScrapingFields.ABOUT - ) + ) as mock_extract: + result = await extractor.scrape_company("testcorp", {"about"}) - assert len(result["pages_visited"]) == 1 - assert any("/about/" in u for u in result["pages_visited"]) - assert result["sections_requested"] == ["about"] + urls = [call.args[0] for call in mock_extract.call_args_list] + assert len(urls) == 1 + assert "/about/" in urls[0] + assert set(result["sections"]) == {"about"} - async def test_all_flags_visit_about_posts_jobs(self, mock_page): + async def test_all_sections_visit_correct_urls(self, mock_page): extractor = LinkedInExtractor(mock_page) - fields = ( - CompanyScrapingFields.ABOUT - | CompanyScrapingFields.POSTS - | CompanyScrapingFields.JOBS - ) with patch.object( 
- extractor, - "extract_page", - new_callable=AsyncMock, - return_value="text", - ): - result = await extractor.scrape_company("testcorp", fields) + extractor, "extract_page", new_callable=AsyncMock, return_value="text" + ) as mock_extract: + result = await extractor.scrape_company( + "testcorp", {"about", "posts", "jobs"} + ) - assert len(result["pages_visited"]) == 3 - assert result["sections_requested"] == ["about", "posts", "jobs"] + urls = [call.args[0] for call in mock_extract.call_args_list] + assert len(urls) == 3 + assert any("/about/" in u for u in urls) + assert any("/posts/" in u for u in urls) + assert any("/jobs/" in u for u in urls) + assert set(result["sections"]) == {"about", "posts", "jobs"} class TestScrapeJob: @@ -415,7 +377,8 @@ async def test_scrape_job(self, mock_page): assert result["url"] == "https://www.linkedin.com/jobs/view/12345/" assert "job_posting" in result["sections"] - assert result["sections_requested"] == ["job_posting"] + assert "pages_visited" not in result + assert "sections_requested" not in result async def test_search_jobs(self, mock_page): extractor = LinkedInExtractor(mock_page) @@ -430,7 +393,8 @@ async def test_search_jobs(self, mock_page): assert "keywords=python" in result["url"] assert "location=Remote" in result["url"] assert "search_results" in result["sections"] - assert result["sections_requested"] == ["search_results"] + assert "pages_visited" not in result + assert "sections_requested" not in result class TestStripLinkedInNoise: diff --git a/tests/test_tools.py b/tests/test_tools.py index 9f0f1b7f..2764ab6e 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -52,8 +52,6 @@ async def test_get_person_profile_success( expected = { "url": "https://www.linkedin.com/in/test-user/", "sections": {"main_profile": "John Doe\nSoftware Engineer"}, - "pages_visited": ["https://www.linkedin.com/in/test-user/"], - "sections_requested": ["main_profile"], } mock_extractor = _make_mock_extractor(expected) 
monkeypatch.setattr( @@ -70,7 +68,8 @@ async def test_get_person_profile_success( result = await tool_fn("test-user", mock_context) assert result["url"] == "https://www.linkedin.com/in/test-user/" assert "main_profile" in result["sections"] - assert result["sections_requested"] == ["main_profile"] + assert "pages_visited" not in result + assert "sections_requested" not in result async def test_get_person_profile_with_sections( self, mock_context, patch_tool_deps, monkeypatch @@ -83,12 +82,6 @@ async def test_get_person_profile_with_sections( "experience": "Work history", "contact_info": "Email: test@test.com", }, - "pages_visited": [ - "https://www.linkedin.com/in/test-user/", - "https://www.linkedin.com/in/test-user/details/experience/", - "https://www.linkedin.com/in/test-user/overlay/contact-info/", - ], - "sections_requested": ["main_profile", "experience", "contact_info"], } mock_extractor = _make_mock_extractor(expected) monkeypatch.setattr( @@ -105,12 +98,14 @@ async def test_get_person_profile_with_sections( result = await tool_fn( "test-user", mock_context, sections="experience,contact_info" ) - assert result["sections_requested"] == [ - "main_profile", - "experience", - "contact_info", - ] - mock_extractor.scrape_person.assert_awaited_once() + assert "main_profile" in result["sections"] + assert "experience" in result["sections"] + assert "contact_info" in result["sections"] + # Verify scrape_person was called with a set[str] + call_args = mock_extractor.scrape_person.call_args + assert isinstance(call_args[0][1], set) + assert "experience" in call_args[0][1] + assert "contact_info" in call_args[0][1] async def test_get_person_profile_error(self, mock_context, monkeypatch): from linkedin_mcp_server.exceptions import SessionExpiredError @@ -133,10 +128,6 @@ async def test_search_people(self, mock_context, patch_tool_deps, monkeypatch): expected = { "url": "https://www.linkedin.com/search/results/people/?keywords=AI+engineer&location=New+York", "sections": 
{"search_results": "Jane Doe\nAI Engineer at Acme\nNew York"}, - "pages_visited": [ - "https://www.linkedin.com/search/results/people/?keywords=AI+engineer&location=New+York" - ], - "sections_requested": ["search_results"], } mock_extractor = _make_mock_extractor(expected) monkeypatch.setattr( @@ -152,6 +143,7 @@ async def test_search_people(self, mock_context, patch_tool_deps, monkeypatch): tool_fn = await get_tool_fn(mcp, "search_people") result = await tool_fn("AI engineer", mock_context, location="New York") assert "search_results" in result["sections"] + assert "pages_visited" not in result mock_extractor.search_people.assert_awaited_once_with("AI engineer", "New York") @@ -162,8 +154,6 @@ async def test_get_company_profile( expected = { "url": "https://www.linkedin.com/company/testcorp/", "sections": {"about": "TestCorp\nWe build things"}, - "pages_visited": ["https://www.linkedin.com/company/testcorp/about/"], - "sections_requested": ["about"], } mock_extractor = _make_mock_extractor(expected) monkeypatch.setattr( @@ -179,6 +169,7 @@ async def test_get_company_profile( tool_fn = await get_tool_fn(mcp, "get_company_profile") result = await tool_fn("testcorp", mock_context) assert "about" in result["sections"] + assert "pages_visited" not in result async def test_get_company_posts(self, mock_context, patch_tool_deps, monkeypatch): mock_extractor = MagicMock() @@ -197,7 +188,8 @@ async def test_get_company_posts(self, mock_context, patch_tool_deps, monkeypatc result = await tool_fn("testcorp", mock_context) assert "posts" in result["sections"] assert result["sections"]["posts"] == "Post 1\nPost 2" - assert result["sections_requested"] == ["posts"] + assert "pages_visited" not in result + assert "sections_requested" not in result class TestJobTools: @@ -205,8 +197,6 @@ async def test_get_job_details(self, mock_context, patch_tool_deps, monkeypatch) expected = { "url": "https://www.linkedin.com/jobs/view/12345/", "sections": {"job_posting": "Software 
Engineer\nGreat opportunity"}, - "pages_visited": ["https://www.linkedin.com/jobs/view/12345/"], - "sections_requested": ["job_posting"], } mock_extractor = _make_mock_extractor(expected) monkeypatch.setattr( @@ -222,13 +212,12 @@ async def test_get_job_details(self, mock_context, patch_tool_deps, monkeypatch) tool_fn = await get_tool_fn(mcp, "get_job_details") result = await tool_fn("12345", mock_context) assert "job_posting" in result["sections"] + assert "pages_visited" not in result async def test_search_jobs(self, mock_context, patch_tool_deps, monkeypatch): expected = { "url": "https://www.linkedin.com/jobs/search/?keywords=python", "sections": {"search_results": "Job 1\nJob 2"}, - "pages_visited": ["https://www.linkedin.com/jobs/search/?keywords=python"], - "sections_requested": ["search_results"], } mock_extractor = _make_mock_extractor(expected) monkeypatch.setattr( @@ -244,3 +233,4 @@ async def test_search_jobs(self, mock_context, patch_tool_deps, monkeypatch): tool_fn = await get_tool_fn(mcp, "search_jobs") result = await tool_fn("python", mock_context, location="Remote") assert "search_results" in result["sections"] + assert "pages_visited" not in result From 5b3202d9ce7ed0dd6c6f963e85de7ca403261543 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 17:37:39 +0100 Subject: [PATCH 415/565] fix(scraping): Address PR review feedback - Validate section names against full config dicts instead of optional-only subsets so baseline names (main_profile, about) don't produce spurious unknown warnings - Remove unused _PERSON_OPTIONAL/_COMPANY_OPTIONAL sets - Fix test name reference in CONTRIBUTING.md checklist - Add assert_awaited_once to test_get_person_profile_with_sections - Add tests for baseline sections passed explicitly Resolves: #180 --- CONTRIBUTING.md | 2 +- linkedin_mcp_server/scraping/fields.py | 12 ++++-------- tests/test_fields.py | 10 ++++++++++ tests/test_tools.py | 3 ++- 4 files changed, 17 insertions(+), 10 deletions(-) diff 
--git a/CONTRIBUTING.md b/CONTRIBUTING.md index 47cf22a1..8af69697 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -69,7 +69,7 @@ When adding a section to an existing tool (e.g., adding "certifications" to `get - [ ] Add to `test_expected_keys` (`tests/test_fields.py`) - [ ] Add to `test_all_sections` parse test (`tests/test_fields.py`) -- [ ] Update `test_all_sections_visit_all_pages` โ€” add section to set, update assertions (`tests/test_scraping.py`) +- [ ] Update `test_all_sections_visit_all_urls` โ€” add section to set, update assertions (`tests/test_scraping.py`) - [ ] Add dedicated navigation test (e.g., `test_certifications_visits_details_page`) (`tests/test_scraping.py`) ### Docs diff --git a/linkedin_mcp_server/scraping/fields.py b/linkedin_mcp_server/scraping/fields.py index 3a351b0d..a0c986eb 100644 --- a/linkedin_mcp_server/scraping/fields.py +++ b/linkedin_mcp_server/scraping/fields.py @@ -22,10 +22,6 @@ "jobs": ("/jobs/", False), } -# Optional sections (everything except baselines) -_PERSON_OPTIONAL = set(PERSON_SECTIONS) - {"main_profile"} -_COMPANY_OPTIONAL = set(COMPANY_SECTIONS) - {"about"} - def parse_person_sections( sections: str | None, @@ -46,14 +42,14 @@ def parse_person_sections( name = name.strip().lower() if not name: continue - if name in _PERSON_OPTIONAL: + if name in PERSON_SECTIONS: requested.add(name) else: unknown.append(name) logger.warning( "Unknown person section %r ignored. Valid: %s", name, - ", ".join(sorted(_PERSON_OPTIONAL)), + ", ".join(sorted(PERSON_SECTIONS)), ) return requested, unknown @@ -77,13 +73,13 @@ def parse_company_sections( name = name.strip().lower() if not name: continue - if name in _COMPANY_OPTIONAL: + if name in COMPANY_SECTIONS: requested.add(name) else: unknown.append(name) logger.warning( "Unknown company section %r ignored. 
Valid: %s", name, - ", ".join(sorted(_COMPANY_OPTIONAL)), + ", ".join(sorted(COMPANY_SECTIONS)), ) return requested, unknown diff --git a/tests/test_fields.py b/tests/test_fields.py index c3d2f076..9a128c01 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -81,6 +81,11 @@ def test_whitespace_and_case_handling(self): assert requested == {"main_profile", "experience", "education"} assert unknown == [] + def test_baseline_passed_explicitly_not_unknown(self): + requested, unknown = parse_person_sections("main_profile,experience") + assert requested == {"main_profile", "experience"} + assert unknown == [] + def test_all_sections(self): requested, unknown = parse_person_sections( "experience,education,interests,honors,languages,contact_info,posts" @@ -115,6 +120,11 @@ def test_invalid_names_returned(self): assert requested == {"about", "posts"} assert unknown == ["bogus"] + def test_baseline_passed_explicitly_not_unknown(self): + requested, unknown = parse_company_sections("about,posts") + assert requested == {"about", "posts"} + assert unknown == [] + def test_whitespace_and_case_handling(self): requested, unknown = parse_company_sections(" Posts , JOBS ") assert requested == {"about", "posts", "jobs"} diff --git a/tests/test_tools.py b/tests/test_tools.py index 2764ab6e..8f653777 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -101,7 +101,8 @@ async def test_get_person_profile_with_sections( assert "main_profile" in result["sections"] assert "experience" in result["sections"] assert "contact_info" in result["sections"] - # Verify scrape_person was called with a set[str] + # Verify scrape_person was called exactly once with a set[str] + mock_extractor.scrape_person.assert_awaited_once() call_args = mock_extractor.scrape_person.call_args assert isinstance(call_args[0][1], set) assert "experience" in call_args[0][1] From d02127d8c60417a42d3907b148e393c380928de8 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 17:42:52 +0100 
Subject: [PATCH 416/565] docs(agents): Update scraping architecture description Replace "Flag-based section selection" with "explicit section selection" to match refactored config dict approach. --- AGENTS.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AGENTS.md b/AGENTS.md index 0189918e..b81474b2 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -41,7 +41,7 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis - `tools/` - LinkedIn scraping tools (person, company, job profiles) - `drivers/browser.py` - Patchright browser management with persistent profile (singleton) - `core/` - Inlined browser, auth, and utility code (replaces `linkedin_scraper` dependency) -- `scraping/` - innerText extraction engine with Flag-based section selection +- `scraping/` - innerText extraction engine with explicit section selection - `config/` - Configuration management (schema, loaders) - `authentication.py` - LinkedIn profile-based authentication From 15709aad2d0337ff63211633929e2fe30d19d4bd Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 18:10:23 +0100 Subject: [PATCH 417/565] fix(scraping): Address review findings and patch test sleeps - Dispatch on is_overlay in scrape_company for consistency - Move nav delay before each navigation except first - Patch asyncio.sleep in TestScrapePersonUrls/TestScrapeCompany - Document unknown_sections in return format docs Resolves: #180 --- AGENTS.md | 2 +- CONTRIBUTING.md | 2 + linkedin_mcp_server/scraping/extractor.py | 23 +++++-- tests/test_scraping.py | 80 ++++++++++++++++++----- 4 files changed, 81 insertions(+), 26 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index b81474b2..6fd6e73d 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -65,7 +65,7 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis **Tool Return Format:** -All scraping tools return: `{url, sections: {name: raw_text}}` +All scraping tools return: `{url, sections: {name: raw_text}}`. 
When unknown section names are provided, an `unknown_sections: [name, ...]` key is also included. **Scraping Architecture (`scraping/`):** diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8af69697..9d1d8107 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -54,6 +54,8 @@ for section_name, (suffix, is_overlay) in PERSON_SECTIONS.items(): ```python {"url": str, "sections": {name: raw_text}} +# When unknown section names are provided: +{"url": str, "sections": {name: raw_text}, "unknown_sections": [name, ...]} ``` ## Checklist: Adding a New Section diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 5a99daaf..7634c9cf 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -201,10 +201,15 @@ async def scrape_person(self, username: str, requested: set[str]) -> dict[str, A base_url = f"https://www.linkedin.com/in/{username}" sections: dict[str, str] = {} + first = True for section_name, (suffix, is_overlay) in PERSON_SECTIONS.items(): if section_name not in requested: continue + if not first: + await asyncio.sleep(_NAV_DELAY) + first = False + url = base_url + suffix try: if is_overlay: @@ -219,9 +224,6 @@ async def scrape_person(self, username: str, requested: set[str]) -> dict[str, A except Exception as e: logger.warning("Error scraping section %s: %s", section_name, e) - # Delay between navigations - await asyncio.sleep(_NAV_DELAY) - return { "url": f"{base_url}/", "sections": sections, @@ -239,13 +241,22 @@ async def scrape_company( base_url = f"https://www.linkedin.com/company/{company_name}" sections: dict[str, str] = {} - for section_name, (suffix, _is_overlay) in COMPANY_SECTIONS.items(): + first = True + for section_name, (suffix, is_overlay) in COMPANY_SECTIONS.items(): if section_name not in requested: continue + if not first: + await asyncio.sleep(_NAV_DELAY) + first = False + url = base_url + suffix try: - text = await self.extract_page(url) + if 
is_overlay: + text = await self._extract_overlay(url) + else: + text = await self.extract_page(url) + if text: sections[section_name] = text except LinkedInScraperException: @@ -253,8 +264,6 @@ async def scrape_company( except Exception as e: logger.warning("Error scraping section %s: %s", section_name, e) - await asyncio.sleep(_NAV_DELAY) - return { "url": f"{base_url}/", "sections": sections, diff --git a/tests/test_scraping.py b/tests/test_scraping.py index d827cc0e..7cc5f14c 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -177,6 +177,10 @@ async def test_baseline_always_included(self, mock_page): new_callable=AsyncMock, return_value="", ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), ): result = await extractor.scrape_person("testuser", {"experience"}) @@ -200,6 +204,10 @@ async def test_basic_info_only_visits_main_profile(self, mock_page): new_callable=AsyncMock, return_value="", ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), ): result = await extractor.scrape_person("testuser", {"main_profile"}) @@ -220,6 +228,10 @@ async def test_experience_education_visits_correct_urls(self, mock_page): new_callable=AsyncMock, return_value="", ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), ): result = await extractor.scrape_person( "testuser", {"main_profile", "experience", "education"} @@ -254,6 +266,10 @@ async def test_all_sections_visit_all_urls(self, mock_page): new_callable=AsyncMock, return_value="contact text", ) as mock_overlay, + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), ): result = await extractor.scrape_person("testuser", all_sections) @@ -273,12 +289,18 @@ async def test_all_sections_visit_all_urls(self, mock_page): async def test_posts_visits_recent_activity(self, mock_page): extractor = LinkedInExtractor(mock_page) - with 
patch.object( - extractor, - "extract_page", - new_callable=AsyncMock, - return_value="Post 1\nPost 2", - ) as mock_extract: + with ( + patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value="Post 1\nPost 2", + ) as mock_extract, + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): result = await extractor.scrape_person("test-user", {"posts"}) urls = [call.args[0] for call in mock_extract.call_args_list] @@ -306,6 +328,10 @@ async def extract_with_failure(url): new_callable=AsyncMock, return_value="", ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), ): result = await extractor.scrape_person( "testuser", {"main_profile", "experience", "education"} @@ -321,9 +347,15 @@ class TestScrapeCompany: async def test_company_baseline_always_included(self, mock_page): """Passing only posts still visits about page.""" extractor = LinkedInExtractor(mock_page) - with patch.object( - extractor, "extract_page", new_callable=AsyncMock, return_value="text" - ) as mock_extract: + with ( + patch.object( + extractor, "extract_page", new_callable=AsyncMock, return_value="text" + ) as mock_extract, + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): result = await extractor.scrape_company("testcorp", {"posts"}) urls = [call.args[0] for call in mock_extract.call_args_list] @@ -334,12 +366,18 @@ async def test_company_baseline_always_included(self, mock_page): async def test_about_only_visits_about(self, mock_page): extractor = LinkedInExtractor(mock_page) - with patch.object( - extractor, - "extract_page", - new_callable=AsyncMock, - return_value="about text", - ) as mock_extract: + with ( + patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value="about text", + ) as mock_extract, + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + 
): result = await extractor.scrape_company("testcorp", {"about"}) urls = [call.args[0] for call in mock_extract.call_args_list] @@ -349,9 +387,15 @@ async def test_about_only_visits_about(self, mock_page): async def test_all_sections_visit_correct_urls(self, mock_page): extractor = LinkedInExtractor(mock_page) - with patch.object( - extractor, "extract_page", new_callable=AsyncMock, return_value="text" - ) as mock_extract: + with ( + patch.object( + extractor, "extract_page", new_callable=AsyncMock, return_value="text" + ) as mock_extract, + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): result = await extractor.scrape_company( "testcorp", {"about", "posts", "jobs"} ) From adf65e64e8cb2cb7b9963ad14b4437ed4305160c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 18:57:22 +0100 Subject: [PATCH 418/565] test(scraping): Add navigation count assertions Assert exact number of page and overlay navigations in test_all_sections_visit_all_urls to catch duplicate visits. 
--- tests/test_scraping.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 7cc5f14c..33b05a26 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -276,6 +276,9 @@ async def test_all_sections_visit_all_urls(self, mock_page): page_urls = [call.args[0] for call in mock_extract.call_args_list] overlay_urls = [call.args[0] for call in mock_overlay.call_args_list] all_urls = page_urls + overlay_urls + # 7 full-page sections + 1 overlay (contact_info) + assert len(page_urls) == 7 + assert len(overlay_urls) == 1 # Verify each expected suffix was navigated assert any(u.endswith("/in/testuser/") for u in all_urls) assert any("/details/experience/" in u for u in all_urls) From 9d29d1402ab70d0c8d26380f9141dd51cc737747 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 19:11:29 +0100 Subject: [PATCH 419/565] fix(tools): Document and test unknown_sections return key - Add unknown_sections to tool docstrings in person.py and company.py - Add integration tests for unknown section names in both tools - Document Greptile review endpoints in AGENTS.md --- AGENTS.md | 10 +++++++ linkedin_mcp_server/tools/company.py | 1 + linkedin_mcp_server/tools/person.py | 1 + tests/test_tools.py | 44 ++++++++++++++++++++++++++++ 4 files changed, 56 insertions(+) diff --git a/AGENTS.md b/AGENTS.md index 6fd6e73d..350d94ae 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -138,6 +138,16 @@ All scraping tools return: `{url, sections: {name: raw_text}}`. When unknown sec 8. Manually review the PR and merge it if it's approved. Do not squash the commits. 9. Delete the branch after the PR is merged. +## PR Reviews + +Greptile posts initial reviews as PR review comments, but follow-ups as **issue comments**. Always check both. To trigger a re-review, comment `@greptileai review` on the PR. 
+ +```bash +gh api repos/{owner}/{repo}/pulls/{pr}/reviews # initial reviews +gh api repos/{owner}/{repo}/pulls/{pr}/comments # inline comments +gh api repos/{owner}/{repo}/issues/{pr}/comments # follow-up reviews +``` + ## btca When you need up-to-date information about technologies used in this project, use btca to query source repositories directly. diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 6ca6ea81..9699ee5c 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -51,6 +51,7 @@ async def get_company_profile( Returns: Dict with url and sections (name -> raw text). + Includes unknown_sections list when unrecognised names are passed. The LLM should parse the raw text in each section. """ try: diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 206ebf5f..3fddf2ad 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -52,6 +52,7 @@ async def get_person_profile( Returns: Dict with url and sections (name -> raw text). Sections may be absent if extraction yielded no content for that page. + Includes unknown_sections list when unrecognised names are passed. The LLM should parse the raw text in each section. 
""" try: diff --git a/tests/test_tools.py b/tests/test_tools.py index 8f653777..c1950326 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -108,6 +108,28 @@ async def test_get_person_profile_with_sections( assert "experience" in call_args[0][1] assert "contact_info" in call_args[0][1] + async def test_get_person_profile_unknown_section( + self, mock_context, patch_tool_deps, monkeypatch + ): + expected = { + "url": "https://www.linkedin.com/in/test-user/", + "sections": {"main_profile": "John Doe"}, + } + mock_extractor = _make_mock_extractor(expected) + monkeypatch.setattr( + "linkedin_mcp_server.tools.person.LinkedInExtractor", + lambda *a, **kw: mock_extractor, + ) + + from linkedin_mcp_server.tools.person import register_person_tools + + mcp = FastMCP("test") + register_person_tools(mcp) + + tool_fn = await get_tool_fn(mcp, "get_person_profile") + result = await tool_fn("test-user", mock_context, sections="bogus_section") + assert result["unknown_sections"] == ["bogus_section"] + async def test_get_person_profile_error(self, mock_context, monkeypatch): from linkedin_mcp_server.exceptions import SessionExpiredError @@ -172,6 +194,28 @@ async def test_get_company_profile( assert "about" in result["sections"] assert "pages_visited" not in result + async def test_get_company_profile_unknown_section( + self, mock_context, patch_tool_deps, monkeypatch + ): + expected = { + "url": "https://www.linkedin.com/company/testcorp/", + "sections": {"about": "TestCorp\nWe build things"}, + } + mock_extractor = _make_mock_extractor(expected) + monkeypatch.setattr( + "linkedin_mcp_server.tools.company.LinkedInExtractor", + lambda *a, **kw: mock_extractor, + ) + + from linkedin_mcp_server.tools.company import register_company_tools + + mcp = FastMCP("test") + register_company_tools(mcp) + + tool_fn = await get_tool_fn(mcp, "get_company_profile") + result = await tool_fn("testcorp", mock_context, sections="bogus") + assert result["unknown_sections"] == ["bogus"] + 
async def test_get_company_posts(self, mock_context, patch_tool_deps, monkeypatch): mock_extractor = MagicMock() mock_extractor.extract_page = AsyncMock(return_value="Post 1\nPost 2") From 0c0e1257b8ebd04d743984e8a53978622e4eb07b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 19:23:44 +0100 Subject: [PATCH 420/565] test(scraping): Add missing _extract_overlay mock Patch _extract_overlay in test_posts_visits_recent_activity for consistency with other TestScrapePersonUrls tests. --- tests/test_scraping.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 33b05a26..c97440dd 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -299,6 +299,12 @@ async def test_posts_visits_recent_activity(self, mock_page): new_callable=AsyncMock, return_value="Post 1\nPost 2", ) as mock_extract, + patch.object( + extractor, + "_extract_overlay", + new_callable=AsyncMock, + return_value="", + ), patch( "linkedin_mcp_server.scraping.extractor.asyncio.sleep", new_callable=AsyncMock, From d303613eb7f6f9485c4ca7a9a09a4351faa5f719 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 19:43:07 +0100 Subject: [PATCH 421/565] docs(manifest): Add people search to top-level description --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 313bc072..8eb90bfb 100644 --- a/manifest.json +++ b/manifest.json @@ -3,7 +3,7 @@ "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", "version": "4.1.2", - "description": "Connect Claude to LinkedIn for profiles, companies, and job details", + "description": "Connect Claude to LinkedIn for profiles, companies, job details, and people search", "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n- **People Search**: Search for people by keywords and location\n- **Company Posts**: Get recent posts from a company's LinkedIn feed\n- **Person Posts**: Get recent activity and posts from a person's profile\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.1.2", "author": { "name": "Daniel Sticker", From 777f9ded446ef13a04540a1f6f44338f26d5fa72 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 19:53:21 +0100 Subject: [PATCH 422/565] docs(manifest): Add people, search, posts keywords --- manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifest.json b/manifest.json index 8eb90bfb..f73655d5 100644 --- a/manifest.json +++ b/manifest.json @@ -14,7 +14,7 @@ "documentation": "https://github.com/stickerdaniel/linkedin-mcp-server#readme", "support": "https://github.com/stickerdaniel/linkedin-mcp-server/issues", "license": "MIT", - "keywords": ["linkedin", "scraping", "mcp", "profiles", "companies", "jobs"], + "keywords": ["linkedin", "scraping", "mcp", "profiles", "companies", "jobs", "people", "search", "posts"], "icon": "assets/icons/linkedin.svg", "screenshots": ["assets/screenshots/screenshot.png"], "server": { From e18519d929a21fc457b0e0ca74ce8a09afa94daf Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 4 Mar 2026 19:00:18 +0000 Subject: [PATCH 423/565] chore(deps): lock file maintenance --- uv.lock | 896 
++++++++++++++++++++++---------------------------------- 1 file changed, 350 insertions(+), 546 deletions(-) diff --git a/uv.lock b/uv.lock index eab45f07..742d0d99 100644 --- a/uv.lock +++ b/uv.lock @@ -1,7 +1,19 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.12" +[[package]] +name = "aiofile" +version = "3.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "caio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/67/e2/d7cb819de8df6b5c1968a2756c3cb4122d4fa2b8fc768b53b7c9e5edb646/aiofile-3.9.0.tar.gz", hash = "sha256:e5ad718bb148b265b6df1b3752c4d1d83024b93da9bd599df74b9d9ffcf7919b", size = 17943, upload-time = "2024-10-08T10:39:35.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/50/25/da1f0b4dd970e52bf5a36c204c107e11a0c6d3ed195eba0bfbc664c312b2/aiofile-3.9.0-py3-none-any.whl", hash = "sha256:ce2f6c1571538cbdfa0143b04e16b208ecb0e9cb4148e528af8a640ed51cc8aa", size = 19539, upload-time = "2024-10-08T10:39:32.955Z" }, +] + [[package]] name = "aiohappyeyeballs" version = "2.6.1" @@ -109,15 +121,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, ] -[[package]] -name = "annotated-doc" -version = "0.0.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = 
"sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, -] - [[package]] name = "annotated-types" version = "0.7.0" @@ -160,14 +163,14 @@ wheels = [ [[package]] name = "authlib" -version = "1.6.8" +version = "1.6.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6b/6c/c88eac87468c607f88bc24df1f3b31445ee6fc9ba123b09e666adf687cd9/authlib-1.6.8.tar.gz", hash = "sha256:41ae180a17cf672bc784e4a518e5c82687f1fe1e98b0cafaeda80c8e4ab2d1cb", size = 165074, upload-time = "2026-02-14T04:02:17.941Z" } +sdist = { url = "https://files.pythonhosted.org/packages/af/98/00d3dd826d46959ad8e32af2dbb2398868fd9fd0683c26e56d0789bd0e68/authlib-1.6.9.tar.gz", hash = "sha256:d8f2421e7e5980cc1ddb4e32d3f5fa659cfaf60d8eaf3281ebed192e4ab74f04", size = 165134, upload-time = "2026-03-02T07:44:01.998Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/73/f7084bf12755113cd535ae586782ff3a6e710bfbe6a0d13d1c2f81ffbbfa/authlib-1.6.8-py2.py3-none-any.whl", hash = "sha256:97286fd7a15e6cfefc32771c8ef9c54f0ed58028f1322de6a2a7c969c3817888", size = 244116, upload-time = "2026-02-14T04:02:15.579Z" }, + { url = "https://files.pythonhosted.org/packages/53/23/b65f568ed0c22f1efacb744d2db1a33c8068f384b8c9b482b52ebdbc3ef6/authlib-1.6.9-py2.py3-none-any.whl", hash = "sha256:f08b4c14e08f0861dc18a32357b33fbcfd2ea86cfe3fe149484b4d764c4a0ac3", size = 244197, upload-time = "2026-03-02T07:44:00.307Z" }, ] [[package]] @@ -181,33 +184,48 @@ wheels = [ [[package]] name = "blessed" -version = "1.30.0" +version = "1.32.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jinxed", marker = "sys_platform == 'win32'" }, { name = "wcwidth" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dd/19/e926a0dbbf93c7aeb15d4dfff0d0e3de02653b3ba540b687307d0819c1ff/blessed-1.30.0.tar.gz", hash = 
"sha256:4d547019d7b40fc5420ea2ba2bc180fdccc31d6715298e2b49ffa7b020d44667", size = 13948932, upload-time = "2026-02-06T19:40:23.541Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e6/0c/658dea9ba35fcea19e6feaa8ba0d2dbf8cac9aeaa1f9ab1d77d36f534757/blessed-1.32.0.tar.gz", hash = "sha256:d4090e9908cf86bea15a5275845c8bfc69c4c34eb6d22de07c65d26f1e54a918", size = 13979999, upload-time = "2026-02-28T20:59:01.815Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/b0/8d87c7c8015ce8d4b2c5ee7a82a1d955f10138322c4f0cb387d7d2c1b2e7/blessed-1.30.0-py3-none-any.whl", hash = "sha256:4061a9f10dd22798716c2548ba36385af6a29d856c897f367c6ccc927e0b3a5a", size = 98399, upload-time = "2026-02-06T19:40:20.815Z" }, + { url = "https://files.pythonhosted.org/packages/f6/47/de8f185a1f537fdb5117fcde7050472b8cde3561179e9a68e1a566a6e6c6/blessed-1.32.0-py3-none-any.whl", hash = "sha256:c6fdc18838491ebc7f0460234917eff4e172074934f5f80e82672417bd74be70", size = 111172, upload-time = "2026-02-28T20:58:58.59Z" }, ] [[package]] name = "cachetools" -version = "7.0.1" +version = "7.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d4/07/56595285564e90777d758ebd383d6b0b971b87729bbe2184a849932a3736/cachetools-7.0.1.tar.gz", hash = "sha256:e31e579d2c5b6e2944177a0397150d312888ddf4e16e12f1016068f0c03b8341", size = 36126, upload-time = "2026-02-10T22:24:05.03Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/c7/342b33cc6877eebc6c9bb45cb9f78e170e575839699f6f3cc96050176431/cachetools-7.0.2.tar.gz", hash = "sha256:7e7f09a4ca8b791d8bb4864afc71e9c17e607a28e6839ca1a644253c97dbeae0", size = 36983, upload-time = "2026-03-02T19:45:16.926Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/9e/5faefbf9db1db466d633735faceda1f94aa99ce506ac450d232536266b32/cachetools-7.0.1-py3-none-any.whl", hash = "sha256:8f086515c254d5664ae2146d14fc7f65c9a4bce75152eb247e5a9c5e6d7b2ecf", size = 13484, upload-time = 
"2026-02-10T22:24:03.741Z" }, + { url = "https://files.pythonhosted.org/packages/ef/04/4b6968e77c110f12da96fdbfcb39c6557c2e5e81bd7afcf8ed893d5bc588/cachetools-7.0.2-py3-none-any.whl", hash = "sha256:938dcad184827c5e94928c4fd5526e2b46692b7fb1ae94472da9131d0299343c", size = 13793, upload-time = "2026-03-02T19:45:15.495Z" }, +] + +[[package]] +name = "caio" +version = "0.9.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/92/88/b8527e1b00c1811db339a1df8bd1ae49d146fcea9d6a5c40e3a80aaeb38d/caio-0.9.25.tar.gz", hash = "sha256:16498e7f81d1d0f5a4c0ad3f2540e65fe25691376e0a5bd367f558067113ed10", size = 26781, upload-time = "2025-12-26T15:21:36.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d3/25/79c98ebe12df31548ba4eaf44db11b7cad6b3e7b4203718335620939083c/caio-0.9.25-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fb7ff95af4c31ad3f03179149aab61097a71fd85e05f89b4786de0359dffd044", size = 36983, upload-time = "2025-12-26T15:21:36.075Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2b/21288691f16d479945968a0a4f2856818c1c5be56881d51d4dac9b255d26/caio-0.9.25-cp312-cp312-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:97084e4e30dfa598449d874c4d8e0c8d5ea17d2f752ef5e48e150ff9d240cd64", size = 82012, upload-time = "2025-12-26T15:22:20.983Z" }, + { url = "https://files.pythonhosted.org/packages/31/57/5e6ff127e6f62c9f15d989560435c642144aa4210882f9494204bc892305/caio-0.9.25-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d6c2a3411af97762a2b03840c3cec2f7f728921ff8adda53d7ea2315a8563451", size = 36979, upload-time = "2025-12-26T15:21:35.484Z" }, + { url = "https://files.pythonhosted.org/packages/a3/9f/f21af50e72117eb528c422d4276cbac11fb941b1b812b182e0a9c70d19c5/caio-0.9.25-cp313-cp313-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:0998210a4d5cd5cb565b32ccfe4e53d67303f868a76f212e002a8554692870e6", size = 81900, upload-time = "2025-12-26T15:22:21.919Z" }, + { url = "https://files.pythonhosted.org/packages/69/ca/a08fdc7efdcc24e6a6131a93c85be1f204d41c58f474c42b0670af8c016b/caio-0.9.25-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fab6078b9348e883c80a5e14b382e6ad6aabbc4429ca034e76e730cf464269db", size = 36978, upload-time = "2025-12-26T15:21:41.055Z" }, + { url = "https://files.pythonhosted.org/packages/5e/6c/d4d24f65e690213c097174d26eda6831f45f4734d9d036d81790a27e7b78/caio-0.9.25-cp314-cp314-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:44a6b58e52d488c75cfaa5ecaa404b2b41cc965e6c417e03251e868ecd5b6d77", size = 81832, upload-time = "2025-12-26T15:22:22.757Z" }, + { url = "https://files.pythonhosted.org/packages/86/93/1f76c8d1bafe3b0614e06b2195784a3765bbf7b0a067661af9e2dd47fc33/caio-0.9.25-py3-none-any.whl", hash = "sha256:06c0bb02d6b929119b1cfbe1ca403c768b2013a369e2db46bfa2a5761cf82e40", size = 19087, upload-time = "2025-12-26T15:22:00.221Z" }, ] [[package]] name = "certifi" -version = "2026.1.4" +version = "2026.2.25" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = 
"sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, ] [[package]] @@ -276,63 +294,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" }, ] -[[package]] -name = "charset-normalizer" -version = "3.4.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, - { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, - { url = 
"https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, - { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, - { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, - { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, - { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, - { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, - { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, - { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, - { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, - { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, - { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, - { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", 
size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, - { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, - { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, - { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, - { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, - { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, - { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, - { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, - { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, - { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, - { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, - { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, - { url = 
"https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, - { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, - { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, - { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, - { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, - { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, - { url = 
"https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, - { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, - { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, - { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, - { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, - { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, - { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, - { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, - { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, - { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, - { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, - { url = 
"https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, - { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, - { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, - { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, - { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, - { url = 
"https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, -] - [[package]] name = "click" version = "8.3.1" @@ -345,15 +306,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, ] -[[package]] -name = "cloudpickle" -version = "3.1.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/27/fb/576f067976d320f5f0114a8d9fa1215425441bb35627b1993e5afd8111e5/cloudpickle-3.1.2.tar.gz", hash = "sha256:7fda9eb655c9c230dab534f1983763de5835249750e85fbcef43aaa30a9a2414", size = 22330, upload-time = "2025-11-03T09:25:26.604Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl", hash = "sha256:9acb47f6afd73f60dc1df93bb801b472f05ff42fa6c84167d25cb206be1fbf4a", size = 22228, upload-time = "2025-11-03T09:25:25.534Z" }, -] - [[package]] name = "colorama" version = "0.4.6" @@ -447,19 +399,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" }, ] -[[package]] -name = "croniter" -version = "6.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "python-dateutil" }, - { name = "pytz" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/ad/2f/44d1ae153a0e27be56be43465e5cb39b9650c781e001e7864389deb25090/croniter-6.0.0.tar.gz", hash = "sha256:37c504b313956114a983ece2c2b07790b1f1094fe9d81cc94739214748255577", size = 64481, upload-time = "2024-12-17T17:17:47.32Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/4b/290b4c3efd6417a8b0c284896de19b1d5855e6dbdb97d2a35e68fa42de85/croniter-6.0.0-py2.py3-none-any.whl", hash = "sha256:2f878c3856f17896979b2a4379ba1f09c83e374931ea15cc835c5dd2eee9b368", size = 25468, upload-time = "2024-12-17T17:17:45.359Z" }, -] - [[package]] name = "cryptography" version = "46.0.5" @@ -515,7 +454,7 @@ wheels = [ [[package]] name = "cyclopts" -version = "4.5.2" +version = "4.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -523,18 +462,9 @@ dependencies = [ { name = "rich" }, { name = "rich-rst" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/50/cd/1fd03921a95113182e6fdf84af5d47f07aa91c00c03ac074c192b0d4672c/cyclopts-4.5.2.tar.gz", hash = "sha256:7fe01b2d184c55c4555e06a0397602b319d87faa5b086b41913eaeaea52fae16", size = 162381, upload-time = "2026-02-11T16:30:46.051Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/03/f906829bcfcbb945f19d6a64240ffb66a31d69ca5533e95882f0efc9c13c/cyclopts-4.5.2-py3-none-any.whl", hash = "sha256:ee56ee23c2c81abc34b66b5aa8fd2698ca699740054e84e534449ec3eb7f944d", size = 200165, upload-time = "2026-02-11T16:30:46.942Z" }, -] - -[[package]] -name = "diskcache" -version = "5.6.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3f/21/1c1ffc1a039ddcc459db43cc108658f32c57d271d7289a2794e401d0fdb6/diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc", size = 67916, upload-time = "2023-08-31T06:12:00.316Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/49/5c/88a4068c660a096bbe87efc5b7c190080c9e86919c36ec5f092cb08d852f/cyclopts-4.6.0.tar.gz", hash = "sha256:483c4704b953ea6da742e8de15972f405d2e748d19a848a4d61595e8e5360ee5", size = 162724, upload-time = "2026-02-23T15:44:49.286Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550, upload-time = "2023-08-31T06:11:58.822Z" }, + { url = "https://files.pythonhosted.org/packages/8f/eb/1e8337755a70dc7d7ff10a73dc8f20e9352c9ad6c2256ed863ac95cd3539/cyclopts-4.6.0-py3-none-any.whl", hash = "sha256:0a891cb55bfd79a3cdce024db8987b33316aba11071e5258c21ac12a640ba9f2", size = 200518, upload-time = "2026-02-23T15:44:47.854Z" }, ] [[package]] @@ -620,27 +550,9 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708, upload-time = "2025-11-12T09:56:36.333Z" }, ] -[[package]] -name = "fakeredis" -version = "2.33.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "redis" }, - { name = "sortedcontainers" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/5f/f9/57464119936414d60697fcbd32f38909bb5688b616ae13de6e98384433e0/fakeredis-2.33.0.tar.gz", hash = "sha256:d7bc9a69d21df108a6451bbffee23b3eba432c21a654afc7ff2d295428ec5770", size = 175187, upload-time = "2025-12-16T19:45:52.269Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/78/a850fed8aeef96d4a99043c90b818b2ed5419cd5b24a4049fd7cfb9f1471/fakeredis-2.33.0-py3-none-any.whl", hash = "sha256:de535f3f9ccde1c56672ab2fdd6a8efbc4f2619fc2f1acc87b8737177d71c965", size = 119605, upload-time = "2025-12-16T19:45:51.08Z" }, -] - 
-[package.optional-dependencies] -lua = [ - { name = "lupa" }, -] - [[package]] name = "fastmcp" -version = "2.14.5" +version = "3.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "authlib" }, @@ -651,29 +563,32 @@ dependencies = [ { name = "jsonschema-path" }, { name = "mcp" }, { name = "openapi-pydantic" }, + { name = "opentelemetry-api" }, { name = "packaging" }, { name = "platformdirs" }, - { name = "py-key-value-aio", extra = ["disk", "keyring", "memory"] }, + { name = "py-key-value-aio", extra = ["filetree", "keyring", "memory"] }, { name = "pydantic", extra = ["email"] }, - { name = "pydocket" }, { name = "pyperclip" }, { name = "python-dotenv" }, + { name = "pyyaml" }, { name = "rich" }, + { name = "uncalled-for" }, { name = "uvicorn" }, + { name = "watchfiles" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3b/32/982678d44f13849530a74ab101ed80e060c2ee6cf87471f062dcf61705fd/fastmcp-2.14.5.tar.gz", hash = "sha256:38944dc582c541d55357082bda2241cedb42cd3a78faea8a9d6a2662c62a42d7", size = 8296329, upload-time = "2026-02-03T15:35:21.005Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/70/862026c4589441f86ad3108f05bfb2f781c6b322ad60a982f40b303b47d7/fastmcp-3.1.0.tar.gz", hash = "sha256:e25264794c734b9977502a51466961eeecff92a0c2f3b49c40c070993628d6d0", size = 17347083, upload-time = "2026-03-03T02:43:11.283Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/c1/1a35ec68ff76ea8443aa115b18bcdee748a4ada2124537ee90522899ff9f/fastmcp-2.14.5-py3-none-any.whl", hash = "sha256:d81e8ec813f5089d3624bec93944beaefa86c0c3a4ef1111cbef676a761ebccf", size = 417784, upload-time = "2026-02-03T15:35:18.489Z" }, + { url = "https://files.pythonhosted.org/packages/17/07/516f5b20d88932e5a466c2216b628e5358a71b3a9f522215607c3281de05/fastmcp-3.1.0-py3-none-any.whl", hash = "sha256:b1f73b56fd3b0cb2bd9e2a144fc650d5cc31587ed129d996db7710e464ae8010", size = 633749, upload-time = 
"2026-03-03T02:43:09.06Z" }, ] [[package]] name = "filelock" -version = "3.24.1" +version = "3.25.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4f/8a/b24ff2c2d7f20ce930b5efe91e7260247d185d8939707721168ad204e465/filelock-3.24.1.tar.gz", hash = "sha256:3440181dd03f8904c108c8e9f5b11d1663e9fc960f1c837586a11f1c5c041e54", size = 37452, upload-time = "2026-02-15T22:03:16.564Z" } +sdist = { url = "https://files.pythonhosted.org/packages/77/18/a1fd2231c679dcb9726204645721b12498aeac28e1ad0601038f94b42556/filelock-3.25.0.tar.gz", hash = "sha256:8f00faf3abf9dc730a1ffe9c354ae5c04e079ab7d3a683b7c32da5dd05f26af3", size = 40158, upload-time = "2026-03-01T15:08:45.916Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/97/64/3613e89811e79aca8d0d4f2c984fc66336bc9d83529c1cbe02f5df010d0a/filelock-3.24.1-py3-none-any.whl", hash = "sha256:7c59f595e3cf4887dc95b403a896849da49ed183d7c9d7ee855646ca99f10698", size = 24153, upload-time = "2026-02-15T22:03:15.262Z" }, + { url = "https://files.pythonhosted.org/packages/f9/0b/de6f54d4a8bedfe8645c41497f3c18d749f0bd3218170c667bf4b81d0cdd/filelock-3.25.0-py3-none-any.whl", hash = "sha256:5ccf8069f7948f494968fc0713c10e5c182a9c9d9eef3a636307a20c2490f047", size = 26427, upload-time = "2026-03-01T15:08:44.593Z" }, ] [[package]] @@ -767,45 +682,45 @@ wheels = [ [[package]] name = "greenlet" -version = "3.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8a/99/1cd3411c56a410994669062bd73dd58270c00cc074cac15f385a1fd91f8a/greenlet-3.3.1.tar.gz", hash = "sha256:41848f3230b58c08bb43dee542e74a2a2e34d3c59dc3076cec9151aeeedcae98", size = 184690, upload-time = "2026-01-23T15:31:02.076Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/c8/9d76a66421d1ae24340dfae7e79c313957f6e3195c144d2c73333b5bfe34/greenlet-3.3.1-cp312-cp312-macosx_11_0_universal2.whl", hash = 
"sha256:7e806ca53acf6d15a888405880766ec84721aa4181261cd11a457dfe9a7a4975", size = 276443, upload-time = "2026-01-23T15:30:10.066Z" }, - { url = "https://files.pythonhosted.org/packages/81/99/401ff34bb3c032d1f10477d199724f5e5f6fbfb59816ad1455c79c1eb8e7/greenlet-3.3.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d842c94b9155f1c9b3058036c24ffb8ff78b428414a19792b2380be9cecf4f36", size = 597359, upload-time = "2026-01-23T16:00:57.394Z" }, - { url = "https://files.pythonhosted.org/packages/2b/bc/4dcc0871ed557792d304f50be0f7487a14e017952ec689effe2180a6ff35/greenlet-3.3.1-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:20fedaadd422fa02695f82093f9a98bad3dab5fcda793c658b945fcde2ab27ba", size = 607805, upload-time = "2026-01-23T16:05:28.068Z" }, - { url = "https://files.pythonhosted.org/packages/3b/cd/7a7ca57588dac3389e97f7c9521cb6641fd8b6602faf1eaa4188384757df/greenlet-3.3.1-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c620051669fd04ac6b60ebc70478210119c56e2d5d5df848baec4312e260e4ca", size = 622363, upload-time = "2026-01-23T16:15:54.754Z" }, - { url = "https://files.pythonhosted.org/packages/cf/05/821587cf19e2ce1f2b24945d890b164401e5085f9d09cbd969b0c193cd20/greenlet-3.3.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14194f5f4305800ff329cbf02c5fcc88f01886cadd29941b807668a45f0d2336", size = 609947, upload-time = "2026-01-23T15:32:51.004Z" }, - { url = "https://files.pythonhosted.org/packages/a4/52/ee8c46ed9f8babaa93a19e577f26e3d28a519feac6350ed6f25f1afee7e9/greenlet-3.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7b2fe4150a0cf59f847a67db8c155ac36aed89080a6a639e9f16df5d6c6096f1", size = 1567487, upload-time = "2026-01-23T16:04:22.125Z" }, - { url = "https://files.pythonhosted.org/packages/8f/7c/456a74f07029597626f3a6db71b273a3632aecb9afafeeca452cfa633197/greenlet-3.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:49f4ad195d45f4a66a0eb9c1ba4832bb380570d361912fa3554746830d332149", size = 1636087, upload-time = "2026-01-23T15:33:47.486Z" }, - { url = "https://files.pythonhosted.org/packages/34/2f/5e0e41f33c69655300a5e54aeb637cf8ff57f1786a3aba374eacc0228c1d/greenlet-3.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:cc98b9c4e4870fa983436afa999d4eb16b12872fab7071423d5262fa7120d57a", size = 227156, upload-time = "2026-01-23T15:34:34.808Z" }, - { url = "https://files.pythonhosted.org/packages/c8/ab/717c58343cf02c5265b531384b248787e04d8160b8afe53d9eec053d7b44/greenlet-3.3.1-cp312-cp312-win_arm64.whl", hash = "sha256:bfb2d1763d777de5ee495c85309460f6fd8146e50ec9d0ae0183dbf6f0a829d1", size = 226403, upload-time = "2026-01-23T15:31:39.372Z" }, - { url = "https://files.pythonhosted.org/packages/ec/ab/d26750f2b7242c2b90ea2ad71de70cfcd73a948a49513188a0fc0d6fc15a/greenlet-3.3.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:7ab327905cabb0622adca5971e488064e35115430cec2c35a50fd36e72a315b3", size = 275205, upload-time = "2026-01-23T15:30:24.556Z" }, - { url = "https://files.pythonhosted.org/packages/10/d3/be7d19e8fad7c5a78eeefb2d896a08cd4643e1e90c605c4be3b46264998f/greenlet-3.3.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:65be2f026ca6a176f88fb935ee23c18333ccea97048076aef4db1ef5bc0713ac", size = 599284, upload-time = "2026-01-23T16:00:58.584Z" }, - { url = "https://files.pythonhosted.org/packages/ae/21/fe703aaa056fdb0f17e5afd4b5c80195bbdab701208918938bd15b00d39b/greenlet-3.3.1-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7a3ae05b3d225b4155bda56b072ceb09d05e974bc74be6c3fc15463cf69f33fd", size = 610274, upload-time = "2026-01-23T16:05:29.312Z" }, - { url = "https://files.pythonhosted.org/packages/06/00/95df0b6a935103c0452dad2203f5be8377e551b8466a29650c4c5a5af6cc/greenlet-3.3.1-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:12184c61e5d64268a160226fb4818af4df02cfead8379d7f8b99a56c3a54ff3e", size = 624375, upload-time = "2026-01-23T16:15:55.915Z" }, - { url = "https://files.pythonhosted.org/packages/cb/86/5c6ab23bb3c28c21ed6bebad006515cfe08b04613eb105ca0041fecca852/greenlet-3.3.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6423481193bbbe871313de5fd06a082f2649e7ce6e08015d2a76c1e9186ca5b3", size = 612904, upload-time = "2026-01-23T15:32:52.317Z" }, - { url = "https://files.pythonhosted.org/packages/c2/f3/7949994264e22639e40718c2daf6f6df5169bf48fb038c008a489ec53a50/greenlet-3.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:33a956fe78bbbda82bfc95e128d61129b32d66bcf0a20a1f0c08aa4839ffa951", size = 1567316, upload-time = "2026-01-23T16:04:23.316Z" }, - { url = "https://files.pythonhosted.org/packages/8d/6e/d73c94d13b6465e9f7cd6231c68abde838bb22408596c05d9059830b7872/greenlet-3.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b065d3284be43728dd280f6f9a13990b56470b81be20375a207cdc814a983f2", size = 1636549, upload-time = "2026-01-23T15:33:48.643Z" }, - { url = "https://files.pythonhosted.org/packages/5e/b3/c9c23a6478b3bcc91f979ce4ca50879e4d0b2bd7b9a53d8ecded719b92e2/greenlet-3.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:27289986f4e5b0edec7b5a91063c109f0276abb09a7e9bdab08437525977c946", size = 227042, upload-time = "2026-01-23T15:33:58.216Z" }, - { url = "https://files.pythonhosted.org/packages/90/e7/824beda656097edee36ab15809fd063447b200cc03a7f6a24c34d520bc88/greenlet-3.3.1-cp313-cp313-win_arm64.whl", hash = "sha256:2f080e028001c5273e0b42690eaf359aeef9cb1389da0f171ea51a5dc3c7608d", size = 226294, upload-time = "2026-01-23T15:30:52.73Z" }, - { url = "https://files.pythonhosted.org/packages/ae/fb/011c7c717213182caf78084a9bea51c8590b0afda98001f69d9f853a495b/greenlet-3.3.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:bd59acd8529b372775cd0fcbc5f420ae20681c5b045ce25bd453ed8455ab99b5", size = 275737, upload-time = 
"2026-01-23T15:32:16.889Z" }, - { url = "https://files.pythonhosted.org/packages/41/2e/a3a417d620363fdbb08a48b1dd582956a46a61bf8fd27ee8164f9dfe87c2/greenlet-3.3.1-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b31c05dd84ef6871dd47120386aed35323c944d86c3d91a17c4b8d23df62f15b", size = 646422, upload-time = "2026-01-23T16:01:00.354Z" }, - { url = "https://files.pythonhosted.org/packages/b4/09/c6c4a0db47defafd2d6bab8ddfe47ad19963b4e30f5bed84d75328059f8c/greenlet-3.3.1-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:02925a0bfffc41e542c70aa14c7eda3593e4d7e274bfcccca1827e6c0875902e", size = 658219, upload-time = "2026-01-23T16:05:30.956Z" }, - { url = "https://files.pythonhosted.org/packages/e2/89/b95f2ddcc5f3c2bc09c8ee8d77be312df7f9e7175703ab780f2014a0e781/greenlet-3.3.1-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3e0f3878ca3a3ff63ab4ea478585942b53df66ddde327b59ecb191b19dbbd62d", size = 671455, upload-time = "2026-01-23T16:15:57.232Z" }, - { url = "https://files.pythonhosted.org/packages/80/38/9d42d60dffb04b45f03dbab9430898352dba277758640751dc5cc316c521/greenlet-3.3.1-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34a729e2e4e4ffe9ae2408d5ecaf12f944853f40ad724929b7585bca808a9d6f", size = 660237, upload-time = "2026-01-23T15:32:53.967Z" }, - { url = "https://files.pythonhosted.org/packages/96/61/373c30b7197f9e756e4c81ae90a8d55dc3598c17673f91f4d31c3c689c3f/greenlet-3.3.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:aec9ab04e82918e623415947921dea15851b152b822661cce3f8e4393c3df683", size = 1615261, upload-time = "2026-01-23T16:04:25.066Z" }, - { url = "https://files.pythonhosted.org/packages/fd/d3/ca534310343f5945316f9451e953dcd89b36fe7a19de652a1dc5a0eeef3f/greenlet-3.3.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:71c767cf281a80d02b6c1bdc41c9468e1f5a494fb11bc8688c360524e273d7b1", size = 1683719, upload-time = 
"2026-01-23T15:33:50.61Z" }, - { url = "https://files.pythonhosted.org/packages/52/cb/c21a3fd5d2c9c8b622e7bede6d6d00e00551a5ee474ea6d831b5f567a8b4/greenlet-3.3.1-cp314-cp314-win_amd64.whl", hash = "sha256:96aff77af063b607f2489473484e39a0bbae730f2ea90c9e5606c9b73c44174a", size = 228125, upload-time = "2026-01-23T15:32:45.265Z" }, - { url = "https://files.pythonhosted.org/packages/6a/8e/8a2db6d11491837af1de64b8aff23707c6e85241be13c60ed399a72e2ef8/greenlet-3.3.1-cp314-cp314-win_arm64.whl", hash = "sha256:b066e8b50e28b503f604fa538adc764a638b38cf8e81e025011d26e8a627fa79", size = 227519, upload-time = "2026-01-23T15:31:47.284Z" }, - { url = "https://files.pythonhosted.org/packages/28/24/cbbec49bacdcc9ec652a81d3efef7b59f326697e7edf6ed775a5e08e54c2/greenlet-3.3.1-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:3e63252943c921b90abb035ebe9de832c436401d9c45f262d80e2d06cc659242", size = 282706, upload-time = "2026-01-23T15:33:05.525Z" }, - { url = "https://files.pythonhosted.org/packages/86/2e/4f2b9323c144c4fe8842a4e0d92121465485c3c2c5b9e9b30a52e80f523f/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:76e39058e68eb125de10c92524573924e827927df5d3891fbc97bd55764a8774", size = 651209, upload-time = "2026-01-23T16:01:01.517Z" }, - { url = "https://files.pythonhosted.org/packages/d9/87/50ca60e515f5bb55a2fbc5f0c9b5b156de7d2fc51a0a69abc9d23914a237/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c9f9d5e7a9310b7a2f416dd13d2e3fd8b42d803968ea580b7c0f322ccb389b97", size = 654300, upload-time = "2026-01-23T16:05:32.199Z" }, - { url = "https://files.pythonhosted.org/packages/7c/25/c51a63f3f463171e09cb586eb64db0861eb06667ab01a7968371a24c4f3b/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b9721549a95db96689458a1e0ae32412ca18776ed004463df3a9299c1b257ab", size = 662574, upload-time = "2026-01-23T16:15:58.364Z" }, - { url = 
"https://files.pythonhosted.org/packages/1d/94/74310866dfa2b73dd08659a3d18762f83985ad3281901ba0ee9a815194fb/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:92497c78adf3ac703b57f1e3813c2d874f27f71a178f9ea5887855da413cd6d2", size = 653842, upload-time = "2026-01-23T15:32:55.671Z" }, - { url = "https://files.pythonhosted.org/packages/97/43/8bf0ffa3d498eeee4c58c212a3905dd6146c01c8dc0b0a046481ca29b18c/greenlet-3.3.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ed6b402bc74d6557a705e197d47f9063733091ed6357b3de33619d8a8d93ac53", size = 1614917, upload-time = "2026-01-23T16:04:26.276Z" }, - { url = "https://files.pythonhosted.org/packages/89/90/a3be7a5f378fc6e84abe4dcfb2ba32b07786861172e502388b4c90000d1b/greenlet-3.3.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:59913f1e5ada20fde795ba906916aea25d442abcc0593fba7e26c92b7ad76249", size = 1676092, upload-time = "2026-01-23T15:33:52.176Z" }, - { url = "https://files.pythonhosted.org/packages/e1/2b/98c7f93e6db9977aaee07eb1e51ca63bd5f779b900d362791d3252e60558/greenlet-3.3.1-cp314-cp314t-win_amd64.whl", hash = "sha256:301860987846c24cb8964bdec0e31a96ad4a2a801b41b4ef40963c1b44f33451", size = 233181, upload-time = "2026-01-23T15:33:00.29Z" }, +version = "3.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/51/1664f6b78fc6ebbd98019a1fd730e83fa78f2db7058f72b1463d3612b8db/greenlet-3.3.2.tar.gz", hash = "sha256:2eaf067fc6d886931c7962e8c6bede15d2f01965560f3359b27c80bde2d151f2", size = 188267, upload-time = "2026-02-20T20:54:15.531Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/ab/1608e5a7578e62113506740b88066bf09888322a311cff602105e619bd87/greenlet-3.3.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ac8d61d4343b799d1e526db579833d72f23759c71e07181c2d2944e429eb09cd", size = 280358, upload-time = "2026-02-20T20:17:43.971Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/23/0eae412a4ade4e6623ff7626e38998cb9b11e9ff1ebacaa021e4e108ec15/greenlet-3.3.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ceec72030dae6ac0c8ed7591b96b70410a8be370b6a477b1dbc072856ad02bd", size = 601217, upload-time = "2026-02-20T20:47:31.462Z" }, + { url = "https://files.pythonhosted.org/packages/f8/16/5b1678a9c07098ecb9ab2dd159fafaf12e963293e61ee8d10ecb55273e5e/greenlet-3.3.2-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2a5be83a45ce6188c045bcc44b0ee037d6a518978de9a5d97438548b953a1ac", size = 611792, upload-time = "2026-02-20T20:55:58.423Z" }, + { url = "https://files.pythonhosted.org/packages/5c/c5/cc09412a29e43406eba18d61c70baa936e299bc27e074e2be3806ed29098/greenlet-3.3.2-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae9e21c84035c490506c17002f5c8ab25f980205c3e61ddb3a2a2a2e6c411fcb", size = 626250, upload-time = "2026-02-20T21:02:46.596Z" }, + { url = "https://files.pythonhosted.org/packages/50/1f/5155f55bd71cabd03765a4aac9ac446be129895271f73872c36ebd4b04b6/greenlet-3.3.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e99d1749147ac21dde49b99c9abffcbc1e2d55c67501465ef0930d6e78e070", size = 613875, upload-time = "2026-02-20T20:21:01.102Z" }, + { url = "https://files.pythonhosted.org/packages/fc/dd/845f249c3fcd69e32df80cdab059b4be8b766ef5830a3d0aa9d6cad55beb/greenlet-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c956a19350e2c37f2c48b336a3afb4bff120b36076d9d7fb68cb44e05d95b79", size = 1571467, upload-time = "2026-02-20T20:49:33.495Z" }, + { url = "https://files.pythonhosted.org/packages/2a/50/2649fe21fcc2b56659a452868e695634722a6655ba245d9f77f5656010bf/greenlet-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6c6f8ba97d17a1e7d664151284cb3315fc5f8353e75221ed4324f84eb162b395", size = 1640001, upload-time = "2026-02-20T20:21:09.154Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/40/cc802e067d02af8b60b6771cea7d57e21ef5e6659912814babb42b864713/greenlet-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:34308836d8370bddadb41f5a7ce96879b72e2fdfb4e87729330c6ab52376409f", size = 231081, upload-time = "2026-02-20T20:17:28.121Z" }, + { url = "https://files.pythonhosted.org/packages/58/2e/fe7f36ff1982d6b10a60d5e0740c759259a7d6d2e1dc41da6d96de32fff6/greenlet-3.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:d3a62fa76a32b462a97198e4c9e99afb9ab375115e74e9a83ce180e7a496f643", size = 230331, upload-time = "2026-02-20T20:17:23.34Z" }, + { url = "https://files.pythonhosted.org/packages/ac/48/f8b875fa7dea7dd9b33245e37f065af59df6a25af2f9561efa8d822fde51/greenlet-3.3.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa6ac98bdfd716a749b84d4034486863fd81c3abde9aa3cf8eff9127981a4ae4", size = 279120, upload-time = "2026-02-20T20:19:01.9Z" }, + { url = "https://files.pythonhosted.org/packages/49/8d/9771d03e7a8b1ee456511961e1b97a6d77ae1dea4a34a5b98eee706689d3/greenlet-3.3.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab0c7e7901a00bc0a7284907273dc165b32e0d109a6713babd04471327ff7986", size = 603238, upload-time = "2026-02-20T20:47:32.873Z" }, + { url = "https://files.pythonhosted.org/packages/59/0e/4223c2bbb63cd5c97f28ffb2a8aee71bdfb30b323c35d409450f51b91e3e/greenlet-3.3.2-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d248d8c23c67d2291ffd47af766e2a3aa9fa1c6703155c099feb11f526c63a92", size = 614219, upload-time = "2026-02-20T20:55:59.817Z" }, + { url = "https://files.pythonhosted.org/packages/94/2b/4d012a69759ac9d77210b8bfb128bc621125f5b20fc398bce3940d036b1c/greenlet-3.3.2-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ccd21bb86944ca9be6d967cf7691e658e43417782bce90b5d2faeda0ff78a7dd", size = 628268, upload-time = "2026-02-20T21:02:48.024Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/34/259b28ea7a2a0c904b11cd36c79b8cef8019b26ee5dbe24e73b469dea347/greenlet-3.3.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b6997d360a4e6a4e936c0f9625b1c20416b8a0ea18a8e19cabbefc712e7397ab", size = 616774, upload-time = "2026-02-20T20:21:02.454Z" }, + { url = "https://files.pythonhosted.org/packages/0a/03/996c2d1689d486a6e199cb0f1cf9e4aa940c500e01bdf201299d7d61fa69/greenlet-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64970c33a50551c7c50491671265d8954046cb6e8e2999aacdd60e439b70418a", size = 1571277, upload-time = "2026-02-20T20:49:34.795Z" }, + { url = "https://files.pythonhosted.org/packages/d9/c4/2570fc07f34a39f2caf0bf9f24b0a1a0a47bc2e8e465b2c2424821389dfc/greenlet-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1a9172f5bf6bd88e6ba5a84e0a68afeac9dc7b6b412b245dd64f52d83c81e55b", size = 1640455, upload-time = "2026-02-20T20:21:10.261Z" }, + { url = "https://files.pythonhosted.org/packages/91/39/5ef5aa23bc545aa0d31e1b9b55822b32c8da93ba657295840b6b34124009/greenlet-3.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:a7945dd0eab63ded0a48e4dcade82939783c172290a7903ebde9e184333ca124", size = 230961, upload-time = "2026-02-20T20:16:58.461Z" }, + { url = "https://files.pythonhosted.org/packages/62/6b/a89f8456dcb06becff288f563618e9f20deed8dd29beea14f9a168aef64b/greenlet-3.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:394ead29063ee3515b4e775216cb756b2e3b4a7e55ae8fd884f17fa579e6b327", size = 230221, upload-time = "2026-02-20T20:17:37.152Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab", size = 279650, upload-time = "2026-02-20T20:18:00.783Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/c0/45f93f348fa49abf32ac8439938726c480bd96b2a3c6f4d949ec0124b69f/greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082", size = 650295, upload-time = "2026-02-20T20:47:34.036Z" }, + { url = "https://files.pythonhosted.org/packages/b3/de/dd7589b3f2b8372069ab3e4763ea5329940fc7ad9dcd3e272a37516d7c9b/greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9", size = 662163, upload-time = "2026-02-20T20:56:01.295Z" }, + { url = "https://files.pythonhosted.org/packages/cd/ac/85804f74f1ccea31ba518dcc8ee6f14c79f73fe36fa1beba38930806df09/greenlet-3.3.2-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e3cb43ce200f59483eb82949bf1835a99cf43d7571e900d7c8d5c62cdf25d2f9", size = 675371, upload-time = "2026-02-20T21:02:49.664Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506", size = 664160, upload-time = "2026-02-20T20:21:04.015Z" }, + { url = "https://files.pythonhosted.org/packages/48/cf/56832f0c8255d27f6c35d41b5ec91168d74ec721d85f01a12131eec6b93c/greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce", size = 1619181, upload-time = "2026-02-20T20:49:36.052Z" }, + { url = "https://files.pythonhosted.org/packages/0a/23/b90b60a4aabb4cec0796e55f25ffbfb579a907c3898cd2905c8918acaa16/greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5", size = 1687713, upload-time = "2026-02-20T20:21:11.684Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/ca/2101ca3d9223a1dc125140dbc063644dca76df6ff356531eb27bc267b446/greenlet-3.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:8c4dd0f3997cf2512f7601563cc90dfb8957c0cff1e3a1b23991d4ea1776c492", size = 232034, upload-time = "2026-02-20T20:20:08.186Z" }, + { url = "https://files.pythonhosted.org/packages/f6/4a/ecf894e962a59dea60f04877eea0fd5724618da89f1867b28ee8b91e811f/greenlet-3.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:cd6f9e2bbd46321ba3bbb4c8a15794d32960e3b0ae2cc4d49a1a53d314805d71", size = 231437, upload-time = "2026-02-20T20:18:59.722Z" }, + { url = "https://files.pythonhosted.org/packages/98/6d/8f2ef704e614bcf58ed43cfb8d87afa1c285e98194ab2cfad351bf04f81e/greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54", size = 286617, upload-time = "2026-02-20T20:19:29.856Z" }, + { url = "https://files.pythonhosted.org/packages/5e/0d/93894161d307c6ea237a43988f27eba0947b360b99ac5239ad3fe09f0b47/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4", size = 655189, upload-time = "2026-02-20T20:47:35.742Z" }, + { url = "https://files.pythonhosted.org/packages/f5/2c/d2d506ebd8abcb57386ec4f7ba20f4030cbe56eae541bc6fd6ef399c0b41/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff", size = 658225, upload-time = "2026-02-20T20:56:02.527Z" }, + { url = "https://files.pythonhosted.org/packages/d1/67/8197b7e7e602150938049d8e7f30de1660cfb87e4c8ee349b42b67bdb2e1/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:59b3e2c40f6706b05a9cd299c836c6aa2378cabe25d021acd80f13abf81181cf", size = 666581, upload-time = "2026-02-20T21:02:51.526Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/30/3a09155fbf728673a1dea713572d2d31159f824a37c22da82127056c44e4/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4", size = 657907, upload-time = "2026-02-20T20:21:05.259Z" }, + { url = "https://files.pythonhosted.org/packages/f3/fd/d05a4b7acd0154ed758797f0a43b4c0962a843bedfe980115e842c5b2d08/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727", size = 1618857, upload-time = "2026-02-20T20:49:37.309Z" }, + { url = "https://files.pythonhosted.org/packages/6f/e1/50ee92a5db521de8f35075b5eff060dd43d39ebd46c2181a2042f7070385/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e", size = 1680010, upload-time = "2026-02-20T20:21:13.427Z" }, + { url = "https://files.pythonhosted.org/packages/29/4b/45d90626aef8e65336bed690106d1382f7a43665e2249017e9527df8823b/greenlet-3.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c04c5e06ec3e022cbfe2cd4a846e1d4e50087444f875ff6d2c2ad8445495cf1a", size = 237086, upload-time = "2026-02-20T20:20:45.786Z" }, ] [[package]] @@ -856,11 +771,11 @@ wheels = [ [[package]] name = "identify" -version = "2.6.16" +version = "2.6.17" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5b/8d/e8b97e6bd3fb6fb271346f7981362f1e04d6a7463abd0de79e1fda17c067/identify-2.6.16.tar.gz", hash = "sha256:846857203b5511bbe94d5a352a48ef2359532bc8f6727b5544077a0dcfb24980", size = 99360, upload-time = "2026-01-12T18:58:58.201Z" } +sdist = { url = "https://files.pythonhosted.org/packages/57/84/376a3b96e5a8d33a7aa2c5b3b31a4b3c364117184bf0b17418055f6ace66/identify-2.6.17.tar.gz", hash = "sha256:f816b0b596b204c9fdf076ded172322f2723cf958d02f9c3587504834c8ff04d", size = 99579, upload-time = 
"2026-03-01T20:04:12.702Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/58/40fbbcefeda82364720eba5cf2270f98496bdfa19ea75b4cccae79c698e6/identify-2.6.16-py2.py3-none-any.whl", hash = "sha256:391ee4d77741d994189522896270b787aed8670389bfd60f326d677d64a6dfb0", size = 99202, upload-time = "2026-01-12T18:58:56.627Z" }, + { url = "https://files.pythonhosted.org/packages/40/66/71c1227dff78aaeb942fed29dd5651f2aec166cc7c9aeea3e8b26a539b7d/identify-2.6.17-py2.py3-none-any.whl", hash = "sha256:be5f8412d5ed4b20f2bd41a65f920990bdccaa6a4a18a08f1eefdcd0bdd885f0", size = 99382, upload-time = "2026-03-01T20:04:11.439Z" }, ] [[package]] @@ -987,17 +902,16 @@ wheels = [ [[package]] name = "jsonschema-path" -version = "0.3.4" +version = "0.4.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pathable" }, { name = "pyyaml" }, { name = "referencing" }, - { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6e/45/41ebc679c2a4fced6a722f624c18d658dee42612b83ea24c1caf7c0eb3a8/jsonschema_path-0.3.4.tar.gz", hash = "sha256:8365356039f16cc65fddffafda5f58766e34bebab7d6d105616ab52bc4297001", size = 11159, upload-time = "2025-01-24T14:33:16.547Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/8a/7e6102f2b8bdc6705a9eb5294f8f6f9ccd3a8420e8e8e19671d1dd773251/jsonschema_path-0.4.5.tar.gz", hash = "sha256:c6cd7d577ae290c7defd4f4029e86fdb248ca1bd41a07557795b3c95e5144918", size = 15113, upload-time = "2026-03-03T09:56:46.87Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/58/3485da8cb93d2f393bce453adeef16896751f14ba3e2024bc21dc9597646/jsonschema_path-0.3.4-py3-none-any.whl", hash = "sha256:f502191fdc2b22050f9a81c9237be9d27145b9001c55842bece5e94e382e52f8", size = 14810, upload-time = "2025-01-24T14:33:14.652Z" }, + { url = "https://files.pythonhosted.org/packages/04/d5/4e96c44f6c1ea3d812cf5391d81a4f5abaa540abf8d04ecd7f66e0ed11df/jsonschema_path-0.4.5-py3-none-any.whl", hash = 
"sha256:7d77a2c3f3ec569a40efe5c5f942c44c1af2a6f96fe0866794c9ef5b8f87fd65", size = 19368, upload-time = "2026-03-03T09:56:45.39Z" }, ] [[package]] @@ -1072,58 +986,6 @@ dev = [ { name = "ty", specifier = ">=0.0.1a12" }, ] -[[package]] -name = "lupa" -version = "2.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b8/1c/191c3e6ec6502e3dbe25a53e27f69a5daeac3e56de1f73c0138224171ead/lupa-2.6.tar.gz", hash = "sha256:9a770a6e89576be3447668d7ced312cd6fd41d3c13c2462c9dc2c2ab570e45d9", size = 7240282, upload-time = "2025-10-24T07:20:29.738Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/94/86/ce243390535c39d53ea17ccf0240815e6e457e413e40428a658ea4ee4b8d/lupa-2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47ce718817ef1cc0c40d87c3d5ae56a800d61af00fbc0fad1ca9be12df2f3b56", size = 951707, upload-time = "2025-10-24T07:18:03.884Z" }, - { url = "https://files.pythonhosted.org/packages/86/85/cedea5e6cbeb54396fdcc55f6b741696f3f036d23cfaf986d50d680446da/lupa-2.6-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:7aba985b15b101495aa4b07112cdc08baa0c545390d560ad5cfde2e9e34f4d58", size = 1916703, upload-time = "2025-10-24T07:18:05.6Z" }, - { url = "https://files.pythonhosted.org/packages/24/be/3d6b5f9a8588c01a4d88129284c726017b2089f3a3fd3ba8bd977292fea0/lupa-2.6-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:b766f62f95b2739f2248977d29b0722e589dcf4f0ccfa827ccbd29f0148bd2e5", size = 985152, upload-time = "2025-10-24T07:18:08.561Z" }, - { url = "https://files.pythonhosted.org/packages/eb/23/9f9a05beee5d5dce9deca4cb07c91c40a90541fc0a8e09db4ee670da550f/lupa-2.6-cp312-cp312-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:00a934c23331f94cb51760097ebfab14b005d55a6b30a2b480e3c53dd2fa290d", size = 1159599, upload-time = "2025-10-24T07:18:10.346Z" }, - { url = 
"https://files.pythonhosted.org/packages/40/4e/e7c0583083db9d7f1fd023800a9767d8e4391e8330d56c2373d890ac971b/lupa-2.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21de9f38bd475303e34a042b7081aabdf50bd9bafd36ce4faea2f90fd9f15c31", size = 1038686, upload-time = "2025-10-24T07:18:12.112Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9f/5a4f7d959d4feba5e203ff0c31889e74d1ca3153122be4a46dca7d92bf7c/lupa-2.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cf3bda96d3fc41237e964a69c23647d50d4e28421111360274d4799832c560e9", size = 2071956, upload-time = "2025-10-24T07:18:14.572Z" }, - { url = "https://files.pythonhosted.org/packages/92/34/2f4f13ca65d01169b1720176aedc4af17bc19ee834598c7292db232cb6dc/lupa-2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a76ead245da54801a81053794aa3975f213221f6542d14ec4b859ee2e7e0323", size = 1057199, upload-time = "2025-10-24T07:18:16.379Z" }, - { url = "https://files.pythonhosted.org/packages/35/2a/5f7d2eebec6993b0dcd428e0184ad71afb06a45ba13e717f6501bfed1da3/lupa-2.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8dd0861741caa20886ddbda0a121d8e52fb9b5bb153d82fa9bba796962bf30e8", size = 1173693, upload-time = "2025-10-24T07:18:18.153Z" }, - { url = "https://files.pythonhosted.org/packages/e4/29/089b4d2f8e34417349af3904bb40bec40b65c8731f45e3fd8d497ca573e5/lupa-2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:239e63948b0b23023f81d9a19a395e768ed3da6a299f84e7963b8f813f6e3f9c", size = 2164394, upload-time = "2025-10-24T07:18:20.403Z" }, - { url = "https://files.pythonhosted.org/packages/f3/1b/79c17b23c921f81468a111cad843b076a17ef4b684c4a8dff32a7969c3f0/lupa-2.6-cp312-cp312-win32.whl", hash = "sha256:325894e1099499e7a6f9c351147661a2011887603c71086d36fe0f964d52d1ce", size = 1420647, upload-time = "2025-10-24T07:18:23.368Z" }, - { url = 
"https://files.pythonhosted.org/packages/b8/15/5121e68aad3584e26e1425a5c9a79cd898f8a152292059e128c206ee817c/lupa-2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c735a1ce8ee60edb0fe71d665f1e6b7c55c6021f1d340eb8c865952c602cd36f", size = 1688529, upload-time = "2025-10-24T07:18:25.523Z" }, - { url = "https://files.pythonhosted.org/packages/28/1d/21176b682ca5469001199d8b95fa1737e29957a3d185186e7a8b55345f2e/lupa-2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:663a6e58a0f60e7d212017d6678639ac8df0119bc13c2145029dcba084391310", size = 947232, upload-time = "2025-10-24T07:18:27.878Z" }, - { url = "https://files.pythonhosted.org/packages/ce/4c/d327befb684660ca13cf79cd1f1d604331808f9f1b6fb6bf57832f8edf80/lupa-2.6-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:d1f5afda5c20b1f3217a80e9bc1b77037f8a6eb11612fd3ada19065303c8f380", size = 1908625, upload-time = "2025-10-24T07:18:29.944Z" }, - { url = "https://files.pythonhosted.org/packages/66/8e/ad22b0a19454dfd08662237a84c792d6d420d36b061f239e084f29d1a4f3/lupa-2.6-cp313-cp313-macosx_11_0_x86_64.whl", hash = "sha256:26f2b3c085fe76e9119e48c1013c1cccdc1f51585d456858290475aa38e7089e", size = 981057, upload-time = "2025-10-24T07:18:31.553Z" }, - { url = "https://files.pythonhosted.org/packages/5c/48/74859073ab276bd0566c719f9ca0108b0cfc1956ca0d68678d117d47d155/lupa-2.6-cp313-cp313-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:60d2f902c7b96fb8ab98493dcff315e7bb4d0b44dc9dd76eb37de575025d5685", size = 1156227, upload-time = "2025-10-24T07:18:33.981Z" }, - { url = "https://files.pythonhosted.org/packages/09/6c/0e9ded061916877253c2266074060eb71ed99fb21d73c8c114a76725bce2/lupa-2.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a02d25dee3a3250967c36590128d9220ae02f2eda166a24279da0b481519cbff", size = 1035752, upload-time = "2025-10-24T07:18:36.32Z" }, - { url = 
"https://files.pythonhosted.org/packages/dd/ef/f8c32e454ef9f3fe909f6c7d57a39f950996c37a3deb7b391fec7903dab7/lupa-2.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6eae1ee16b886b8914ff292dbefbf2f48abfbdee94b33a88d1d5475e02423203", size = 2069009, upload-time = "2025-10-24T07:18:38.072Z" }, - { url = "https://files.pythonhosted.org/packages/53/dc/15b80c226a5225815a890ee1c11f07968e0aba7a852df41e8ae6fe285063/lupa-2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0edd5073a4ee74ab36f74fe61450148e6044f3952b8d21248581f3c5d1a58be", size = 1056301, upload-time = "2025-10-24T07:18:40.165Z" }, - { url = "https://files.pythonhosted.org/packages/31/14/2086c1425c985acfb30997a67e90c39457122df41324d3c179d6ee2292c6/lupa-2.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0c53ee9f22a8a17e7d4266ad48e86f43771951797042dd51d1494aaa4f5f3f0a", size = 1170673, upload-time = "2025-10-24T07:18:42.426Z" }, - { url = "https://files.pythonhosted.org/packages/10/e5/b216c054cf86576c0191bf9a9f05de6f7e8e07164897d95eea0078dca9b2/lupa-2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:de7c0f157a9064a400d828789191a96da7f4ce889969a588b87ec80de9b14772", size = 2162227, upload-time = "2025-10-24T07:18:46.112Z" }, - { url = "https://files.pythonhosted.org/packages/59/2f/33ecb5bedf4f3bc297ceacb7f016ff951331d352f58e7e791589609ea306/lupa-2.6-cp313-cp313-win32.whl", hash = "sha256:ee9523941ae0a87b5b703417720c5d78f72d2f5bc23883a2ea80a949a3ed9e75", size = 1419558, upload-time = "2025-10-24T07:18:48.371Z" }, - { url = "https://files.pythonhosted.org/packages/f9/b4/55e885834c847ea610e111d87b9ed4768f0afdaeebc00cd46810f25029f6/lupa-2.6-cp313-cp313-win_amd64.whl", hash = "sha256:b1335a5835b0a25ebdbc75cf0bda195e54d133e4d994877ef025e218c2e59db9", size = 1683424, upload-time = "2025-10-24T07:18:50.976Z" }, - { url = 
"https://files.pythonhosted.org/packages/66/9d/d9427394e54d22a35d1139ef12e845fd700d4872a67a34db32516170b746/lupa-2.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:dcb6d0a3264873e1653bc188499f48c1fb4b41a779e315eba45256cfe7bc33c1", size = 953818, upload-time = "2025-10-24T07:18:53.378Z" }, - { url = "https://files.pythonhosted.org/packages/10/41/27bbe81953fb2f9ecfced5d9c99f85b37964cfaf6aa8453bb11283983721/lupa-2.6-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:a37e01f2128f8c36106726cb9d360bac087d58c54b4522b033cc5691c584db18", size = 1915850, upload-time = "2025-10-24T07:18:55.259Z" }, - { url = "https://files.pythonhosted.org/packages/a3/98/f9ff60db84a75ba8725506bbf448fb085bc77868a021998ed2a66d920568/lupa-2.6-cp314-cp314-macosx_11_0_x86_64.whl", hash = "sha256:458bd7e9ff3c150b245b0fcfbb9bd2593d1152ea7f0a7b91c1d185846da033fe", size = 982344, upload-time = "2025-10-24T07:18:57.05Z" }, - { url = "https://files.pythonhosted.org/packages/41/f7/f39e0f1c055c3b887d86b404aaf0ca197b5edfd235a8b81b45b25bac7fc3/lupa-2.6-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:052ee82cac5206a02df77119c325339acbc09f5ce66967f66a2e12a0f3211cad", size = 1156543, upload-time = "2025-10-24T07:18:59.251Z" }, - { url = "https://files.pythonhosted.org/packages/9e/9c/59e6cffa0d672d662ae17bd7ac8ecd2c89c9449dee499e3eb13ca9cd10d9/lupa-2.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96594eca3c87dd07938009e95e591e43d554c1dbd0385be03c100367141db5a8", size = 1047974, upload-time = "2025-10-24T07:19:01.449Z" }, - { url = "https://files.pythonhosted.org/packages/23/c6/a04e9cef7c052717fcb28fb63b3824802488f688391895b618e39be0f684/lupa-2.6-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8faddd9d198688c8884091173a088a8e920ecc96cda2ffed576a23574c4b3f6", size = 2073458, upload-time = "2025-10-24T07:19:03.369Z" }, - { url = 
"https://files.pythonhosted.org/packages/e6/10/824173d10f38b51fc77785228f01411b6ca28826ce27404c7c912e0e442c/lupa-2.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:daebb3a6b58095c917e76ba727ab37b27477fb926957c825205fbda431552134", size = 1067683, upload-time = "2025-10-24T07:19:06.2Z" }, - { url = "https://files.pythonhosted.org/packages/b6/dc/9692fbcf3c924d9c4ece2d8d2f724451ac2e09af0bd2a782db1cef34e799/lupa-2.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:f3154e68972befe0f81564e37d8142b5d5d79931a18309226a04ec92487d4ea3", size = 1171892, upload-time = "2025-10-24T07:19:08.544Z" }, - { url = "https://files.pythonhosted.org/packages/84/ff/e318b628d4643c278c96ab3ddea07fc36b075a57383c837f5b11e537ba9d/lupa-2.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e4dadf77b9fedc0bfa53417cc28dc2278a26d4cbd95c29f8927ad4d8fe0a7ef9", size = 2166641, upload-time = "2025-10-24T07:19:10.485Z" }, - { url = "https://files.pythonhosted.org/packages/12/f7/a6f9ec2806cf2d50826980cdb4b3cffc7691dc6f95e13cc728846d5cb793/lupa-2.6-cp314-cp314-win32.whl", hash = "sha256:cb34169c6fa3bab3e8ac58ca21b8a7102f6a94b6a5d08d3636312f3f02fafd8f", size = 1456857, upload-time = "2025-10-24T07:19:37.989Z" }, - { url = "https://files.pythonhosted.org/packages/c5/de/df71896f25bdc18360fdfa3b802cd7d57d7fede41a0e9724a4625b412c85/lupa-2.6-cp314-cp314-win_amd64.whl", hash = "sha256:b74f944fe46c421e25d0f8692aef1e842192f6f7f68034201382ac440ef9ea67", size = 1731191, upload-time = "2025-10-24T07:19:40.281Z" }, - { url = "https://files.pythonhosted.org/packages/47/3c/a1f23b01c54669465f5f4c4083107d496fbe6fb45998771420e9aadcf145/lupa-2.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0e21b716408a21ab65723f8841cf7f2f37a844b7a965eeabb785e27fca4099cf", size = 999343, upload-time = "2025-10-24T07:19:12.519Z" }, - { url = "https://files.pythonhosted.org/packages/c5/6d/501994291cb640bfa2ccf7f554be4e6914afa21c4026bd01bff9ca8aac57/lupa-2.6-cp314-cp314t-macosx_11_0_universal2.whl", hash = 
"sha256:589db872a141bfff828340079bbdf3e9a31f2689f4ca0d88f97d9e8c2eae6142", size = 2000730, upload-time = "2025-10-24T07:19:14.869Z" }, - { url = "https://files.pythonhosted.org/packages/53/a5/457ffb4f3f20469956c2d4c4842a7675e884efc895b2f23d126d23e126cc/lupa-2.6-cp314-cp314t-macosx_11_0_x86_64.whl", hash = "sha256:cd852a91a4a9d4dcbb9a58100f820a75a425703ec3e3f049055f60b8533b7953", size = 1021553, upload-time = "2025-10-24T07:19:17.123Z" }, - { url = "https://files.pythonhosted.org/packages/51/6b/36bb5a5d0960f2a5c7c700e0819abb76fd9bf9c1d8a66e5106416d6e9b14/lupa-2.6-cp314-cp314t-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:0334753be028358922415ca97a64a3048e4ed155413fc4eaf87dd0a7e2752983", size = 1133275, upload-time = "2025-10-24T07:19:20.51Z" }, - { url = "https://files.pythonhosted.org/packages/19/86/202ff4429f663013f37d2229f6176ca9f83678a50257d70f61a0a97281bf/lupa-2.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:661d895cd38c87658a34780fac54a690ec036ead743e41b74c3fb81a9e65a6aa", size = 1038441, upload-time = "2025-10-24T07:19:22.509Z" }, - { url = "https://files.pythonhosted.org/packages/a7/42/d8125f8e420714e5b52e9c08d88b5329dfb02dcca731b4f21faaee6cc5b5/lupa-2.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6aa58454ccc13878cc177c62529a2056be734da16369e451987ff92784994ca7", size = 2058324, upload-time = "2025-10-24T07:19:24.979Z" }, - { url = "https://files.pythonhosted.org/packages/2b/2c/47bf8b84059876e877a339717ddb595a4a7b0e8740bacae78ba527562e1c/lupa-2.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1425017264e470c98022bba8cff5bd46d054a827f5df6b80274f9cc71dafd24f", size = 1060250, upload-time = "2025-10-24T07:19:27.262Z" }, - { url = "https://files.pythonhosted.org/packages/c2/06/d88add2b6406ca1bdec99d11a429222837ca6d03bea42ca75afa169a78cb/lupa-2.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = 
"sha256:224af0532d216e3105f0a127410f12320f7c5f1aa0300bdf9646b8d9afb0048c", size = 1151126, upload-time = "2025-10-24T07:19:29.522Z" }, - { url = "https://files.pythonhosted.org/packages/b4/a0/89e6a024c3b4485b89ef86881c9d55e097e7cb0bdb74efb746f2fa6a9a76/lupa-2.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9abb98d5a8fd27c8285302e82199f0e56e463066f88f619d6594a450bf269d80", size = 2153693, upload-time = "2025-10-24T07:19:31.379Z" }, - { url = "https://files.pythonhosted.org/packages/b6/36/a0f007dc58fc1bbf51fb85dcc82fcb1f21b8c4261361de7dab0e3d8521ef/lupa-2.6-cp314-cp314t-win32.whl", hash = "sha256:1849efeba7a8f6fb8aa2c13790bee988fd242ae404bd459509640eeea3d1e291", size = 1590104, upload-time = "2025-10-24T07:19:33.514Z" }, - { url = "https://files.pythonhosted.org/packages/7d/5e/db903ce9cf82c48d6b91bf6d63ae4c8d0d17958939a4e04ba6b9f38b8643/lupa-2.6-cp314-cp314t-win_amd64.whl", hash = "sha256:fc1498d1a4fc028bc521c26d0fad4ca00ed63b952e32fb95949bda76a04bad52", size = 1913818, upload-time = "2025-10-24T07:19:36.039Z" }, -] - [[package]] name = "markdown-it-py" version = "4.0.0" @@ -1301,15 +1163,15 @@ wheels = [ [[package]] name = "opentelemetry-api" -version = "1.39.1" +version = "1.40.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "importlib-metadata" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/97/b9/3161be15bb8e3ad01be8be5a968a9237c3027c5be504362ff800fca3e442/opentelemetry_api-1.39.1.tar.gz", hash = "sha256:fbde8c80e1b937a2c61f20347e91c0c18a1940cecf012d62e65a7caf08967c9c", size = 65767, upload-time = "2025-12-11T13:32:39.182Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/1d/4049a9e8698361cc1a1aa03a6c59e4fa4c71e0c0f94a30f988a6876a2ae6/opentelemetry_api-1.40.0.tar.gz", hash = "sha256:159be641c0b04d11e9ecd576906462773eb97ae1b657730f0ecf64d32071569f", size = 70851, upload-time = "2026-03-04T14:17:21.555Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/cf/df/d3f1ddf4bb4cb50ed9b1139cc7b1c54c34a1e7ce8fd1b9a37c0d1551a6bd/opentelemetry_api-1.39.1-py3-none-any.whl", hash = "sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950", size = 66356, upload-time = "2025-12-11T13:32:17.304Z" }, + { url = "https://files.pythonhosted.org/packages/5f/bf/93795954016c522008da367da292adceed71cca6ee1717e1d64c83089099/opentelemetry_api-1.40.0-py3-none-any.whl", hash = "sha256:82dd69331ae74b06f6a874704be0cfaa49a1650e1537d4a813b86ecef7d0ecf9", size = 68676, upload-time = "2026-03-04T14:17:01.24Z" }, ] [[package]] @@ -1342,29 +1204,20 @@ wheels = [ [[package]] name = "pathable" -version = "0.4.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/67/93/8f2c2075b180c12c1e9f6a09d1a985bc2036906b13dff1d8917e395f2048/pathable-0.4.4.tar.gz", hash = "sha256:6905a3cd17804edfac7875b5f6c9142a218c7caef78693c2dbbbfbac186d88b2", size = 8124, upload-time = "2025-01-10T18:43:13.247Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7d/eb/b6260b31b1a96386c0a880edebe26f89669098acea8e0318bff6adb378fd/pathable-0.4.4-py3-none-any.whl", hash = "sha256:5ae9e94793b6ef5a4cbe0a7ce9dbbefc1eec38df253763fd0aeeacf2762dbbc2", size = 9592, upload-time = "2025-01-10T18:43:11.88Z" }, -] - -[[package]] -name = "pathvalidate" -version = "3.3.1" +version = "0.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fa/2a/52a8da6fe965dea6192eb716b357558e103aea0a1e9a8352ad575a8406ca/pathvalidate-3.3.1.tar.gz", hash = "sha256:b18c07212bfead624345bb8e1d6141cdcf15a39736994ea0b94035ad2b1ba177", size = 63262, upload-time = "2025-06-15T09:07:20.736Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/55/b748445cb4ea6b125626f15379be7c96d1035d4fa3e8fee362fa92298abf/pathable-0.5.0.tar.gz", hash = "sha256:d81938348a1cacb525e7c75166270644782c0fb9c8cecc16be033e71427e0ef1", size = 16655, 
upload-time = "2026-02-20T08:47:00.748Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/70/875f4a23bfc4731703a5835487d0d2fb999031bd415e7d17c0ae615c18b7/pathvalidate-3.3.1-py3-none-any.whl", hash = "sha256:5263baab691f8e1af96092fa5137ee17df5bdfbd6cff1fcac4d6ef4bc2e1735f", size = 24305, upload-time = "2025-06-15T09:07:19.117Z" }, + { url = "https://files.pythonhosted.org/packages/52/96/5a770e5c461462575474468e5af931cff9de036e7c2b4fea23c1c58d2cbe/pathable-0.5.0-py3-none-any.whl", hash = "sha256:646e3d09491a6351a0c82632a09c02cdf70a252e73196b36d8a15ba0a114f0a6", size = 16867, upload-time = "2026-02-20T08:46:59.536Z" }, ] [[package]] name = "platformdirs" -version = "4.9.1" +version = "4.9.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/d5/763666321efaded11112de8b7a7f2273dd8d1e205168e73c334e54b0ab9a/platformdirs-4.9.1.tar.gz", hash = "sha256:f310f16e89c4e29117805d8328f7c10876eeff36c94eac879532812110f7d39f", size = 28392, upload-time = "2026-02-14T21:02:44.973Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/04/fea538adf7dbbd6d186f551d595961e564a3b6715bdf276b477460858672/platformdirs-4.9.2.tar.gz", hash = "sha256:9a33809944b9db043ad67ca0db94b14bf452cc6aeaac46a88ea55b26e2e9d291", size = 28394, upload-time = "2026-02-16T03:56:10.574Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/77/e8c95e95f1d4cdd88c90a96e31980df7e709e51059fac150046ad67fac63/platformdirs-4.9.1-py3-none-any.whl", hash = "sha256:61d8b967d34791c162d30d60737369cbbd77debad5b981c4bfda1842e71e0d66", size = 21307, upload-time = "2026-02-14T21:02:43.492Z" }, + { url = "https://files.pythonhosted.org/packages/48/31/05e764397056194206169869b50cf2fee4dbbbc71b344705b9c0d878d4d8/platformdirs-4.9.2-py3-none-any.whl", hash = "sha256:9170634f126f8efdae22fb58ae8a0eaa86f38365bc57897a6c4f781d1f5875bd", size = 21168, upload-time = "2026-02-16T03:56:08.891Z" }, ] [[package]] @@ -1392,15 +1245,6 @@ wheels = 
[ { url = "https://files.pythonhosted.org/packages/5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77", size = 226437, upload-time = "2025-12-16T21:14:32.409Z" }, ] -[[package]] -name = "prometheus-client" -version = "0.24.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f0/58/a794d23feb6b00fc0c72787d7e87d872a6730dd9ed7c7b3e954637d8f280/prometheus_client-0.24.1.tar.gz", hash = "sha256:7e0ced7fbbd40f7b84962d5d2ab6f17ef88a72504dcf7c0b40737b43b2a461f9", size = 85616, upload-time = "2026-01-14T15:26:26.965Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl", hash = "sha256:150db128af71a5c2482b36e588fc8a6b95e498750da4b17065947c16070f4055", size = 64057, upload-time = "2026-01-14T15:26:24.42Z" }, -] - [[package]] name = "propcache" version = "0.4.1" @@ -1487,21 +1331,21 @@ wheels = [ [[package]] name = "py-key-value-aio" -version = "0.3.0" +version = "0.4.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "beartype" }, - { name = "py-key-value-shared" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/93/ce/3136b771dddf5ac905cc193b461eb67967cf3979688c6696e1f2cdcde7ea/py_key_value_aio-0.3.0.tar.gz", hash = "sha256:858e852fcf6d696d231266da66042d3355a7f9871650415feef9fca7a6cd4155", size = 50801, upload-time = "2025-11-17T16:50:04.711Z" } +sdist = { url = "https://files.pythonhosted.org/packages/04/3c/0397c072a38d4bc580994b42e0c90c5f44f679303489e4376289534735e5/py_key_value_aio-0.4.4.tar.gz", hash = "sha256:e3012e6243ed7cc09bb05457bd4d03b1ba5c2b1ca8700096b3927db79ffbbe55", size = 92300, upload-time = "2026-02-16T21:21:43.245Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/99/10/72f6f213b8f0bce36eff21fda0a13271834e9eeff7f9609b01afdc253c79/py_key_value_aio-0.3.0-py3-none-any.whl", hash = "sha256:1c781915766078bfd608daa769fefb97e65d1d73746a3dfb640460e322071b64", size = 96342, upload-time = "2025-11-17T16:50:03.801Z" }, + { url = "https://files.pythonhosted.org/packages/32/69/f1b537ee70b7def42d63124a539ed3026a11a3ffc3086947a1ca6e861868/py_key_value_aio-0.4.4-py3-none-any.whl", hash = "sha256:18e17564ecae61b987f909fc2cd41ee2012c84b4b1dcb8c055cf8b4bc1bf3f5d", size = 152291, upload-time = "2026-02-16T21:21:44.241Z" }, ] [package.optional-dependencies] -disk = [ - { name = "diskcache" }, - { name = "pathvalidate" }, +filetree = [ + { name = "aiofile" }, + { name = "anyio" }, ] keyring = [ { name = "keyring" }, @@ -1509,22 +1353,6 @@ keyring = [ memory = [ { name = "cachetools" }, ] -redis = [ - { name = "redis" }, -] - -[[package]] -name = "py-key-value-shared" -version = "0.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "beartype" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7b/e4/1971dfc4620a3a15b4579fe99e024f5edd6e0967a71154771a059daff4db/py_key_value_shared-0.3.0.tar.gz", hash = "sha256:8fdd786cf96c3e900102945f92aa1473138ebe960ef49da1c833790160c28a4b", size = 11666, upload-time = "2025-11-17T16:50:06.849Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/e4/b8b0a03ece72f47dce2307d36e1c34725b7223d209fc679315ffe6a4e2c3/py_key_value_shared-0.3.0-py3-none-any.whl", hash = "sha256:5b0efba7ebca08bb158b1e93afc2f07d30b8f40c2fc12ce24a4c0d84f42f9298", size = 19560, upload-time = "2025-11-17T16:50:05.954Z" }, -] [[package]] name = "pycparser" @@ -1628,38 +1456,16 @@ wheels = [ [[package]] name = "pydantic-settings" -version = "2.13.0" +version = "2.13.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, { name = "typing-inspection" 
}, ] -sdist = { url = "https://files.pythonhosted.org/packages/96/a1/ae859ffac5a3338a66b74c5e29e244fd3a3cc483c89feaf9f56c39898d75/pydantic_settings-2.13.0.tar.gz", hash = "sha256:95d875514610e8595672800a5c40b073e99e4aae467fa7c8f9c263061ea2e1fe", size = 222450, upload-time = "2026-02-15T12:11:23.476Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/1a/dd1b9d7e627486cf8e7523d09b70010e05a4bc41414f4ae6ce184cf0afb6/pydantic_settings-2.13.0-py3-none-any.whl", hash = "sha256:d67b576fff39cd086b595441bf9c75d4193ca9c0ed643b90360694d0f1240246", size = 58429, upload-time = "2026-02-15T12:11:22.133Z" }, -] - -[[package]] -name = "pydocket" -version = "0.17.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cloudpickle" }, - { name = "croniter" }, - { name = "fakeredis", extra = ["lua"] }, - { name = "opentelemetry-api" }, - { name = "prometheus-client" }, - { name = "py-key-value-aio", extra = ["memory", "redis"] }, - { name = "python-json-logger" }, - { name = "redis" }, - { name = "rich" }, - { name = "typer" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/cd/b2/5e12dbe2acf59e4499285e8eee66e8e81b6ba2f553696d2f4ccca0a7978c/pydocket-0.17.7.tar.gz", hash = "sha256:5c77ec6731a167cdcb44174abf793fe63e7b6c1c1c8a799cc6ec7502b361ee77", size = 347071, upload-time = "2026-02-11T21:01:31.744Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/6d/fffca34caecc4a3f97bda81b2098da5e8ab7efc9a66e819074a11955d87e/pydantic_settings-2.13.1.tar.gz", hash = "sha256:b4c11847b15237fb0171e1462bf540e294affb9b86db4d9aa5c01730bdbe4025", size = 223826, upload-time = "2026-02-19T13:45:08.055Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/c7/68f2553819965326f968375f02597d49efe71b309ba9d8fef539aeb51c48/pydocket-0.17.7-py3-none-any.whl", hash = "sha256:d1e0921ac02026c4a0140fc72a3848545f3e91e6e74c6e32c588489017c130b2", size = 94608, upload-time = "2026-02-11T21:01:30.111Z" }, 
+ { url = "https://files.pythonhosted.org/packages/00/4b/ccc026168948fec4f7555b9164c724cf4125eac006e176541483d2c959be/pydantic_settings-2.13.1-py3-none-any.whl", hash = "sha256:d56fd801823dbeae7f0975e1f8c8e25c258eb75d278ea7abb5d9cebb01b56237", size = 58929, upload-time = "2026-02-19T13:45:06.034Z" }, ] [[package]] @@ -1763,33 +1569,25 @@ wheels = [ ] [[package]] -name = "python-dateutil" -version = "2.9.0.post0" +name = "python-discovery" +version = "1.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "six" }, + { name = "filelock" }, + { name = "platformdirs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/bb/93a3e83bdf9322c7e21cafd092e56a4a17c4d8ef4277b6eb01af1a540a6f/python_discovery-1.1.0.tar.gz", hash = "sha256:447941ba1aed8cc2ab7ee3cb91be5fc137c5bdbb05b7e6ea62fbdcb66e50b268", size = 55674, upload-time = "2026-02-26T09:42:49.668Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, + { url = "https://files.pythonhosted.org/packages/06/54/82a6e2ef37f0f23dccac604b9585bdcbd0698604feb64807dcb72853693e/python_discovery-1.1.0-py3-none-any.whl", hash = "sha256:a162893b8809727f54594a99ad2179d2ede4bf953e12d4c7abc3cc9cdbd1437b", size = 30687, upload-time = "2026-02-26T09:42:48.548Z" }, ] [[package]] name = "python-dotenv" -version = "1.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, -] - -[[package]] -name = "python-json-logger" -version = "4.0.0" +version = "1.2.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/29/bf/eca6a3d43db1dae7070f70e160ab20b807627ba953663ba07928cdd3dc58/python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f", size = 17683, upload-time = "2025-10-06T04:15:18.984Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/e5/fecf13f06e5e5f67e8837d777d1bc43fac0ed2b77a676804df5c34744727/python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2", size = 15548, upload-time = "2025-10-06T04:15:17.553Z" }, + { url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" }, ] [[package]] @@ -1801,15 +1599,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" }, ] -[[package]] -name = "pytz" -version = "2025.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, -] - [[package]] name = "pywin32" version = "311" @@ -1890,55 +1679,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a9/10/e4b1e0e5b6b6745c8098c275b69bc9d73e9542d5c7da4f137542b499ed44/readchar-4.2.1-py3-none-any.whl", hash = "sha256:a769305cd3994bb5fa2764aa4073452dc105a4ec39068ffe6efd3c20c60acc77", size = 9350, upload-time = "2024-11-04T18:28:02.859Z" }, ] -[[package]] -name = "redis" -version = "7.1.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f7/80/2971931d27651affa88a44c0ad7b8c4a19dc29c998abb20b23868d319b59/redis-7.1.1.tar.gz", hash = "sha256:a2814b2bda15b39dad11391cc48edac4697214a8a5a4bd10abe936ab4892eb43", size = 4800064, upload-time = "2026-02-09T18:39:40.292Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/29/55/1de1d812ba1481fa4b37fb03b4eec0fcb71b6a0d44c04ea3482eb017600f/redis-7.1.1-py3-none-any.whl", hash = "sha256:f77817f16071c2950492c67d40b771fa493eb3fccc630a424a10976dbb794b7a", size = 
356057, upload-time = "2026-02-09T18:39:38.602Z" }, -] - [[package]] name = "referencing" -version = "0.36.2" +version = "0.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, { name = "rpds-py" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, -] - -[[package]] -name = "requests" -version = "2.32.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "charset-normalizer" }, - { name = "idna" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, 
upload-time = "2025-08-18T20:46:00.542Z" }, + { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" }, ] [[package]] name = "rich" -version = "14.3.2" +version = "14.3.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/74/99/a4cab2acbb884f80e558b0771e97e21e939c5dfb460f488d19df485e8298/rich-14.3.2.tar.gz", hash = "sha256:e712f11c1a562a11843306f5ed999475f09ac31ffb64281f73ab29ffdda8b3b8", size = 230143, upload-time = "2026-02-01T16:20:47.908Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/45/615f5babd880b4bd7d405cc0dc348234c5ffb6ed1ea33e152ede08b2072d/rich-14.3.2-py3-none-any.whl", hash = "sha256:08e67c3e90884651da3239ea668222d19bea7b589149d8014a21c633420dbb69", size = 309963, upload-time = "2026-02-01T16:20:46.078Z" }, + { url = "https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, ] [[package]] @@ -2037,27 +1802,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.15.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/04/dc/4e6ac71b511b141cf626357a3946679abeba4cf67bc7cc5a17920f31e10d/ruff-0.15.1.tar.gz", hash = 
"sha256:c590fe13fb57c97141ae975c03a1aedb3d3156030cabd740d6ff0b0d601e203f", size = 4540855, upload-time = "2026-02-12T23:09:09.998Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/23/bf/e6e4324238c17f9d9120a9d60aa99a7daaa21204c07fcd84e2ef03bb5fd1/ruff-0.15.1-py3-none-linux_armv6l.whl", hash = "sha256:b101ed7cf4615bda6ffe65bdb59f964e9f4a0d3f85cbf0e54f0ab76d7b90228a", size = 10367819, upload-time = "2026-02-12T23:09:03.598Z" }, - { url = "https://files.pythonhosted.org/packages/b3/ea/c8f89d32e7912269d38c58f3649e453ac32c528f93bb7f4219258be2e7ed/ruff-0.15.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:939c995e9277e63ea632cc8d3fae17aa758526f49a9a850d2e7e758bfef46602", size = 10798618, upload-time = "2026-02-12T23:09:22.928Z" }, - { url = "https://files.pythonhosted.org/packages/5e/0f/1d0d88bc862624247d82c20c10d4c0f6bb2f346559d8af281674cf327f15/ruff-0.15.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1d83466455fdefe60b8d9c8df81d3c1bbb2115cede53549d3b522ce2bc703899", size = 10148518, upload-time = "2026-02-12T23:08:58.339Z" }, - { url = "https://files.pythonhosted.org/packages/f5/c8/291c49cefaa4a9248e986256df2ade7add79388fe179e0691be06fae6f37/ruff-0.15.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9457e3c3291024866222b96108ab2d8265b477e5b1534c7ddb1810904858d16", size = 10518811, upload-time = "2026-02-12T23:09:31.865Z" }, - { url = "https://files.pythonhosted.org/packages/c3/1a/f5707440e5ae43ffa5365cac8bbb91e9665f4a883f560893829cf16a606b/ruff-0.15.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:92c92b003e9d4f7fbd33b1867bb15a1b785b1735069108dfc23821ba045b29bc", size = 10196169, upload-time = "2026-02-12T23:09:17.306Z" }, - { url = "https://files.pythonhosted.org/packages/2a/ff/26ddc8c4da04c8fd3ee65a89c9fb99eaa5c30394269d424461467be2271f/ruff-0.15.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fe5c41ab43e3a06778844c586251eb5a510f67125427625f9eb2b9526535779", size = 
10990491, upload-time = "2026-02-12T23:09:25.503Z" }, - { url = "https://files.pythonhosted.org/packages/fc/00/50920cb385b89413f7cdb4bb9bc8fc59c1b0f30028d8bccc294189a54955/ruff-0.15.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66a6dd6df4d80dc382c6484f8ce1bcceb55c32e9f27a8b94c32f6c7331bf14fb", size = 11843280, upload-time = "2026-02-12T23:09:19.88Z" }, - { url = "https://files.pythonhosted.org/packages/5d/6d/2f5cad8380caf5632a15460c323ae326f1e1a2b5b90a6ee7519017a017ca/ruff-0.15.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a4a42cbb8af0bda9bcd7606b064d7c0bc311a88d141d02f78920be6acb5aa83", size = 11274336, upload-time = "2026-02-12T23:09:14.907Z" }, - { url = "https://files.pythonhosted.org/packages/a3/1d/5f56cae1d6c40b8a318513599b35ea4b075d7dc1cd1d04449578c29d1d75/ruff-0.15.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ab064052c31dddada35079901592dfba2e05f5b1e43af3954aafcbc1096a5b2", size = 11137288, upload-time = "2026-02-12T23:09:07.475Z" }, - { url = "https://files.pythonhosted.org/packages/cd/20/6f8d7d8f768c93b0382b33b9306b3b999918816da46537d5a61635514635/ruff-0.15.1-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:5631c940fe9fe91f817a4c2ea4e81f47bee3ca4aa646134a24374f3c19ad9454", size = 11070681, upload-time = "2026-02-12T23:08:55.43Z" }, - { url = "https://files.pythonhosted.org/packages/9a/67/d640ac76069f64cdea59dba02af2e00b1fa30e2103c7f8d049c0cff4cafd/ruff-0.15.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:68138a4ba184b4691ccdc39f7795c66b3c68160c586519e7e8444cf5a53e1b4c", size = 10486401, upload-time = "2026-02-12T23:09:27.927Z" }, - { url = "https://files.pythonhosted.org/packages/65/3d/e1429f64a3ff89297497916b88c32a5cc88eeca7e9c787072d0e7f1d3e1e/ruff-0.15.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:518f9af03bfc33c03bdb4cb63fabc935341bb7f54af500f92ac309ecfbba6330", size = 10197452, upload-time = "2026-02-12T23:09:12.147Z" }, - { url = 
"https://files.pythonhosted.org/packages/78/83/e2c3bade17dad63bf1e1c2ffaf11490603b760be149e1419b07049b36ef2/ruff-0.15.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:da79f4d6a826caaea95de0237a67e33b81e6ec2e25fc7e1993a4015dffca7c61", size = 10693900, upload-time = "2026-02-12T23:09:34.418Z" }, - { url = "https://files.pythonhosted.org/packages/a1/27/fdc0e11a813e6338e0706e8b39bb7a1d61ea5b36873b351acee7e524a72a/ruff-0.15.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3dd86dccb83cd7d4dcfac303ffc277e6048600dfc22e38158afa208e8bf94a1f", size = 11227302, upload-time = "2026-02-12T23:09:36.536Z" }, - { url = "https://files.pythonhosted.org/packages/f6/58/ac864a75067dcbd3b95be5ab4eb2b601d7fbc3d3d736a27e391a4f92a5c1/ruff-0.15.1-py3-none-win32.whl", hash = "sha256:660975d9cb49b5d5278b12b03bb9951d554543a90b74ed5d366b20e2c57c2098", size = 10462555, upload-time = "2026-02-12T23:09:29.899Z" }, - { url = "https://files.pythonhosted.org/packages/e0/5e/d4ccc8a27ecdb78116feac4935dfc39d1304536f4296168f91ed3ec00cd2/ruff-0.15.1-py3-none-win_amd64.whl", hash = "sha256:c820fef9dd5d4172a6570e5721704a96c6679b80cf7be41659ed439653f62336", size = 11599956, upload-time = "2026-02-12T23:09:01.157Z" }, - { url = "https://files.pythonhosted.org/packages/2a/07/5bda6a85b220c64c65686bc85bd0bbb23b29c62b3a9f9433fa55f17cda93/ruff-0.15.1-py3-none-win_arm64.whl", hash = "sha256:5ff7d5f0f88567850f45081fac8f4ec212be8d0b963e385c3f7d0d2eb4899416", size = 10874604, upload-time = "2026-02-12T23:09:05.515Z" }, +version = "0.15.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/da/31/d6e536cdebb6568ae75a7f00e4b4819ae0ad2640c3604c305a0428680b0c/ruff-0.15.4.tar.gz", hash = "sha256:3412195319e42d634470cc97aa9803d07e9d5c9223b99bcb1518f0c725f26ae1", size = 4569550, upload-time = "2026-02-26T20:04:14.959Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f2/82/c11a03cfec3a4d26a0ea1e571f0f44be5993b923f905eeddfc397c13d360/ruff-0.15.4-py3-none-linux_armv6l.whl", hash = "sha256:a1810931c41606c686bae8b5b9a8072adac2f611bb433c0ba476acba17a332e0", size = 10453333, upload-time = "2026-02-26T20:04:20.093Z" }, + { url = "https://files.pythonhosted.org/packages/ce/5d/6a1f271f6e31dffb31855996493641edc3eef8077b883eaf007a2f1c2976/ruff-0.15.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5a1632c66672b8b4d3e1d1782859e98d6e0b4e70829530666644286600a33992", size = 10853356, upload-time = "2026-02-26T20:04:05.808Z" }, + { url = "https://files.pythonhosted.org/packages/b1/d8/0fab9f8842b83b1a9c2bf81b85063f65e93fb512e60effa95b0be49bfc54/ruff-0.15.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a4386ba2cd6c0f4ff75252845906acc7c7c8e1ac567b7bc3d373686ac8c222ba", size = 10187434, upload-time = "2026-02-26T20:03:54.656Z" }, + { url = "https://files.pythonhosted.org/packages/85/cc/cc220fd9394eff5db8d94dec199eec56dd6c9f3651d8869d024867a91030/ruff-0.15.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2496488bdfd3732747558b6f95ae427ff066d1fcd054daf75f5a50674411e75", size = 10535456, upload-time = "2026-02-26T20:03:52.738Z" }, + { url = "https://files.pythonhosted.org/packages/fa/0f/bced38fa5cf24373ec767713c8e4cadc90247f3863605fb030e597878661/ruff-0.15.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f1c4893841ff2d54cbda1b2860fa3260173df5ddd7b95d370186f8a5e66a4ac", size = 10287772, upload-time = "2026-02-26T20:04:08.138Z" }, + { url = "https://files.pythonhosted.org/packages/2b/90/58a1802d84fed15f8f281925b21ab3cecd813bde52a8ca033a4de8ab0e7a/ruff-0.15.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:820b8766bd65503b6c30aaa6331e8ef3a6e564f7999c844e9a547c40179e440a", size = 11049051, upload-time = "2026-02-26T20:04:03.53Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/ac/b7ad36703c35f3866584564dc15f12f91cb1a26a897dc2fd13d7cb3ae1af/ruff-0.15.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9fb74bab47139c1751f900f857fa503987253c3ef89129b24ed375e72873e85", size = 11890494, upload-time = "2026-02-26T20:04:10.497Z" }, + { url = "https://files.pythonhosted.org/packages/93/3d/3eb2f47a39a8b0da99faf9c54d3eb24720add1e886a5309d4d1be73a6380/ruff-0.15.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f80c98765949c518142b3a50a5db89343aa90f2c2bf7799de9986498ae6176db", size = 11326221, upload-time = "2026-02-26T20:04:12.84Z" }, + { url = "https://files.pythonhosted.org/packages/ff/90/bf134f4c1e5243e62690e09d63c55df948a74084c8ac3e48a88468314da6/ruff-0.15.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451a2e224151729b3b6c9ffb36aed9091b2996fe4bdbd11f47e27d8f2e8888ec", size = 11168459, upload-time = "2026-02-26T20:04:00.969Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e5/a64d27688789b06b5d55162aafc32059bb8c989c61a5139a36e1368285eb/ruff-0.15.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:a8f157f2e583c513c4f5f896163a93198297371f34c04220daf40d133fdd4f7f", size = 11104366, upload-time = "2026-02-26T20:03:48.099Z" }, + { url = "https://files.pythonhosted.org/packages/f1/f6/32d1dcb66a2559763fc3027bdd65836cad9eb09d90f2ed6a63d8e9252b02/ruff-0.15.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:917cc68503357021f541e69b35361c99387cdbbf99bd0ea4aa6f28ca99ff5338", size = 10510887, upload-time = "2026-02-26T20:03:45.771Z" }, + { url = "https://files.pythonhosted.org/packages/ff/92/22d1ced50971c5b6433aed166fcef8c9343f567a94cf2b9d9089f6aa80fe/ruff-0.15.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e9737c8161da79fd7cfec19f1e35620375bd8b2a50c3e77fa3d2c16f574105cc", size = 10285939, upload-time = "2026-02-26T20:04:22.42Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/f4/7c20aec3143837641a02509a4668fb146a642fd1211846634edc17eb5563/ruff-0.15.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:291258c917539e18f6ba40482fe31d6f5ac023994ee11d7bdafd716f2aab8a68", size = 10765471, upload-time = "2026-02-26T20:03:58.924Z" }, + { url = "https://files.pythonhosted.org/packages/d0/09/6d2f7586f09a16120aebdff8f64d962d7c4348313c77ebb29c566cefc357/ruff-0.15.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3f83c45911da6f2cd5936c436cf86b9f09f09165f033a99dcf7477e34041cbc3", size = 11263382, upload-time = "2026-02-26T20:04:24.424Z" }, + { url = "https://files.pythonhosted.org/packages/1b/fa/2ef715a1cd329ef47c1a050e10dee91a9054b7ce2fcfdd6a06d139afb7ec/ruff-0.15.4-py3-none-win32.whl", hash = "sha256:65594a2d557d4ee9f02834fcdf0a28daa8b3b9f6cb2cb93846025a36db47ef22", size = 10506664, upload-time = "2026-02-26T20:03:50.56Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a8/c688ef7e29983976820d18710f955751d9f4d4eb69df658af3d006e2ba3e/ruff-0.15.4-py3-none-win_amd64.whl", hash = "sha256:04196ad44f0df220c2ece5b0e959c2f37c777375ec744397d21d15b50a75264f", size = 11651048, upload-time = "2026-02-26T20:04:17.191Z" }, + { url = "https://files.pythonhosted.org/packages/3e/0a/9e1be9035b37448ce2e68c978f0591da94389ade5a5abafa4cf99985d1b2/ruff-0.15.4-py3-none-win_arm64.whl", hash = "sha256:60d5177e8cfc70e51b9c5fad936c634872a74209f934c1e79107d11787ad5453", size = 10966776, upload-time = "2026-02-26T20:03:56.908Z" }, ] [[package]] @@ -2085,44 +1850,17 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/46/f5af3402b579fd5e11573ce652019a67074317e18c1935cc0b4ba9b35552/secretstorage-3.5.0-py3-none-any.whl", hash = "sha256:0ce65888c0725fcb2c5bc0fdb8e5438eece02c523557ea40ce0703c266248137", size = 15554, upload-time = "2025-11-23T19:02:51.545Z" }, ] -[[package]] -name = "shellingham" -version = "1.5.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, -] - -[[package]] -name = "six" -version = "1.17.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, -] - -[[package]] -name = "sortedcontainers" -version = "2.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" 
}, -] - [[package]] name = "sse-starlette" -version = "3.2.0" +version = "3.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "starlette" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/00d280c03ffd39aaee0e86ec81e2d3b9253036a0f93f51d10503adef0e65/sse_starlette-3.2.0.tar.gz", hash = "sha256:8127594edfb51abe44eac9c49e59b0b01f1039d0c7461c6fd91d4e03b70da422", size = 27253, upload-time = "2026-01-17T13:11:05.62Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/9f/c3695c2d2d4ef70072c3a06992850498b01c6bc9be531950813716b426fa/sse_starlette-3.3.2.tar.gz", hash = "sha256:678fca55a1945c734d8472a6cad186a55ab02840b4f6786f5ee8770970579dcd", size = 32326, upload-time = "2026-02-28T11:24:34.36Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/7f/832f015020844a8b8f7a9cbc103dd76ba8e3875004c41e08440ea3a2b41a/sse_starlette-3.2.0-py3-none-any.whl", hash = "sha256:5876954bd51920fc2cd51baee47a080eb88a37b5b784e615abb0b283f801cdbf", size = 12763, upload-time = "2026-01-17T13:11:03.775Z" }, + { url = "https://files.pythonhosted.org/packages/61/28/8cb142d3fe80c4a2d8af54ca0b003f47ce0ba920974e7990fa6e016402d1/sse_starlette-3.3.2-py3-none-any.whl", hash = "sha256:5c3ea3dad425c601236726af2f27689b74494643f57017cafcb6f8c9acfbb862", size = 14270, upload-time = "2026-02-28T11:24:32.984Z" }, ] [[package]] @@ -2140,41 +1878,26 @@ wheels = [ [[package]] name = "ty" -version = "0.0.17" +version = "0.0.20" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/66/c3/41ae6346443eedb65b96761abfab890a48ce2aa5a8a27af69c5c5d99064d/ty-0.0.17.tar.gz", hash = "sha256:847ed6c120913e280bf9b54d8eaa7a1049708acb8824ad234e71498e8ad09f97", size = 5167209, upload-time = "2026-02-13T13:26:36.835Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/01/0ef15c22a1c54b0f728ceff3f62d478dbf8b0dcf8ff7b80b954f79584f3e/ty-0.0.17-py3-none-linux_armv6l.whl", 
hash = "sha256:64a9a16555cc8867d35c2647c2f1afbd3cae55f68fd95283a574d1bb04fe93e0", size = 10192793, upload-time = "2026-02-13T13:27:13.943Z" }, - { url = "https://files.pythonhosted.org/packages/0f/2c/f4c322d9cded56edc016b1092c14b95cf58c8a33b4787316ea752bb9418e/ty-0.0.17-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:eb2dbd8acd5c5a55f4af0d479523e7c7265a88542efe73ed3d696eb1ba7b6454", size = 10051977, upload-time = "2026-02-13T13:26:57.741Z" }, - { url = "https://files.pythonhosted.org/packages/4c/a5/43746c1ff81e784f5fc303afc61fe5bcd85d0fcf3ef65cb2cef78c7486c7/ty-0.0.17-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f18f5fd927bc628deb9ea2df40f06b5f79c5ccf355db732025a3e8e7152801f6", size = 9564639, upload-time = "2026-02-13T13:26:42.781Z" }, - { url = "https://files.pythonhosted.org/packages/d6/b8/280b04e14a9c0474af574f929fba2398b5e1c123c1e7735893b4cd73d13c/ty-0.0.17-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5383814d1d7a5cc53b3b07661856bab04bb2aac7a677c8d33c55169acdaa83df", size = 10061204, upload-time = "2026-02-13T13:27:00.152Z" }, - { url = "https://files.pythonhosted.org/packages/2a/d7/493e1607d8dfe48288d8a768a2adc38ee27ef50e57f0af41ff273987cda0/ty-0.0.17-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c20423b8744b484f93e7bf2ef8a9724bca2657873593f9f41d08bd9f83444c9", size = 10013116, upload-time = "2026-02-13T13:26:34.543Z" }, - { url = "https://files.pythonhosted.org/packages/80/ef/22f3ed401520afac90dbdf1f9b8b7755d85b0d5c35c1cb35cf5bd11b59c2/ty-0.0.17-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6f5b1aba97db9af86517b911674b02f5bc310750485dc47603a105bd0e83ddd", size = 10533623, upload-time = "2026-02-13T13:26:31.449Z" }, - { url = "https://files.pythonhosted.org/packages/75/ce/744b15279a11ac7138832e3a55595706b4a8a209c9f878e3ab8e571d9032/ty-0.0.17-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:488bce1a9bea80b851a97cd34c4d2ffcd69593d6c3f54a72ae02e5c6e47f3d0c", size = 11069750, upload-time = "2026-02-13T13:26:48.638Z" }, - { url = "https://files.pythonhosted.org/packages/f2/be/1133c91f15a0e00d466c24f80df486d630d95d1b2af63296941f7473812f/ty-0.0.17-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8df66b91ec84239420985ec215e7f7549bfda2ac036a3b3c065f119d1c06825a", size = 10870862, upload-time = "2026-02-13T13:26:54.715Z" }, - { url = "https://files.pythonhosted.org/packages/3e/4a/a2ed209ef215b62b2d3246e07e833081e07d913adf7e0448fc204be443d6/ty-0.0.17-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:002139e807c53002790dfefe6e2f45ab0e04012e76db3d7c8286f96ec121af8f", size = 10628118, upload-time = "2026-02-13T13:26:45.439Z" }, - { url = "https://files.pythonhosted.org/packages/b3/0c/87476004cb5228e9719b98afffad82c3ef1f84334bde8527bcacba7b18cb/ty-0.0.17-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6c4e01f05ce82e5d489ab3900ca0899a56c4ccb52659453780c83e5b19e2b64c", size = 10038185, upload-time = "2026-02-13T13:27:02.693Z" }, - { url = "https://files.pythonhosted.org/packages/46/4b/98f0b3ba9aef53c1f0305519536967a4aa793a69ed72677b0a625c5313ac/ty-0.0.17-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2b226dd1e99c0d2152d218c7e440150d1a47ce3c431871f0efa073bbf899e881", size = 10047644, upload-time = "2026-02-13T13:27:05.474Z" }, - { url = "https://files.pythonhosted.org/packages/93/e0/06737bb80aa1a9103b8651d2eb691a7e53f1ed54111152be25f4a02745db/ty-0.0.17-py3-none-musllinux_1_2_i686.whl", hash = "sha256:8b11f1da7859e0ad69e84b3c5ef9a7b055ceed376a432fad44231bdfc48061c2", size = 10231140, upload-time = "2026-02-13T13:27:10.844Z" }, - { url = "https://files.pythonhosted.org/packages/7c/79/e2a606bd8852383ba9abfdd578f4a227bd18504145381a10a5f886b4e751/ty-0.0.17-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c04e196809ff570559054d3e011425fd7c04161529eb551b3625654e5f2434cb", size = 10718344, upload-time = 
"2026-02-13T13:26:51.66Z" }, - { url = "https://files.pythonhosted.org/packages/c5/2d/2663984ac11de6d78f74432b8b14ba64d170b45194312852b7543cf7fd56/ty-0.0.17-py3-none-win32.whl", hash = "sha256:305b6ed150b2740d00a817b193373d21f0767e10f94ac47abfc3b2e5a5aec809", size = 9672932, upload-time = "2026-02-13T13:27:08.522Z" }, - { url = "https://files.pythonhosted.org/packages/de/b5/39be78f30b31ee9f5a585969930c7248354db90494ff5e3d0756560fb731/ty-0.0.17-py3-none-win_amd64.whl", hash = "sha256:531828267527aee7a63e972f54e5eee21d9281b72baf18e5c2850c6b862add83", size = 10542138, upload-time = "2026-02-13T13:27:17.084Z" }, - { url = "https://files.pythonhosted.org/packages/40/b7/f875c729c5d0079640c75bad2c7e5d43edc90f16ba242f28a11966df8f65/ty-0.0.17-py3-none-win_arm64.whl", hash = "sha256:de9810234c0c8d75073457e10a84825b9cd72e6629826b7f01c7a0b266ae25b1", size = 10023068, upload-time = "2026-02-13T13:26:39.637Z" }, -] - -[[package]] -name = "typer" -version = "0.23.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "annotated-doc" }, - { name = "click" }, - { name = "rich" }, - { name = "shellingham" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fd/07/b822e1b307d40e263e8253d2384cf98c51aa2368cc7ba9a07e523a1d964b/typer-0.23.1.tar.gz", hash = "sha256:2070374e4d31c83e7b61362fd859aa683576432fd5b026b060ad6b4cd3b86134", size = 120047, upload-time = "2026-02-13T10:04:30.984Z" } +sdist = { url = "https://files.pythonhosted.org/packages/56/95/8de69bb98417227b01f1b1d743c819d6456c9fd140255b6124b05b17dfd6/ty-0.0.20.tar.gz", hash = "sha256:ebba6be7974c14efbb2a9adda6ac59848f880d7259f089dfa72a093039f1dcc6", size = 5262529, upload-time = "2026-03-02T15:51:36.587Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/91/9b286ab899c008c2cb05e8be99814807e7fbbd33f0c0c960470826e5ac82/typer-0.23.1-py3-none-any.whl", hash = "sha256:3291ad0d3c701cbf522012faccfbb29352ff16ad262db2139e6b01f15781f14e", size = 56813, upload-time = 
"2026-02-13T10:04:32.008Z" }, + { url = "https://files.pythonhosted.org/packages/0b/2c/718abe48393e521bf852cd6b0f984766869b09c258d6e38a118768a91731/ty-0.0.20-py3-none-linux_armv6l.whl", hash = "sha256:7cc12769c169c9709a829c2248ee2826b7aae82e92caeac813d856f07c021eae", size = 10333656, upload-time = "2026-03-02T15:51:56.461Z" }, + { url = "https://files.pythonhosted.org/packages/41/0e/eb1c4cc4a12862e2327b72657bcebb10b7d9f17046f1bdcd6457a0211615/ty-0.0.20-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3b777c1bf13bc0a95985ebb8a324b8668a4a9b2e514dde5ccf09e4d55d2ff232", size = 10168505, upload-time = "2026-03-02T15:51:51.895Z" }, + { url = "https://files.pythonhosted.org/packages/89/7f/10230798e673f0dd3094dfd16e43bfd90e9494e7af6e8e7db516fb431ddf/ty-0.0.20-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b2a4a7db48bf8cba30365001bc2cad7fd13c1a5aacdd704cc4b7925de8ca5eb3", size = 9678510, upload-time = "2026-03-02T15:51:48.451Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3d/59d9159577494edd1728f7db77b51bb07884bd21384f517963114e3ab5f6/ty-0.0.20-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6846427b8b353a43483e9c19936dc6a25612573b44c8f7d983dfa317e7f00d4c", size = 10162926, upload-time = "2026-03-02T15:51:40.558Z" }, + { url = "https://files.pythonhosted.org/packages/9c/a8/b7273eec3e802f78eb913fbe0ce0c16ef263723173e06a5776a8359b2c66/ty-0.0.20-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:245ceef5bd88df366869385cf96411cb14696334f8daa75597cf7e41c3012eb8", size = 10171702, upload-time = "2026-03-02T15:51:44.069Z" }, + { url = "https://files.pythonhosted.org/packages/9f/32/5f1144f2f04a275109db06e3498450c4721554215b80ae73652ef412eeab/ty-0.0.20-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4d21d1cdf67a444d3c37583c17291ddba9382a9871021f3f5d5735e09e85efe", size = 10682552, upload-time = "2026-03-02T15:51:33.102Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/db/9f1f637310792f12bd6ed37d5fc8ab39ba1a9b0c6c55a33865e9f1cad840/ty-0.0.20-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd4ffd907d1bd70e46af9e9a2f88622f215e1bf44658ea43b32c2c0b357299e4", size = 11242605, upload-time = "2026-03-02T15:51:34.895Z" }, + { url = "https://files.pythonhosted.org/packages/1a/68/cc9cae2e732fcfd20ccdffc508407905a023fc8493b8771c392d915528dc/ty-0.0.20-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6594b58d8b0e9d16a22b3045fc1305db4b132c8d70c17784ab8c7a7cc986807", size = 10974655, upload-time = "2026-03-02T15:51:46.011Z" }, + { url = "https://files.pythonhosted.org/packages/1c/c1/b9e3e3f28fe63486331e653f6aeb4184af8b1fe80542fcf74d2dda40a93d/ty-0.0.20-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3662f890518ce6cf4d7568f57d03906912d2afbf948a01089a28e325b1ef198c", size = 10761325, upload-time = "2026-03-02T15:51:26.818Z" }, + { url = "https://files.pythonhosted.org/packages/39/9e/67db935bdedf219a00fb69ec5437ba24dab66e0f2e706dd54a4eca234b84/ty-0.0.20-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0e3ffbae58f9f0d17cdc4ac6d175ceae560b7ed7d54f9ddfb1c9f31054bcdc2c", size = 10145793, upload-time = "2026-03-02T15:51:38.562Z" }, + { url = "https://files.pythonhosted.org/packages/c7/de/b0eb815d4dc5a819c7e4faddc2a79058611169f7eef07ccc006531ce228c/ty-0.0.20-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:176e52bc8bb00b0e84efd34583962878a447a3a0e34ecc45fd7097a37554261b", size = 10189640, upload-time = "2026-03-02T15:51:50.202Z" }, + { url = "https://files.pythonhosted.org/packages/b8/71/63734923965cbb70df1da3e93e4b8875434e326b89e9f850611122f279bf/ty-0.0.20-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b2bc73025418e976ca4143dde71fb9025a90754a08ac03e6aa9b80d4bed1294b", size = 10370568, upload-time = "2026-03-02T15:51:42.295Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/a0/a532c2048533347dff48e9ca98bd86d2c224356e101688a8edaf8d6973fb/ty-0.0.20-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d52f7c9ec6e363e094b3c389c344d5a140401f14a77f0625e3f28c21918552f5", size = 10853999, upload-time = "2026-03-02T15:51:58.963Z" }, + { url = "https://files.pythonhosted.org/packages/48/88/36c652c658fe96658043e4abc8ea97801de6fb6e63ab50aaa82807bff1d8/ty-0.0.20-py3-none-win32.whl", hash = "sha256:c7d32bfe93f8fcaa52b6eef3f1b930fd7da410c2c94e96f7412c30cfbabf1d17", size = 9744206, upload-time = "2026-03-02T15:51:54.183Z" }, + { url = "https://files.pythonhosted.org/packages/ff/a7/a4a13bed1d7fd9d97aaa3c5bb5e6d3e9a689e6984806cbca2ab4c9233cac/ty-0.0.20-py3-none-win_amd64.whl", hash = "sha256:a5e10f40fc4a0a1cbcb740a4aad5c7ce35d79f030836ea3183b7a28f43170248", size = 10711999, upload-time = "2026-03-02T15:51:29.212Z" }, + { url = "https://files.pythonhosted.org/packages/8d/7e/6bfd748a9f4ff9267ed3329b86a0f02cdf6ab49f87bc36c8a164852f99fc/ty-0.0.20-py3-none-win_arm64.whl", hash = "sha256:53f7a5c12c960e71f160b734f328eff9a35d578af4b67a36b0bb5990ac5cdc27", size = 10150143, upload-time = "2026-03-02T15:51:31.283Z" }, ] [[package]] @@ -2199,39 +1922,110 @@ wheels = [ ] [[package]] -name = "urllib3" -version = "2.6.3" +name = "uncalled-for" +version = "0.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +sdist = { url = "https://files.pythonhosted.org/packages/02/7c/b5b7d8136f872e3f13b0584e576886de0489d7213a12de6bebf29ff6ebfc/uncalled_for-0.2.0.tar.gz", hash = "sha256:b4f8fdbcec328c5a113807d653e041c5094473dd4afa7c34599ace69ccb7e69f", size = 49488, upload-time = "2026-02-27T17:40:58.137Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, + { url = "https://files.pythonhosted.org/packages/ff/7f/4320d9ce3be404e6310b915c3629fe27bf1e2f438a1a7a3cb0396e32e9a9/uncalled_for-0.2.0-py3-none-any.whl", hash = "sha256:2c0bd338faff5f930918f79e7eb9ff48290df2cb05fcc0b40a7f334e55d4d85f", size = 11351, upload-time = "2026-02-27T17:40:56.804Z" }, ] [[package]] name = "uvicorn" -version = "0.40.0" +version = "0.41.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761, upload-time = "2025-12-21T14:16:22.45Z" } +sdist = { url = "https://files.pythonhosted.org/packages/32/ce/eeb58ae4ac36fe09e3842eb02e0eb676bf2c53ae062b98f1b2531673efdd/uvicorn-0.41.0.tar.gz", hash = "sha256:09d11cf7008da33113824ee5a1c6422d89fbc2ff476540d69a34c87fab8b571a", size = 82633, upload-time = "2026-02-16T23:07:24.1Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" }, + { url = "https://files.pythonhosted.org/packages/83/e4/d04a086285c20886c0daad0e026f250869201013d18f81d9ff5eada73a88/uvicorn-0.41.0-py3-none-any.whl", hash = "sha256:29e35b1d2c36a04b9e180d4007ede3bcb32a85fbdfd6c6aeb3f26839de088187", size = 68783, upload-time = "2026-02-16T23:07:22.357Z" }, ] [[package]] name = "virtualenv" -version = "20.36.1" +version = "21.1.0" 
source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, + { name = "python-discovery" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 6032239, upload-time = "2026-01-09T18:21:01.296Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/c9/18d4b36606d6091844daa3bd93cf7dc78e6f5da21d9f21d06c221104b684/virtualenv-21.1.0.tar.gz", hash = "sha256:1990a0188c8f16b6b9cf65c9183049007375b26aad415514d377ccacf1e4fb44", size = 5840471, upload-time = "2026-02-27T08:49:29.702Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, upload-time = "2026-01-09T18:20:59.425Z" }, + { url = "https://files.pythonhosted.org/packages/78/55/896b06bf93a49bec0f4ae2a6f1ed12bd05c8860744ac3a70eda041064e4d/virtualenv-21.1.0-py3-none-any.whl", hash = "sha256:164f5e14c5587d170cf98e60378eb91ea35bf037be313811905d3a24ea33cc07", size = 5825072, upload-time = "2026-02-27T08:49:27.516Z" }, +] + +[[package]] +name = "watchfiles" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" }, + { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, upload-time = "2025-10-14T15:04:50.155Z" }, + { url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" }, + { url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" }, + { url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" }, + { url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = "2025-10-14T15:04:57.521Z" }, + { url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" }, + { url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, upload-time = "2025-10-14T15:05:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = 
"2025-10-14T15:05:01.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" }, + { url = "https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" }, + { url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" }, + { url = "https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" }, + { url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" }, + { url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = 
"2025-10-14T15:05:06.906Z" }, + { url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" }, + { url = "https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" }, + { url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" }, + { url = "https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/95/9c/8ed97d4bba5db6fdcdb2b298d3898f2dd5c20f6b73aee04eabe56c59677e/watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0", size = 272056, upload-time = "2025-10-14T15:05:12.156Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/c14e28429f744a260d8ceae18bf58c1d5fa56b50d006a7a9f80e1882cb0d/watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42", size = 288162, upload-time = "2025-10-14T15:05:13.208Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/61/fe0e56c40d5cd29523e398d31153218718c5786b5e636d9ae8ae79453d27/watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18", size = 277909, upload-time = "2025-10-14T15:05:14.49Z" }, + { url = "https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" }, + { url = "https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" }, + { url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" }, + { url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" }, + { url = "https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = "2025-10-14T15:05:23.348Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" }, + { url = "https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" }, + { url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = "2025-10-14T15:05:34.189Z" }, + { url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" }, + { url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" }, + { url = "https://files.pythonhosted.org/packages/1d/ce/d8acdc8de545de995c339be67711e474c77d643555a9bb74a9334252bd55/watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b", size = 272078, upload-time = "2025-10-14T15:05:37.63Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c9/a74487f72d0451524be827e8edec251da0cc1fcf111646a511ae752e1a3d/watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a", size = 287664, upload-time = "2025-10-14T15:05:38.95Z" }, + { url = "https://files.pythonhosted.org/packages/df/b8/8ac000702cdd496cdce998c6f4ee0ca1f15977bba51bdf07d872ebdfc34c/watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02", size = 277154, upload-time = "2025-10-14T15:05:39.954Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" }, + { url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" }, + { url = "https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" }, + { url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" }, + { url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" }, + { url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" }, ] [[package]] @@ -2299,96 +2093,106 @@ wheels = [ [[package]] name = "yarl" -version = "1.22.0" +version = "1.23.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "multidict" }, { name = "propcache" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, - { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, - { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940, upload-time = "2025-10-06T14:09:50.089Z" }, - { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825, upload-time = "2025-10-06T14:09:52.142Z" }, - { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705, upload-time = "2025-10-06T14:09:54.128Z" }, - { url = 
"https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518, upload-time = "2025-10-06T14:09:55.762Z" }, - { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267, upload-time = "2025-10-06T14:09:57.958Z" }, - { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797, upload-time = "2025-10-06T14:09:59.527Z" }, - { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535, upload-time = "2025-10-06T14:10:01.139Z" }, - { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" }, - { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" }, - { url = 
"https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" }, - { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" }, - { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" }, - { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" }, - { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980, upload-time = "2025-10-06T14:10:14.601Z" }, - { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424, upload-time = "2025-10-06T14:10:16.115Z" }, - { url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821, upload-time = "2025-10-06T14:10:17.993Z" }, - { url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243, upload-time = "2025-10-06T14:10:19.44Z" }, - { url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361, upload-time = "2025-10-06T14:10:21.124Z" }, - { url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036, upload-time = "2025-10-06T14:10:22.902Z" }, - { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671, upload-time = "2025-10-06T14:10:24.523Z" }, - { url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059, upload-time = "2025-10-06T14:10:26.406Z" }, - { url = 
"https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356, upload-time = "2025-10-06T14:10:28.461Z" }, - { url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331, upload-time = "2025-10-06T14:10:30.541Z" }, - { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590, upload-time = "2025-10-06T14:10:33.352Z" }, - { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316, upload-time = "2025-10-06T14:10:35.034Z" }, - { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431, upload-time = "2025-10-06T14:10:37.76Z" }, - { url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555, upload-time = "2025-10-06T14:10:39.649Z" }, - { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965, upload-time = "2025-10-06T14:10:41.313Z" }, - { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205, upload-time = "2025-10-06T14:10:43.167Z" }, - { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209, upload-time = "2025-10-06T14:10:44.643Z" }, - { url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966, upload-time = "2025-10-06T14:10:46.554Z" }, - { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312, upload-time = "2025-10-06T14:10:48.007Z" }, - { url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967, upload-time = "2025-10-06T14:10:49.997Z" }, - { url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949, upload-time = "2025-10-06T14:10:52.004Z" }, - { url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818, upload-time = "2025-10-06T14:10:54.078Z" }, - { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626, upload-time = "2025-10-06T14:10:55.767Z" }, - { url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129, upload-time = "2025-10-06T14:10:57.985Z" }, - { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776, upload-time = "2025-10-06T14:10:59.633Z" }, - { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879, upload-time = "2025-10-06T14:11:01.454Z" }, - { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = 
"sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996, upload-time = "2025-10-06T14:11:03.452Z" }, - { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047, upload-time = "2025-10-06T14:11:05.115Z" }, - { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947, upload-time = "2025-10-06T14:11:08.137Z" }, - { url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" }, - { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" }, - { url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" }, - { url = "https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4", size = 140520, upload-time = "2025-10-06T14:11:15.465Z" }, - { url = 
"https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683", size = 93504, upload-time = "2025-10-06T14:11:17.106Z" }, - { url = "https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b", size = 94282, upload-time = "2025-10-06T14:11:19.064Z" }, - { url = "https://files.pythonhosted.org/packages/a7/bc/315a56aca762d44a6aaaf7ad253f04d996cb6b27bad34410f82d76ea8038/yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e", size = 372080, upload-time = "2025-10-06T14:11:20.996Z" }, - { url = "https://files.pythonhosted.org/packages/3f/3f/08e9b826ec2e099ea6e7c69a61272f4f6da62cb5b1b63590bb80ca2e4a40/yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590", size = 338696, upload-time = "2025-10-06T14:11:22.847Z" }, - { url = "https://files.pythonhosted.org/packages/e3/9f/90360108e3b32bd76789088e99538febfea24a102380ae73827f62073543/yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2", size = 387121, upload-time = "2025-10-06T14:11:24.889Z" }, - { url = "https://files.pythonhosted.org/packages/98/92/ab8d4657bd5b46a38094cfaea498f18bb70ce6b63508fd7e909bd1f93066/yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da", size = 394080, upload-time = 
"2025-10-06T14:11:27.307Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784", size = 372661, upload-time = "2025-10-06T14:11:29.387Z" }, - { url = "https://files.pythonhosted.org/packages/b6/2e/f4d26183c8db0bb82d491b072f3127fb8c381a6206a3a56332714b79b751/yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b", size = 364645, upload-time = "2025-10-06T14:11:31.423Z" }, - { url = "https://files.pythonhosted.org/packages/80/7c/428e5812e6b87cd00ee8e898328a62c95825bf37c7fa87f0b6bb2ad31304/yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694", size = 355361, upload-time = "2025-10-06T14:11:33.055Z" }, - { url = "https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d", size = 381451, upload-time = "2025-10-06T14:11:35.136Z" }, - { url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd", size = 383814, upload-time = "2025-10-06T14:11:37.094Z" }, - { url = "https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da", size = 370799, upload-time = "2025-10-06T14:11:38.83Z" }, - { url = 
"https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2", size = 82990, upload-time = "2025-10-06T14:11:40.624Z" }, - { url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79", size = 88292, upload-time = "2025-10-06T14:11:42.578Z" }, - { url = "https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33", size = 82888, upload-time = "2025-10-06T14:11:44.863Z" }, - { url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1", size = 146223, upload-time = "2025-10-06T14:11:46.796Z" }, - { url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca", size = 95981, upload-time = "2025-10-06T14:11:48.845Z" }, - { url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53", size = 97303, upload-time = "2025-10-06T14:11:50.897Z" }, - { url = 
"https://files.pythonhosted.org/packages/b0/ab/5b13d3e157505c43c3b43b5a776cbf7b24a02bc4cccc40314771197e3508/yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c", size = 361820, upload-time = "2025-10-06T14:11:52.549Z" }, - { url = "https://files.pythonhosted.org/packages/fb/76/242a5ef4677615cf95330cfc1b4610e78184400699bdda0acb897ef5e49a/yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf", size = 323203, upload-time = "2025-10-06T14:11:54.225Z" }, - { url = "https://files.pythonhosted.org/packages/8c/96/475509110d3f0153b43d06164cf4195c64d16999e0c7e2d8a099adcd6907/yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face", size = 363173, upload-time = "2025-10-06T14:11:56.069Z" }, - { url = "https://files.pythonhosted.org/packages/c9/66/59db471aecfbd559a1fd48aedd954435558cd98c7d0da8b03cc6c140a32c/yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b", size = 373562, upload-time = "2025-10-06T14:11:58.783Z" }, - { url = "https://files.pythonhosted.org/packages/03/1f/c5d94abc91557384719da10ff166b916107c1b45e4d0423a88457071dd88/yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486", size = 339828, upload-time = "2025-10-06T14:12:00.686Z" }, - { url = "https://files.pythonhosted.org/packages/5f/97/aa6a143d3afba17b6465733681c70cf175af89f76ec8d9286e08437a7454/yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138", size = 347551, upload-time = "2025-10-06T14:12:02.628Z" }, - { url = "https://files.pythonhosted.org/packages/43/3c/45a2b6d80195959239a7b2a8810506d4eea5487dce61c2a3393e7fc3c52e/yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a", size = 334512, upload-time = "2025-10-06T14:12:04.871Z" }, - { url = "https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529", size = 352400, upload-time = "2025-10-06T14:12:06.624Z" }, - { url = "https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093", size = 357140, upload-time = "2025-10-06T14:12:08.362Z" }, - { url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c", size = 341473, upload-time = "2025-10-06T14:12:10.994Z" }, - { url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056, upload-time = "2025-10-06T14:12:13.317Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292, upload-time = "2025-10-06T14:12:15.398Z" }, - { url = 
"https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171, upload-time = "2025-10-06T14:12:16.935Z" }, - { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/23/6e/beb1beec874a72f23815c1434518bfc4ed2175065173fb138c3705f658d4/yarl-1.23.0.tar.gz", hash = "sha256:53b1ea6ca88ebd4420379c330aea57e258408dd0df9af0992e5de2078dc9f5d5", size = 194676, upload-time = "2026-03-01T22:07:53.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/8a/94615bc31022f711add374097ad4144d569e95ff3c38d39215d07ac153a0/yarl-1.23.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1932b6b8bba8d0160a9d1078aae5838a66039e8832d41d2992daa9a3a08f7860", size = 124737, upload-time = "2026-03-01T22:05:12.897Z" }, + { url = "https://files.pythonhosted.org/packages/e3/6f/c6554045d59d64052698add01226bc867b52fe4a12373415d7991fdca95d/yarl-1.23.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:411225bae281f114067578891bc75534cfb3d92a3b4dfef7a6ca78ba354e6069", size = 87029, upload-time = "2026-03-01T22:05:14.376Z" }, + { url = "https://files.pythonhosted.org/packages/19/2a/725ecc166d53438bc88f76822ed4b1e3b10756e790bafd7b523fe97c322d/yarl-1.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13a563739ae600a631c36ce096615fe307f131344588b0bc0daec108cdb47b25", size = 86310, upload-time = "2026-03-01T22:05:15.71Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/30/58260ed98e6ff7f90ba84442c1ddd758c9170d70327394a6227b310cd60f/yarl-1.23.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cbf44c5cb4a7633d078788e1b56387e3d3cf2b8139a3be38040b22d6c3221c8", size = 97587, upload-time = "2026-03-01T22:05:17.384Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/8b08aac08b50682e65759f7f8dde98ae8168f72487e7357a5d684c581ef9/yarl-1.23.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53ad387048f6f09a8969631e4de3f1bf70c50e93545d64af4f751b2498755072", size = 92528, upload-time = "2026-03-01T22:05:18.804Z" }, + { url = "https://files.pythonhosted.org/packages/52/07/0b7179101fe5f8385ec6c6bb5d0cb9f76bd9fb4a769591ab6fb5cdbfc69a/yarl-1.23.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4a59ba56f340334766f3a4442e0efd0af895fae9e2b204741ef885c446b3a1a8", size = 105339, upload-time = "2026-03-01T22:05:20.235Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8a/36d82869ab5ec829ca8574dfcb92b51286fcfb1e9c7a73659616362dc880/yarl-1.23.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:803a3c3ce4acc62eaf01eaca1208dcf0783025ef27572c3336502b9c232005e7", size = 105061, upload-time = "2026-03-01T22:05:22.268Z" }, + { url = "https://files.pythonhosted.org/packages/66/3e/868e5c3364b6cee19ff3e1a122194fa4ce51def02c61023970442162859e/yarl-1.23.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3d2bff8f37f8d0f96c7ec554d16945050d54462d6e95414babaa18bfafc7f51", size = 100132, upload-time = "2026-03-01T22:05:23.638Z" }, + { url = "https://files.pythonhosted.org/packages/cf/26/9c89acf82f08a52cb52d6d39454f8d18af15f9d386a23795389d1d423823/yarl-1.23.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:c75eb09e8d55bceb4367e83496ff8ef2bc7ea6960efb38e978e8073ea59ecb67", size = 99289, upload-time = "2026-03-01T22:05:25.749Z" }, + { url = "https://files.pythonhosted.org/packages/6f/54/5b0db00d2cb056922356104468019c0a132e89c8d3ab67d8ede9f4483d2a/yarl-1.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877b0738624280e34c55680d6054a307aa94f7d52fa0e3034a9cc6e790871da7", size = 96950, upload-time = "2026-03-01T22:05:27.318Z" }, + { url = "https://files.pythonhosted.org/packages/f6/40/10fa93811fd439341fad7e0718a86aca0de9548023bbb403668d6555acab/yarl-1.23.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b5405bb8f0e783a988172993cfc627e4d9d00432d6bbac65a923041edacf997d", size = 93960, upload-time = "2026-03-01T22:05:28.738Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d2/8ae2e6cd77d0805f4526e30ec43b6f9a3dfc542d401ac4990d178e4bf0cf/yarl-1.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1c3a3598a832590c5a3ce56ab5576361b5688c12cb1d39429cf5dba30b510760", size = 104703, upload-time = "2026-03-01T22:05:30.438Z" }, + { url = "https://files.pythonhosted.org/packages/2f/0c/b3ceacf82c3fe21183ce35fa2acf5320af003d52bc1fcf5915077681142e/yarl-1.23.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8419ebd326430d1cbb7efb5292330a2cf39114e82df5cc3d83c9a0d5ebeaf2f2", size = 98325, upload-time = "2026-03-01T22:05:31.835Z" }, + { url = "https://files.pythonhosted.org/packages/9d/e0/12900edd28bdab91a69bd2554b85ad7b151f64e8b521fe16f9ad2f56477a/yarl-1.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:be61f6fff406ca40e3b1d84716fde398fc08bc63dd96d15f3a14230a0973ed86", size = 105067, upload-time = "2026-03-01T22:05:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/15/61/74bb1182cf79c9bbe4eb6b1f14a57a22d7a0be5e9cedf8e2d5c2086474c3/yarl-1.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ceb13c5c858d01321b5d9bb65e4cf37a92169ea470b70fec6f236b2c9dd7e34", size = 100285, upload-time = "2026-03-01T22:05:35.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/7f/cd5ef733f2550de6241bd8bd8c3febc78158b9d75f197d9c7baa113436af/yarl-1.23.0-cp312-cp312-win32.whl", hash = "sha256:fffc45637bcd6538de8b85f51e3df3223e4ad89bccbfca0481c08c7fc8b7ed7d", size = 82359, upload-time = "2026-03-01T22:05:36.811Z" }, + { url = "https://files.pythonhosted.org/packages/f5/be/25216a49daeeb7af2bec0db22d5e7df08ed1d7c9f65d78b14f3b74fd72fc/yarl-1.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:f69f57305656a4852f2a7203efc661d8c042e6cc67f7acd97d8667fb448a426e", size = 87674, upload-time = "2026-03-01T22:05:38.171Z" }, + { url = "https://files.pythonhosted.org/packages/d2/35/aeab955d6c425b227d5b7247eafb24f2653fedc32f95373a001af5dfeb9e/yarl-1.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:6e87a6e8735b44816e7db0b2fbc9686932df473c826b0d9743148432e10bb9b9", size = 81879, upload-time = "2026-03-01T22:05:40.006Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4b/a0a6e5d0ee8a2f3a373ddef8a4097d74ac901ac363eea1440464ccbe0898/yarl-1.23.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:16c6994ac35c3e74fb0ae93323bf8b9c2a9088d55946109489667c510a7d010e", size = 123796, upload-time = "2026-03-01T22:05:41.412Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/8925d68af039b835ae876db5838e82e76ec87b9782ecc97e192b809c4831/yarl-1.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4a42e651629dafb64fd5b0286a3580613702b5809ad3f24934ea87595804f2c5", size = 86547, upload-time = "2026-03-01T22:05:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/ae/50/06d511cc4b8e0360d3c94af051a768e84b755c5eb031b12adaaab6dec6e5/yarl-1.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c6b9461a2a8b47c65eef63bb1c76a4f1c119618ffa99ea79bc5bb1e46c5821b", size = 85854, upload-time = "2026-03-01T22:05:44.85Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/f4/4e30b250927ffdab4db70da08b9b8d2194d7c7b400167b8fbeca1e4701ca/yarl-1.23.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2569b67d616eab450d262ca7cb9f9e19d2f718c70a8b88712859359d0ab17035", size = 98351, upload-time = "2026-03-01T22:05:46.836Z" }, + { url = "https://files.pythonhosted.org/packages/86/fc/4118c5671ea948208bdb1492d8b76bdf1453d3e73df051f939f563e7dcc5/yarl-1.23.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e9d9a4d06d3481eab79803beb4d9bd6f6a8e781ec078ac70d7ef2dcc29d1bea5", size = 92711, upload-time = "2026-03-01T22:05:48.316Z" }, + { url = "https://files.pythonhosted.org/packages/56/11/1ed91d42bd9e73c13dc9e7eb0dd92298d75e7ac4dd7f046ad0c472e231cd/yarl-1.23.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f514f6474e04179d3d33175ed3f3e31434d3130d42ec153540d5b157deefd735", size = 106014, upload-time = "2026-03-01T22:05:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/ce/c9/74e44e056a23fbc33aca71779ef450ca648a5bc472bdad7a82339918f818/yarl-1.23.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fda207c815b253e34f7e1909840fd14299567b1c0eb4908f8c2ce01a41265401", size = 105557, upload-time = "2026-03-01T22:05:51.416Z" }, + { url = "https://files.pythonhosted.org/packages/66/fe/b1e10b08d287f518994f1e2ff9b6d26f0adeecd8dd7d533b01bab29a3eda/yarl-1.23.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34b6cf500e61c90f305094911f9acc9c86da1a05a7a3f5be9f68817043f486e4", size = 101559, upload-time = "2026-03-01T22:05:52.872Z" }, + { url = "https://files.pythonhosted.org/packages/72/59/c5b8d94b14e3d3c2a9c20cb100119fd534ab5a14b93673ab4cc4a4141ea5/yarl-1.23.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:d7504f2b476d21653e4d143f44a175f7f751cd41233525312696c76aa3dbb23f", size = 100502, upload-time = "2026-03-01T22:05:54.954Z" }, + { url = "https://files.pythonhosted.org/packages/77/4f/96976cb54cbfc5c9fd73ed4c51804f92f209481d1fb190981c0f8a07a1d7/yarl-1.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:578110dd426f0d209d1509244e6d4a3f1a3e9077655d98c5f22583d63252a08a", size = 98027, upload-time = "2026-03-01T22:05:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/63/6e/904c4f476471afdbad6b7e5b70362fb5810e35cd7466529a97322b6f5556/yarl-1.23.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:609d3614d78d74ebe35f54953c5bbd2ac647a7ddb9c30a5d877580f5e86b22f2", size = 95369, upload-time = "2026-03-01T22:05:58.141Z" }, + { url = "https://files.pythonhosted.org/packages/9d/40/acfcdb3b5f9d68ef499e39e04d25e141fe90661f9d54114556cf83be8353/yarl-1.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4966242ec68afc74c122f8459abd597afd7d8a60dc93d695c1334c5fd25f762f", size = 105565, upload-time = "2026-03-01T22:06:00.286Z" }, + { url = "https://files.pythonhosted.org/packages/5e/c6/31e28f3a6ba2869c43d124f37ea5260cac9c9281df803c354b31f4dd1f3c/yarl-1.23.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e0fd068364a6759bc794459f0a735ab151d11304346332489c7972bacbe9e72b", size = 99813, upload-time = "2026-03-01T22:06:01.712Z" }, + { url = "https://files.pythonhosted.org/packages/08/1f/6f65f59e72d54aa467119b63fc0b0b1762eff0232db1f4720cd89e2f4a17/yarl-1.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:39004f0ad156da43e86aa71f44e033de68a44e5a31fc53507b36dd253970054a", size = 105632, upload-time = "2026-03-01T22:06:03.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c4/18b178a69935f9e7a338127d5b77d868fdc0f0e49becd286d51b3a18c61d/yarl-1.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5723c01a56c5028c807c701aa66722916d2747ad737a046853f6c46f4875543", size = 101895, upload-time = "2026-03-01T22:06:04.651Z" }, + { 
url = "https://files.pythonhosted.org/packages/8f/54/f5b870b5505663911dba950a8e4776a0dbd51c9c54c0ae88e823e4b874a0/yarl-1.23.0-cp313-cp313-win32.whl", hash = "sha256:1b6b572edd95b4fa8df75de10b04bc81acc87c1c7d16bcdd2035b09d30acc957", size = 82356, upload-time = "2026-03-01T22:06:06.04Z" }, + { url = "https://files.pythonhosted.org/packages/7a/84/266e8da36879c6edcd37b02b547e2d9ecdfea776be49598e75696e3316e1/yarl-1.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:baaf55442359053c7d62f6f8413a62adba3205119bcb6f49594894d8be47e5e3", size = 87515, upload-time = "2026-03-01T22:06:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/00/fd/7e1c66efad35e1649114fa13f17485f62881ad58edeeb7f49f8c5e748bf9/yarl-1.23.0-cp313-cp313-win_arm64.whl", hash = "sha256:fb4948814a2a98e3912505f09c9e7493b1506226afb1f881825368d6fb776ee3", size = 81785, upload-time = "2026-03-01T22:06:10.181Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fc/119dd07004f17ea43bb91e3ece6587759edd7519d6b086d16bfbd3319982/yarl-1.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:aecfed0b41aa72b7881712c65cf764e39ce2ec352324f5e0837c7048d9e6daaa", size = 130719, upload-time = "2026-03-01T22:06:11.708Z" }, + { url = "https://files.pythonhosted.org/packages/e6/0d/9f2348502fbb3af409e8f47730282cd6bc80dec6630c1e06374d882d6eb2/yarl-1.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a41bcf68efd19073376eb8cf948b8d9be0af26256403e512bb18f3966f1f9120", size = 89690, upload-time = "2026-03-01T22:06:13.429Z" }, + { url = "https://files.pythonhosted.org/packages/50/93/e88f3c80971b42cfc83f50a51b9d165a1dbf154b97005f2994a79f212a07/yarl-1.23.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cde9a2ecd91668bcb7f077c4966d8ceddb60af01b52e6e3e2680e4cf00ad1a59", size = 89851, upload-time = "2026-03-01T22:06:15.53Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/07/61c9dd8ba8f86473263b4036f70fb594c09e99c0d9737a799dfd8bc85651/yarl-1.23.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5023346c4ee7992febc0068e7593de5fa2bf611848c08404b35ebbb76b1b0512", size = 95874, upload-time = "2026-03-01T22:06:17.553Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e9/f9ff8ceefba599eac6abddcfb0b3bee9b9e636e96dbf54342a8577252379/yarl-1.23.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1009abedb49ae95b136a8904a3f71b342f849ffeced2d3747bf29caeda218c4", size = 88710, upload-time = "2026-03-01T22:06:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/eb/78/0231bfcc5d4c8eec220bc2f9ef82cb4566192ea867a7c5b4148f44f6cbcd/yarl-1.23.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a8d00f29b42f534cc8aa3931cfe773b13b23e561e10d2b26f27a8d309b0e82a1", size = 101033, upload-time = "2026-03-01T22:06:21.203Z" }, + { url = "https://files.pythonhosted.org/packages/cd/9b/30ea5239a61786f18fd25797151a17fbb3be176977187a48d541b5447dd4/yarl-1.23.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:95451e6ce06c3e104556d73b559f5da6c34a069b6b62946d3ad66afcd51642ea", size = 100817, upload-time = "2026-03-01T22:06:22.738Z" }, + { url = "https://files.pythonhosted.org/packages/62/e2/a4980481071791bc83bce2b7a1a1f7adcabfa366007518b4b845e92eeee3/yarl-1.23.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531ef597132086b6cf96faa7c6c1dcd0361dd5f1694e5cc30375907b9b7d3ea9", size = 97482, upload-time = "2026-03-01T22:06:24.21Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1e/304a00cf5f6100414c4b5a01fc7ff9ee724b62158a08df2f8170dfc72a2d/yarl-1.23.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:88f9fb0116fbfcefcab70f85cf4b74a2b6ce5d199c41345296f49d974ddb4123", size = 95949, upload-time = "2026-03-01T22:06:25.697Z" }, + { url = "https://files.pythonhosted.org/packages/68/03/093f4055ed4cae649ac53bca3d180bd37102e9e11d048588e9ab0c0108d0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e7b0460976dc75cb87ad9cc1f9899a4b97751e7d4e77ab840fc9b6d377b8fd24", size = 95839, upload-time = "2026-03-01T22:06:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/b9/28/4c75ebb108f322aa8f917ae10a8ffa4f07cae10a8a627b64e578617df6a0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:115136c4a426f9da976187d238e84139ff6b51a20839aa6e3720cd1026d768de", size = 90696, upload-time = "2026-03-01T22:06:29.048Z" }, + { url = "https://files.pythonhosted.org/packages/23/9c/42c2e2dd91c1a570402f51bdf066bfdb1241c2240ba001967bad778e77b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ead11956716a940c1abc816b7df3fa2b84d06eaed8832ca32f5c5e058c65506b", size = 100865, upload-time = "2026-03-01T22:06:30.525Z" }, + { url = "https://files.pythonhosted.org/packages/74/05/1bcd60a8a0a914d462c305137246b6f9d167628d73568505fce3f1cb2e65/yarl-1.23.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:fe8f8f5e70e6dbdfca9882cd9deaac058729bcf323cf7a58660901e55c9c94f6", size = 96234, upload-time = "2026-03-01T22:06:32.692Z" }, + { url = "https://files.pythonhosted.org/packages/90/b2/f52381aac396d6778ce516b7bc149c79e65bfc068b5de2857ab69eeea3b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a0e317df055958a0c1e79e5d2aa5a5eaa4a6d05a20d4b0c9c3f48918139c9fc6", size = 100295, upload-time = "2026-03-01T22:06:34.268Z" }, + { url = "https://files.pythonhosted.org/packages/e5/e8/638bae5bbf1113a659b2435d8895474598afe38b4a837103764f603aba56/yarl-1.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f0fd84de0c957b2d280143522c4f91a73aada1923caee763e24a2b3fda9f8a5", size = 97784, upload-time = "2026-03-01T22:06:35.864Z" }, + 
{ url = "https://files.pythonhosted.org/packages/80/25/a3892b46182c586c202629fc2159aa13975d3741d52ebd7347fd501d48d5/yarl-1.23.0-cp313-cp313t-win32.whl", hash = "sha256:93a784271881035ab4406a172edb0faecb6e7d00f4b53dc2f55919d6c9688595", size = 88313, upload-time = "2026-03-01T22:06:37.39Z" }, + { url = "https://files.pythonhosted.org/packages/43/68/8c5b36aa5178900b37387937bc2c2fe0e9505537f713495472dcf6f6fccc/yarl-1.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dd00607bffbf30250fe108065f07453ec124dbf223420f57f5e749b04295e090", size = 94932, upload-time = "2026-03-01T22:06:39.579Z" }, + { url = "https://files.pythonhosted.org/packages/c6/cc/d79ba8292f51f81f4dc533a8ccfb9fc6992cabf0998ed3245de7589dc07c/yarl-1.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ac09d42f48f80c9ee1635b2fcaa819496a44502737660d3c0f2ade7526d29144", size = 84786, upload-time = "2026-03-01T22:06:41.988Z" }, + { url = "https://files.pythonhosted.org/packages/90/98/b85a038d65d1b92c3903ab89444f48d3cee490a883477b716d7a24b1a78c/yarl-1.23.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:21d1b7305a71a15b4794b5ff22e8eef96ff4a6d7f9657155e5aa419444b28912", size = 124455, upload-time = "2026-03-01T22:06:43.615Z" }, + { url = "https://files.pythonhosted.org/packages/39/54/bc2b45559f86543d163b6e294417a107bb87557609007c007ad889afec18/yarl-1.23.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:85610b4f27f69984932a7abbe52703688de3724d9f72bceb1cca667deff27474", size = 86752, upload-time = "2026-03-01T22:06:45.425Z" }, + { url = "https://files.pythonhosted.org/packages/24/f9/e8242b68362bffe6fb536c8db5076861466fc780f0f1b479fc4ffbebb128/yarl-1.23.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23f371bd662cf44a7630d4d113101eafc0cfa7518a2760d20760b26021454719", size = 86291, upload-time = "2026-03-01T22:06:46.974Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/d8/d1cb2378c81dd729e98c716582b1ccb08357e8488e4c24714658cc6630e8/yarl-1.23.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a80f77dc1acaaa61f0934176fccca7096d9b1ff08c8ba9cddf5ae034a24319", size = 99026, upload-time = "2026-03-01T22:06:48.459Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ff/7196790538f31debe3341283b5b0707e7feb947620fc5e8236ef28d44f72/yarl-1.23.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:bd654fad46d8d9e823afbb4f87c79160b5a374ed1ff5bde24e542e6ba8f41434", size = 92355, upload-time = "2026-03-01T22:06:50.306Z" }, + { url = "https://files.pythonhosted.org/packages/c1/56/25d58c3eddde825890a5fe6aa1866228377354a3c39262235234ab5f616b/yarl-1.23.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:682bae25f0a0dd23a056739f23a134db9f52a63e2afd6bfb37ddc76292bbd723", size = 106417, upload-time = "2026-03-01T22:06:52.1Z" }, + { url = "https://files.pythonhosted.org/packages/51/8a/882c0e7bc8277eb895b31bce0138f51a1ba551fc2e1ec6753ffc1e7c1377/yarl-1.23.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a82836cab5f197a0514235aaf7ffccdc886ccdaa2324bc0aafdd4ae898103039", size = 106422, upload-time = "2026-03-01T22:06:54.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/2b/fef67d616931055bf3d6764885990a3ac647d68734a2d6a9e1d13de437a2/yarl-1.23.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c57676bdedc94cd3bc37724cf6f8cd2779f02f6aba48de45feca073e714fe52", size = 101915, upload-time = "2026-03-01T22:06:55.895Z" }, + { url = "https://files.pythonhosted.org/packages/18/6a/530e16aebce27c5937920f3431c628a29a4b6b430fab3fd1c117b26ff3f6/yarl-1.23.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:c7f8dc16c498ff06497c015642333219871effba93e4a2e8604a06264aca5c5c", size = 100690, upload-time = "2026-03-01T22:06:58.21Z" }, + { url = "https://files.pythonhosted.org/packages/88/08/93749219179a45e27b036e03260fda05190b911de8e18225c294ac95bbc9/yarl-1.23.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5ee586fb17ff8f90c91cf73c6108a434b02d69925f44f5f8e0d7f2f260607eae", size = 98750, upload-time = "2026-03-01T22:06:59.794Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cf/ea424a004969f5d81a362110a6ac1496d79efdc6d50c2c4b2e3ea0fc2519/yarl-1.23.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:17235362f580149742739cc3828b80e24029d08cbb9c4bda0242c7b5bc610a8e", size = 94685, upload-time = "2026-03-01T22:07:01.375Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b7/14341481fe568e2b0408bcf1484c652accafe06a0ade9387b5d3fd9df446/yarl-1.23.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0793e2bd0cf14234983bbb371591e6bea9e876ddf6896cdcc93450996b0b5c85", size = 106009, upload-time = "2026-03-01T22:07:03.151Z" }, + { url = "https://files.pythonhosted.org/packages/0a/e6/5c744a9b54f4e8007ad35bce96fbc9218338e84812d36f3390cea616881a/yarl-1.23.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3650dc2480f94f7116c364096bc84b1d602f44224ef7d5c7208425915c0475dd", size = 100033, upload-time = "2026-03-01T22:07:04.701Z" }, + { url = "https://files.pythonhosted.org/packages/0c/23/e3bfc188d0b400f025bc49d99793d02c9abe15752138dcc27e4eaf0c4a9e/yarl-1.23.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f40e782d49630ad384db66d4d8b73ff4f1b8955dc12e26b09a3e3af064b3b9d6", size = 106483, upload-time = "2026-03-01T22:07:06.231Z" }, + { url = "https://files.pythonhosted.org/packages/72/42/f0505f949a90b3f8b7a363d6cbdf398f6e6c58946d85c6d3a3bc70595b26/yarl-1.23.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94f8575fbdf81749008d980c17796097e645574a3b8c28ee313931068dad14fe", size = 102175, upload-time = "2026-03-01T22:07:08.4Z" }, + { url 
= "https://files.pythonhosted.org/packages/aa/65/b39290f1d892a9dd671d1c722014ca062a9c35d60885d57e5375db0404b5/yarl-1.23.0-cp314-cp314-win32.whl", hash = "sha256:c8aa34a5c864db1087d911a0b902d60d203ea3607d91f615acd3f3108ac32169", size = 83871, upload-time = "2026-03-01T22:07:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/a9/5b/9b92f54c784c26e2a422e55a8d2607ab15b7ea3349e28359282f84f01d43/yarl-1.23.0-cp314-cp314-win_amd64.whl", hash = "sha256:63e92247f383c85ab00dd0091e8c3fa331a96e865459f5ee80353c70a4a42d70", size = 89093, upload-time = "2026-03-01T22:07:11.501Z" }, + { url = "https://files.pythonhosted.org/packages/e0/7d/8a84dc9381fd4412d5e7ff04926f9865f6372b4c2fd91e10092e65d29eb8/yarl-1.23.0-cp314-cp314-win_arm64.whl", hash = "sha256:70efd20be968c76ece7baa8dafe04c5be06abc57f754d6f36f3741f7aa7a208e", size = 83384, upload-time = "2026-03-01T22:07:13.069Z" }, + { url = "https://files.pythonhosted.org/packages/dd/8d/d2fad34b1c08aa161b74394183daa7d800141aaaee207317e82c790b418d/yarl-1.23.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:9a18d6f9359e45722c064c97464ec883eb0e0366d33eda61cb19a244bf222679", size = 131019, upload-time = "2026-03-01T22:07:14.903Z" }, + { url = "https://files.pythonhosted.org/packages/19/ff/33009a39d3ccf4b94d7d7880dfe17fb5816c5a4fe0096d9b56abceea9ac7/yarl-1.23.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2803ed8b21ca47a43da80a6fd1ed3019d30061f7061daa35ac54f63933409412", size = 89894, upload-time = "2026-03-01T22:07:17.372Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f1/dab7ac5e7306fb79c0190766a3c00b4cb8d09a1f390ded68c85a5934faf5/yarl-1.23.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:394906945aa8b19fc14a61cf69743a868bb8c465efe85eee687109cc540b98f4", size = 89979, upload-time = "2026-03-01T22:07:19.361Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/b1/08e95f3caee1fad6e65017b9f26c1d79877b502622d60e517de01e72f95d/yarl-1.23.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71d006bee8397a4a89f469b8deb22469fe7508132d3c17fa6ed871e79832691c", size = 95943, upload-time = "2026-03-01T22:07:21.266Z" }, + { url = "https://files.pythonhosted.org/packages/c0/cc/6409f9018864a6aa186c61175b977131f373f1988e198e031236916e87e4/yarl-1.23.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:62694e275c93d54f7ccedcfef57d42761b2aad5234b6be1f3e3026cae4001cd4", size = 88786, upload-time = "2026-03-01T22:07:23.129Z" }, + { url = "https://files.pythonhosted.org/packages/76/40/cc22d1d7714b717fde2006fad2ced5efe5580606cb059ae42117542122f3/yarl-1.23.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31de1613658308efdb21ada98cbc86a97c181aa050ba22a808120bb5be3ab94", size = 101307, upload-time = "2026-03-01T22:07:24.689Z" }, + { url = "https://files.pythonhosted.org/packages/8f/0d/476c38e85ddb4c6ec6b20b815bdd779aa386a013f3d8b85516feee55c8dc/yarl-1.23.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb1e8b8d66c278b21d13b0a7ca22c41dd757a7c209c6b12c313e445c31dd3b28", size = 100904, upload-time = "2026-03-01T22:07:26.287Z" }, + { url = "https://files.pythonhosted.org/packages/72/32/0abe4a76d59adf2081dcb0397168553ece4616ada1c54d1c49d8936c74f8/yarl-1.23.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50f9d8d531dfb767c565f348f33dd5139a6c43f5cbdf3f67da40d54241df93f6", size = 97728, upload-time = "2026-03-01T22:07:27.906Z" }, + { url = "https://files.pythonhosted.org/packages/b7/35/7b30f4810fba112f60f5a43237545867504e15b1c7647a785fbaf588fac2/yarl-1.23.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:575aa4405a656e61a540f4a80eaa5260f2a38fff7bfdc4b5f611840d76e9e277", size = 95964, upload-time = "2026-03-01T22:07:30.198Z" }, + { url = "https://files.pythonhosted.org/packages/2d/86/ed7a73ab85ef00e8bb70b0cb5421d8a2a625b81a333941a469a6f4022828/yarl-1.23.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:041b1a4cefacf65840b4e295c6985f334ba83c30607441ae3cf206a0eed1a2e4", size = 95882, upload-time = "2026-03-01T22:07:32.132Z" }, + { url = "https://files.pythonhosted.org/packages/19/90/d56967f61a29d8498efb7afb651e0b2b422a1e9b47b0ab5f4e40a19b699b/yarl-1.23.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:d38c1e8231722c4ce40d7593f28d92b5fc72f3e9774fe73d7e800ec32299f63a", size = 90797, upload-time = "2026-03-01T22:07:34.404Z" }, + { url = "https://files.pythonhosted.org/packages/72/00/8b8f76909259f56647adb1011d7ed8b321bcf97e464515c65016a47ecdf0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d53834e23c015ee83a99377db6e5e37d8484f333edb03bd15b4bc312cc7254fb", size = 101023, upload-time = "2026-03-01T22:07:35.953Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e2/cab11b126fb7d440281b7df8e9ddbe4851e70a4dde47a202b6642586b8d9/yarl-1.23.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2e27c8841126e017dd2a054a95771569e6070b9ee1b133366d8b31beb5018a41", size = 96227, upload-time = "2026-03-01T22:07:37.594Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9b/2c893e16bfc50e6b2edf76c1a9eb6cb0c744346197e74c65e99ad8d634d0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:76855800ac56f878847a09ce6dba727c93ca2d89c9e9d63002d26b916810b0a2", size = 100302, upload-time = "2026-03-01T22:07:39.334Z" }, + { url = "https://files.pythonhosted.org/packages/28/ec/5498c4e3a6d5f1003beb23405671c2eb9cdbf3067d1c80f15eeafe301010/yarl-1.23.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e09fd068c2e169a7070d83d3bde728a4d48de0549f975290be3c108c02e499b4", size = 98202, upload-time = "2026-03-01T22:07:41.717Z" }, + 
{ url = "https://files.pythonhosted.org/packages/fe/c3/cd737e2d45e70717907f83e146f6949f20cc23cd4bf7b2688727763aa458/yarl-1.23.0-cp314-cp314t-win32.whl", hash = "sha256:73309162a6a571d4cbd3b6a1dcc703c7311843ae0d1578df6f09be4e98df38d4", size = 90558, upload-time = "2026-03-01T22:07:43.433Z" }, + { url = "https://files.pythonhosted.org/packages/e1/19/3774d162f6732d1cfb0b47b4140a942a35ca82bb19b6db1f80e9e7bdc8f8/yarl-1.23.0-cp314-cp314t-win_amd64.whl", hash = "sha256:4503053d296bc6e4cbd1fad61cf3b6e33b939886c4f249ba7c78b602214fabe2", size = 97610, upload-time = "2026-03-01T22:07:45.773Z" }, + { url = "https://files.pythonhosted.org/packages/51/47/3fa2286c3cb162c71cdb34c4224d5745a1ceceb391b2bd9b19b668a8d724/yarl-1.23.0-cp314-cp314t-win_arm64.whl", hash = "sha256:44bb7bef4ea409384e3f8bc36c063d77ea1b8d4a5b2706956c0d6695f07dcc25", size = 86041, upload-time = "2026-03-01T22:07:49.026Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/c8739671f5699c7dc470580a4f821ef37c32c4cb0b047ce223a7f115757f/yarl-1.23.0-py3-none-any.whl", hash = "sha256:a2df6afe50dea8ae15fa34c9f824a3ee958d785fd5d089063d960bae1daa0a3f", size = 48288, upload-time = "2026-03-01T22:07:51.388Z" }, ] [[package]] From 7d2363eb2d8d7abe87db2d5ebef6fa594cf9960b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 20:30:31 +0100 Subject: [PATCH 424/565] chore(deps): bump fastmcp constraint to >=3.0.0 Lock file already has 3.1.0 since #166; align pyproject.toml floor to prevent accidental downgrades to v2. 
Resolves: #190 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 7f8cbd7b..92583f9c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ classifiers = [ "Operating System :: OS Independent", ] dependencies = [ - "fastmcp>=2.14.0", + "fastmcp>=3.0.0", "inquirer>=3.4.0", "patchright>=1.40.0", "python-dotenv>=1.1.1", diff --git a/uv.lock b/uv.lock index 742d0d99..e144ed14 100644 --- a/uv.lock +++ b/uv.lock @@ -968,7 +968,7 @@ dev = [ [package.metadata] requires-dist = [ - { name = "fastmcp", specifier = ">=2.14.0" }, + { name = "fastmcp", specifier = ">=3.0.0" }, { name = "inquirer", specifier = ">=3.4.0" }, { name = "patchright", specifier = ">=1.40.0" }, { name = "python-dotenv", specifier = ">=1.1.1" }, From b9663862afe01ee1dd1d96505f5f82496087afb3 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 20:48:00 +0100 Subject: [PATCH 425/565] refactor(error-handler): replace handle_tool_error with ToolError Replace dict-returning handle_tool_error() with raise_tool_error() that raises FastMCP ToolError for known exceptions. Unknown exceptions re-raise as-is for mask_error_details=True to handle. Resolves: #185 --- linkedin_mcp_server/error_handler.py | 155 +++++++++----------------- linkedin_mcp_server/server.py | 8 +- linkedin_mcp_server/tools/__init__.py | 2 +- linkedin_mcp_server/tools/company.py | 6 +- linkedin_mcp_server/tools/job.py | 6 +- linkedin_mcp_server/tools/person.py | 6 +- tests/test_error_handler.py | 76 ++++++++----- tests/test_tools.py | 6 +- 8 files changed, 117 insertions(+), 148 deletions(-) diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index 2db417e7..1a0eecfe 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -1,12 +1,15 @@ """ -Centralized error handling for LinkedIn MCP Server with structured responses. 
+Centralized error handling for LinkedIn MCP Server using FastMCP ToolError. -Provides DRY approach to error handling across all tools with consistent MCP response -format, specific LinkedIn error categorization, and proper logging integration. +Provides raise_tool_error() which maps known LinkedIn exceptions to user-friendly +ToolError messages. Unknown exceptions are re-raised as-is for mask_error_details +to handle. """ import logging -from typing import Any, Dict +from typing import NoReturn + +from fastmcp.exceptions import ToolError from linkedin_mcp_server.core.exceptions import ( AuthenticationError, @@ -27,126 +30,72 @@ logger = logging.getLogger(__name__) -def handle_tool_error(exception: Exception, context: str = "") -> Dict[str, Any]: - """ - Handle errors from tool functions and return structured responses. - - Args: - exception: The exception that occurred - context: Context about which tool failed - - Returns: - Structured error response dictionary +def raise_tool_error(exception: Exception) -> NoReturn: """ - return convert_exception_to_response(exception, context) - + Raise a ToolError for known LinkedIn exceptions, or re-raise unknown ones. -def convert_exception_to_response( - exception: Exception, context: str = "" -) -> Dict[str, Any]: - """ - Convert an exception to a structured MCP response. + Known exceptions are mapped to user-friendly messages via ToolError. + Unknown exceptions are re-raised as-is so mask_error_details can mask them. 
Args: - exception: The exception to convert - context: Additional context about where the error occurred + exception: The exception that occurred - Returns: - Structured error response dictionary + Raises: + ToolError: For known LinkedIn exception types + Exception: Re-raises unknown exceptions as-is """ if isinstance(exception, CredentialsNotFoundError): - logger.warning("Credentials not found in %s: %s", context, exception) - return { - "error": "authentication_not_found", - "message": str(exception), - "resolution": "Run with --login to create a browser profile.", - } + logger.warning("Credentials not found: %s", exception) + raise ToolError( + "Authentication not found. Run with --login to create a browser profile." + ) from exception elif isinstance(exception, SessionExpiredError): - logger.warning("Session expired in %s: %s", context, exception) - return { - "error": "session_expired", - "message": str(exception), - "resolution": "Run with --login to create a new browser profile.", - } + logger.warning("Session expired: %s", exception) + raise ToolError( + "Session expired. Run with --login to create a new browser profile." + ) from exception elif isinstance(exception, AuthenticationError): - logger.warning("Authentication failed in %s: %s", context, exception) - return { - "error": "authentication_failed", - "message": str(exception), - "resolution": "Run with --login to re-authenticate.", - } + logger.warning("Authentication failed: %s", exception) + raise ToolError( + "Authentication failed. Run with --login to re-authenticate." + ) from exception elif isinstance(exception, RateLimitError): wait_time = getattr(exception, "suggested_wait_time", 300) - logger.warning("Rate limit in %s: %s (wait=%ds)", context, exception, wait_time) - return { - "error": "rate_limit", - "message": str(exception), - "suggested_wait_seconds": wait_time, - "resolution": f"LinkedIn rate limit detected. 
Wait {wait_time} seconds before trying again.", - } + logger.warning("Rate limit: %s (wait=%ds)", exception, wait_time) + raise ToolError( + f"Rate limit detected. Wait {wait_time} seconds before trying again." + ) from exception elif isinstance(exception, ProfileNotFoundError): - logger.warning("Profile not found in %s: %s", context, exception) - return { - "error": "profile_not_found", - "message": str(exception), - "resolution": "Check the profile URL is correct and the profile exists.", - } + logger.warning("Profile not found: %s", exception) + raise ToolError( + "Profile not found. Check the profile URL is correct." + ) from exception elif isinstance(exception, ElementNotFoundError): - logger.warning("Element not found in %s: %s", context, exception) - return { - "error": "element_not_found", - "message": str(exception), - "resolution": "LinkedIn page structure may have changed. Please report this issue.", - } + logger.warning("Element not found: %s", exception) + raise ToolError( + "Element not found. LinkedIn page structure may have changed." + ) from exception elif isinstance(exception, NetworkError): - logger.warning("Network error in %s: %s", context, exception) - return { - "error": "network_error", - "message": str(exception), - "resolution": "Check your network connection and try again.", - } + logger.warning("Network error: %s", exception) + raise ToolError( + "Network error. Check your connection and try again." + ) from exception elif isinstance(exception, ScrapingError): - logger.warning("Scraping error in %s: %s", context, exception) - return { - "error": "scraping_error", - "message": str(exception), - "resolution": "Failed to extract data from LinkedIn. 
The page structure may have changed.", - } - - elif isinstance(exception, LinkedInScraperException): - logger.warning("Scraper error in %s: %s", context, exception) - return { - "error": "linkedin_scraper_error", - "message": str(exception), - } - - elif isinstance(exception, LinkedInMCPError): - logger.warning("MCP error in %s: %s", context, exception) - return { - "error": "linkedin_mcp_error", - "message": str(exception), - } + logger.warning("Scraping error: %s", exception) + raise ToolError( + "Scraping failed. LinkedIn page structure may have changed." + ) from exception + + elif isinstance(exception, (LinkedInScraperException, LinkedInMCPError)): + logger.warning("LinkedIn error: %s", exception) + raise ToolError(str(exception)) from exception else: - logger.error( - "Unexpected error in %s: %s", - context, - exception, - exc_info=True, - extra={ - "context": context, - "exception_type": type(exception).__name__, - "exception_message": str(exception), - }, - ) - return { - "error": "unknown_error", - "message": f"Failed to execute {context}: {str(exception)}", - } + raise exception diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index 39126fd8..ae4ebb2c 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -12,6 +12,7 @@ from fastmcp import FastMCP from linkedin_mcp_server.drivers.browser import close_browser +from linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.tools.company import register_company_tools from linkedin_mcp_server.tools.job import register_job_tools from linkedin_mcp_server.tools.person import register_person_tools @@ -30,7 +31,7 @@ async def lifespan(app: FastMCP) -> AsyncIterator[None]: def create_mcp_server() -> FastMCP: """Create and configure the MCP server with all LinkedIn tools.""" - mcp = FastMCP("linkedin_scraper", lifespan=lifespan) + mcp = FastMCP("linkedin_scraper", lifespan=lifespan, mask_error_details=True) # Register all tools 
register_person_tools(mcp) @@ -48,9 +49,6 @@ async def close_session() -> Dict[str, Any]: "message": "Successfully closed the browser session and cleaned up resources", } except Exception as e: - return { - "status": "error", - "message": f"Error closing browser session: {str(e)}", - } + raise_tool_error(e) return mcp diff --git a/linkedin_mcp_server/tools/__init__.py b/linkedin_mcp_server/tools/__init__.py index aa5b9088..52bb04d7 100644 --- a/linkedin_mcp_server/tools/__init__.py +++ b/linkedin_mcp_server/tools/__init__.py @@ -13,7 +13,7 @@ Architecture: - FastMCP integration for MCP-compliant tool registration -- Shared error handling through centralized error_handler module +- ToolError-based error handling through centralized raise_tool_error() - Singleton driver pattern for session persistence - Structured data return format for consistent MCP responses """ diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 9699ee5c..9797a7dc 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -15,7 +15,7 @@ ensure_authenticated, get_or_create_browser, ) -from linkedin_mcp_server.error_handler import handle_tool_error +from linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.scraping import LinkedInExtractor, parse_company_sections logger = logging.getLogger(__name__) @@ -82,7 +82,7 @@ async def get_company_profile( return result except Exception as e: - return handle_tool_error(e, "get_company_profile") + raise_tool_error(e) @mcp.tool( annotations=ToolAnnotations( @@ -134,4 +134,4 @@ async def get_company_posts( } except Exception as e: - return handle_tool_error(e, "get_company_posts") + raise_tool_error(e) diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index ef4af8a7..c9e4005a 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -14,7 +14,7 @@ ensure_authenticated, get_or_create_browser, 
) -from linkedin_mcp_server.error_handler import handle_tool_error +from linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.scraping import LinkedInExtractor logger = logging.getLogger(__name__) @@ -62,7 +62,7 @@ async def get_job_details(job_id: str, ctx: Context) -> dict[str, Any]: return result except Exception as e: - return handle_tool_error(e, "get_job_details") + raise_tool_error(e) @mcp.tool( annotations=ToolAnnotations( @@ -112,4 +112,4 @@ async def search_jobs( return result except Exception as e: - return handle_tool_error(e, "search_jobs") + raise_tool_error(e) diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 3fddf2ad..90f8c2d9 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -15,7 +15,7 @@ ensure_authenticated, get_or_create_browser, ) -from linkedin_mcp_server.error_handler import handle_tool_error +from linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.scraping import LinkedInExtractor, parse_person_sections logger = logging.getLogger(__name__) @@ -83,7 +83,7 @@ async def get_person_profile( return result except Exception as e: - return handle_tool_error(e, "get_person_profile") + raise_tool_error(e) @mcp.tool( annotations=ToolAnnotations( @@ -133,4 +133,4 @@ async def search_people( return result except Exception as e: - return handle_tool_error(e, "search_people") + raise_tool_error(e) diff --git a/tests/test_error_handler.py b/tests/test_error_handler.py index a9acad94..59bfb5d8 100644 --- a/tests/test_error_handler.py +++ b/tests/test_error_handler.py @@ -1,44 +1,64 @@ -from linkedin_mcp_server.core.exceptions import RateLimitError +import pytest +from fastmcp.exceptions import ToolError -from linkedin_mcp_server.error_handler import handle_tool_error +from linkedin_mcp_server.core.exceptions import ( + NetworkError, + ProfileNotFoundError, + RateLimitError, + ScrapingError, +) +from 
linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.exceptions import ( CredentialsNotFoundError, + LinkedInMCPError, SessionExpiredError, ) -def test_handles_session_expired(): - result = handle_tool_error(SessionExpiredError(), "test_tool") - assert result["error"] == "session_expired" - assert "message" in result - assert "resolution" in result - +def test_raises_tool_error_for_session_expired(): + with pytest.raises(ToolError, match="Session expired"): + raise_tool_error(SessionExpiredError()) -def test_handles_credentials_not_found(): - result = handle_tool_error(CredentialsNotFoundError("no creds"), "test_tool") - assert result["error"] == "authentication_not_found" +def test_raises_tool_error_for_credentials_not_found(): + with pytest.raises(ToolError, match="Authentication not found"): + raise_tool_error(CredentialsNotFoundError("no creds")) -def test_handles_generic_exception(): - result = handle_tool_error(ValueError("oops"), "test_tool") - assert result["error"] == "unknown_error" - assert "oops" in result["message"] - -def test_handles_rate_limit_with_suggested_wait(): - """Test RateLimitError with custom suggested_wait_time attribute.""" +def test_raises_tool_error_for_rate_limit_with_custom_wait(): error = RateLimitError("Rate limited") error.suggested_wait_time = 600 - result = handle_tool_error(error, "test_tool") - assert result["error"] == "rate_limit" - assert result["suggested_wait_seconds"] == 600 - assert "600" in result["resolution"] + with pytest.raises(ToolError, match="Wait 600 seconds"): + raise_tool_error(error) -def test_handles_rate_limit_default_wait(): - """Test RateLimitError without suggested_wait_time uses default 300.""" +def test_raises_tool_error_for_rate_limit_default_wait(): error = RateLimitError("Rate limited") - result = handle_tool_error(error, "test_tool") - assert result["error"] == "rate_limit" - assert result["suggested_wait_seconds"] == 300 - assert "300" in result["resolution"] + with 
pytest.raises(ToolError, match="Wait 300 seconds"): + raise_tool_error(error) + + +def test_raises_tool_error_for_profile_not_found(): + with pytest.raises(ToolError, match="Profile not found"): + raise_tool_error(ProfileNotFoundError("gone")) + + +def test_raises_tool_error_for_network_error(): + with pytest.raises(ToolError, match="Network error"): + raise_tool_error(NetworkError("timeout")) + + +def test_raises_tool_error_for_scraping_error(): + with pytest.raises(ToolError, match="Scraping failed"): + raise_tool_error(ScrapingError("bad html")) + + +def test_raises_tool_error_for_linkedin_mcp_error(): + with pytest.raises(ToolError, match="custom mcp error"): + raise_tool_error(LinkedInMCPError("custom mcp error")) + + +def test_reraises_unknown_exception(): + """Unknown exceptions are re-raised as-is, not wrapped in ToolError.""" + with pytest.raises(ValueError, match="oops"): + raise_tool_error(ValueError("oops")) diff --git a/tests/test_tools.py b/tests/test_tools.py index c1950326..338df459 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -131,6 +131,8 @@ async def test_get_person_profile_unknown_section( assert result["unknown_sections"] == ["bogus_section"] async def test_get_person_profile_error(self, mock_context, monkeypatch): + from fastmcp.exceptions import ToolError + from linkedin_mcp_server.exceptions import SessionExpiredError monkeypatch.setattr( @@ -144,8 +146,8 @@ async def test_get_person_profile_error(self, mock_context, monkeypatch): register_person_tools(mcp) tool_fn = await get_tool_fn(mcp, "get_person_profile") - result = await tool_fn("test-user", mock_context) - assert result["error"] == "session_expired" + with pytest.raises(ToolError, match="Session expired"): + await tool_fn("test-user", mock_context) async def test_search_people(self, mock_context, patch_tool_deps, monkeypatch): expected = { From b7f4fd1fa1ac9f58a0e678557b9445ca292451cc Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 20:59:40 +0100 
Subject: [PATCH 426/565] fix(error-handler): add logging for unknown exceptions and missing tests Add logger.error with exc_info for unknown exceptions before re-raising, and add test coverage for AuthenticationError and ElementNotFoundError. --- linkedin_mcp_server/error_handler.py | 1 + tests/test_error_handler.py | 14 ++++++++++++++ 2 files changed, 15 insertions(+) diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index 1a0eecfe..b35f6ff3 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -98,4 +98,5 @@ def raise_tool_error(exception: Exception) -> NoReturn: raise ToolError(str(exception)) from exception else: + logger.error("Unexpected error: %s", exception, exc_info=True) raise exception diff --git a/tests/test_error_handler.py b/tests/test_error_handler.py index 59bfb5d8..8ca71b90 100644 --- a/tests/test_error_handler.py +++ b/tests/test_error_handler.py @@ -58,6 +58,20 @@ def test_raises_tool_error_for_linkedin_mcp_error(): raise_tool_error(LinkedInMCPError("custom mcp error")) +def test_raises_tool_error_for_authentication_error(): + from linkedin_mcp_server.core.exceptions import AuthenticationError + + with pytest.raises(ToolError, match="Authentication failed"): + raise_tool_error(AuthenticationError("bad creds")) + + +def test_raises_tool_error_for_element_not_found(): + from linkedin_mcp_server.core.exceptions import ElementNotFoundError + + with pytest.raises(ToolError, match="Element not found"): + raise_tool_error(ElementNotFoundError("missing")) + + def test_reraises_unknown_exception(): """Unknown exceptions are re-raised as-is, not wrapped in ToolError.""" with pytest.raises(ValueError, match="oops"): From 7f9177547f793c5a2e649affcfa08ffe5e4c9042 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 21:11:04 +0100 Subject: [PATCH 427/565] fix(error-handler): restore tool context in logs and add missing test Re-add optional context parameter to 
raise_tool_error() for log correlation, and add test for base LinkedInScraperException branch. --- linkedin_mcp_server/error_handler.py | 25 ++++++++++++++----------- linkedin_mcp_server/server.py | 2 +- linkedin_mcp_server/tools/company.py | 4 ++-- linkedin_mcp_server/tools/job.py | 4 ++-- linkedin_mcp_server/tools/person.py | 4 ++-- tests/test_error_handler.py | 7 +++++++ 6 files changed, 28 insertions(+), 18 deletions(-) diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index b35f6ff3..a40331cd 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -30,7 +30,7 @@ logger = logging.getLogger(__name__) -def raise_tool_error(exception: Exception) -> NoReturn: +def raise_tool_error(exception: Exception, context: str = "") -> NoReturn: """ Raise a ToolError for known LinkedIn exceptions, or re-raise unknown ones. @@ -39,64 +39,67 @@ def raise_tool_error(exception: Exception) -> NoReturn: Args: exception: The exception that occurred + context: Optional context about which tool failed (for log correlation) Raises: ToolError: For known LinkedIn exception types Exception: Re-raises unknown exceptions as-is """ + ctx = f" in {context}" if context else "" + if isinstance(exception, CredentialsNotFoundError): - logger.warning("Credentials not found: %s", exception) + logger.warning("Credentials not found%s: %s", ctx, exception) raise ToolError( "Authentication not found. Run with --login to create a browser profile." ) from exception elif isinstance(exception, SessionExpiredError): - logger.warning("Session expired: %s", exception) + logger.warning("Session expired%s: %s", ctx, exception) raise ToolError( "Session expired. Run with --login to create a new browser profile." ) from exception elif isinstance(exception, AuthenticationError): - logger.warning("Authentication failed: %s", exception) + logger.warning("Authentication failed%s: %s", ctx, exception) raise ToolError( "Authentication failed. 
Run with --login to re-authenticate." ) from exception elif isinstance(exception, RateLimitError): wait_time = getattr(exception, "suggested_wait_time", 300) - logger.warning("Rate limit: %s (wait=%ds)", exception, wait_time) + logger.warning("Rate limit%s: %s (wait=%ds)", ctx, exception, wait_time) raise ToolError( f"Rate limit detected. Wait {wait_time} seconds before trying again." ) from exception elif isinstance(exception, ProfileNotFoundError): - logger.warning("Profile not found: %s", exception) + logger.warning("Profile not found%s: %s", ctx, exception) raise ToolError( "Profile not found. Check the profile URL is correct." ) from exception elif isinstance(exception, ElementNotFoundError): - logger.warning("Element not found: %s", exception) + logger.warning("Element not found%s: %s", ctx, exception) raise ToolError( "Element not found. LinkedIn page structure may have changed." ) from exception elif isinstance(exception, NetworkError): - logger.warning("Network error: %s", exception) + logger.warning("Network error%s: %s", ctx, exception) raise ToolError( "Network error. Check your connection and try again." ) from exception elif isinstance(exception, ScrapingError): - logger.warning("Scraping error: %s", exception) + logger.warning("Scraping error%s: %s", ctx, exception) raise ToolError( "Scraping failed. LinkedIn page structure may have changed." 
) from exception elif isinstance(exception, (LinkedInScraperException, LinkedInMCPError)): - logger.warning("LinkedIn error: %s", exception) + logger.warning("LinkedIn error%s: %s", ctx, exception) raise ToolError(str(exception)) from exception else: - logger.error("Unexpected error: %s", exception, exc_info=True) + logger.error("Unexpected error%s: %s", ctx, exception, exc_info=True) raise exception diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index ae4ebb2c..eb021d04 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -49,6 +49,6 @@ async def close_session() -> Dict[str, Any]: "message": "Successfully closed the browser session and cleaned up resources", } except Exception as e: - raise_tool_error(e) + raise_tool_error(e, "close_session") return mcp diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 9797a7dc..435702c1 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -82,7 +82,7 @@ async def get_company_profile( return result except Exception as e: - raise_tool_error(e) + raise_tool_error(e, "get_company_profile") @mcp.tool( annotations=ToolAnnotations( @@ -134,4 +134,4 @@ async def get_company_posts( } except Exception as e: - raise_tool_error(e) + raise_tool_error(e, "get_company_posts") diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index c9e4005a..82cd71ab 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -62,7 +62,7 @@ async def get_job_details(job_id: str, ctx: Context) -> dict[str, Any]: return result except Exception as e: - raise_tool_error(e) + raise_tool_error(e, "get_job_details") @mcp.tool( annotations=ToolAnnotations( @@ -112,4 +112,4 @@ async def search_jobs( return result except Exception as e: - raise_tool_error(e) + raise_tool_error(e, "search_jobs") diff --git a/linkedin_mcp_server/tools/person.py 
b/linkedin_mcp_server/tools/person.py index 90f8c2d9..40adf115 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -83,7 +83,7 @@ async def get_person_profile( return result except Exception as e: - raise_tool_error(e) + raise_tool_error(e, "get_person_profile") @mcp.tool( annotations=ToolAnnotations( @@ -133,4 +133,4 @@ async def search_people( return result except Exception as e: - raise_tool_error(e) + raise_tool_error(e, "search_people") diff --git a/tests/test_error_handler.py b/tests/test_error_handler.py index 8ca71b90..ee813804 100644 --- a/tests/test_error_handler.py +++ b/tests/test_error_handler.py @@ -53,6 +53,13 @@ def test_raises_tool_error_for_scraping_error(): raise_tool_error(ScrapingError("bad html")) +def test_raises_tool_error_for_base_scraper_exception(): + from linkedin_mcp_server.core.exceptions import LinkedInScraperException + + with pytest.raises(ToolError, match="generic scraper error"): + raise_tool_error(LinkedInScraperException("generic scraper error")) + + def test_raises_tool_error_for_linkedin_mcp_error(): with pytest.raises(ToolError, match="custom mcp error"): raise_tool_error(LinkedInMCPError("custom mcp error")) From fc2df3feea41fed602f2e20635996d2489347306 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 22:52:54 +0100 Subject: [PATCH 428/565] style(error-handler): Add clarifying comments Add catch-all comment on base exception branch and NoReturn inline comments on all raise_tool_error() call sites. 
--- linkedin_mcp_server/error_handler.py | 2 ++ linkedin_mcp_server/server.py | 2 +- linkedin_mcp_server/tools/company.py | 4 ++-- linkedin_mcp_server/tools/job.py | 4 ++-- linkedin_mcp_server/tools/person.py | 4 ++-- 5 files changed, 9 insertions(+), 7 deletions(-) diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index a40331cd..60781a6a 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -97,6 +97,8 @@ def raise_tool_error(exception: Exception, context: str = "") -> NoReturn: ) from exception elif isinstance(exception, (LinkedInScraperException, LinkedInMCPError)): + # Catch-all for base exception types and any future subclasses + # without a dedicated handler above. Passes through str(exception). logger.warning("LinkedIn error%s: %s", ctx, exception) raise ToolError(str(exception)) from exception diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index eb021d04..305694da 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -49,6 +49,6 @@ async def close_session() -> Dict[str, Any]: "message": "Successfully closed the browser session and cleaned up resources", } except Exception as e: - raise_tool_error(e, "close_session") + raise_tool_error(e, "close_session") # NoReturn return mcp diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 435702c1..f3a28f03 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -82,7 +82,7 @@ async def get_company_profile( return result except Exception as e: - raise_tool_error(e, "get_company_profile") + raise_tool_error(e, "get_company_profile") # NoReturn @mcp.tool( annotations=ToolAnnotations( @@ -134,4 +134,4 @@ async def get_company_posts( } except Exception as e: - raise_tool_error(e, "get_company_posts") + raise_tool_error(e, "get_company_posts") # NoReturn diff --git a/linkedin_mcp_server/tools/job.py 
b/linkedin_mcp_server/tools/job.py index 82cd71ab..6764d15e 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -62,7 +62,7 @@ async def get_job_details(job_id: str, ctx: Context) -> dict[str, Any]: return result except Exception as e: - raise_tool_error(e, "get_job_details") + raise_tool_error(e, "get_job_details") # NoReturn @mcp.tool( annotations=ToolAnnotations( @@ -112,4 +112,4 @@ async def search_jobs( return result except Exception as e: - raise_tool_error(e, "search_jobs") + raise_tool_error(e, "search_jobs") # NoReturn diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 40adf115..9f338a2d 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -83,7 +83,7 @@ async def get_person_profile( return result except Exception as e: - raise_tool_error(e, "get_person_profile") + raise_tool_error(e, "get_person_profile") # NoReturn @mcp.tool( annotations=ToolAnnotations( @@ -133,4 +133,4 @@ async def search_people( return result except Exception as e: - raise_tool_error(e, "search_people") + raise_tool_error(e, "search_people") # NoReturn From e4f33b5a32462ae383df2d76b8fc3dd680fa9029 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Wed, 4 Mar 2026 23:45:53 +0100 Subject: [PATCH 429/565] refactor(tools): Use Depends() to inject extractor Replace repeated ensure_authenticated/get_or_create_browser/ LinkedInExtractor boilerplate in all 6 tool functions with FastMCP Depends()-based dependency injection via a single get_extractor() factory in dependencies.py. 
Resolves: #186 --- AGENTS.md | 6 ++ linkedin_mcp_server/dependencies.py | 14 +++ linkedin_mcp_server/tools/__init__.py | 1 + linkedin_mcp_server/tools/company.py | 18 +--- linkedin_mcp_server/tools/job.py | 23 ++--- linkedin_mcp_server/tools/person.py | 18 +--- tests/test_tools.py | 124 ++++++++------------------ 7 files changed, 73 insertions(+), 131 deletions(-) create mode 100644 linkedin_mcp_server/dependencies.py diff --git a/AGENTS.md b/AGENTS.md index 350d94ae..353367ff 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -80,6 +80,12 @@ All scraping tools return: `{url, sections: {name: raw_text}}`. When unknown sec - `auth.py` - `is_logged_in()`, `wait_for_manual_login()`, `warm_up_browser()` - `utils.py` - `detect_rate_limit()`, `scroll_to_bottom()`, `handle_modal_close()` +**Dependency Injection (`dependencies.py`):** + +- `get_extractor()` โ€” async factory that runs `ensure_authenticated()`, acquires the singleton browser, and returns a `LinkedInExtractor` +- Injected into tool functions via `Depends(get_extractor)` (hidden from MCP tool schema) +- No cleanup needed โ€” browser lifecycle is managed by the server lifespan + **Authentication Flow:** - Uses persistent browser profile at `~/.linkedin-mcp/profile/` diff --git a/linkedin_mcp_server/dependencies.py b/linkedin_mcp_server/dependencies.py new file mode 100644 index 00000000..ceeaa0fa --- /dev/null +++ b/linkedin_mcp_server/dependencies.py @@ -0,0 +1,14 @@ +"""Dependency injection factories for MCP tools.""" + +from linkedin_mcp_server.drivers.browser import ( + ensure_authenticated, + get_or_create_browser, +) +from linkedin_mcp_server.scraping import LinkedInExtractor + + +async def get_extractor() -> LinkedInExtractor: + """Authenticate, acquire the singleton browser, and return a ready extractor.""" + await ensure_authenticated() + browser = await get_or_create_browser() + return LinkedInExtractor(browser.page) diff --git a/linkedin_mcp_server/tools/__init__.py b/linkedin_mcp_server/tools/__init__.py 
index 52bb04d7..5852ca2a 100644 --- a/linkedin_mcp_server/tools/__init__.py +++ b/linkedin_mcp_server/tools/__init__.py @@ -13,6 +13,7 @@ Architecture: - FastMCP integration for MCP-compliant tool registration +- Depends()-based dependency injection for browser/extractor setup - ToolError-based error handling through centralized raise_tool_error() - Singleton driver pattern for session persistence - Structured data return format for consistent MCP responses diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index f3a28f03..17f739e9 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -9,12 +9,10 @@ from typing import Any from fastmcp import Context, FastMCP +from fastmcp.dependencies import Depends from mcp.types import ToolAnnotations -from linkedin_mcp_server.drivers.browser import ( - ensure_authenticated, - get_or_create_browser, -) +from linkedin_mcp_server.dependencies import get_extractor from linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.scraping import LinkedInExtractor, parse_company_sections @@ -36,6 +34,7 @@ async def get_company_profile( company_name: str, ctx: Context, sections: str | None = None, + extractor: LinkedInExtractor = Depends(get_extractor), ) -> dict[str, Any]: """ Get a specific company's LinkedIn profile. @@ -55,8 +54,6 @@ async def get_company_profile( The LLM should parse the raw text in each section. 
""" try: - await ensure_authenticated() - requested, unknown = parse_company_sections(sections) logger.info( @@ -65,9 +62,6 @@ async def get_company_profile( sections, ) - browser = await get_or_create_browser() - extractor = LinkedInExtractor(browser.page) - await ctx.report_progress( progress=0, total=100, message="Starting company profile scrape" ) @@ -95,6 +89,7 @@ async def get_company_profile( async def get_company_posts( company_name: str, ctx: Context, + extractor: LinkedInExtractor = Depends(get_extractor), ) -> dict[str, Any]: """ Get recent posts from a company's LinkedIn feed. @@ -108,13 +103,8 @@ async def get_company_posts( The LLM should parse the raw text to extract individual posts. """ try: - await ensure_authenticated() - logger.info("Scraping company posts: %s", company_name) - browser = await get_or_create_browser() - extractor = LinkedInExtractor(browser.page) - await ctx.report_progress( progress=0, total=100, message="Starting company posts scrape" ) diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 6764d15e..1a4513cd 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -8,12 +8,10 @@ from typing import Any from fastmcp import Context, FastMCP +from fastmcp.dependencies import Depends from mcp.types import ToolAnnotations -from linkedin_mcp_server.drivers.browser import ( - ensure_authenticated, - get_or_create_browser, -) +from linkedin_mcp_server.dependencies import get_extractor from linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.scraping import LinkedInExtractor @@ -31,7 +29,11 @@ def register_job_tools(mcp: FastMCP) -> None: openWorldHint=True, ) ) - async def get_job_details(job_id: str, ctx: Context) -> dict[str, Any]: + async def get_job_details( + job_id: str, + ctx: Context, + extractor: LinkedInExtractor = Depends(get_extractor), + ) -> dict[str, Any]: """ Get job details for a specific job posting on LinkedIn. 
@@ -44,13 +46,8 @@ async def get_job_details(job_id: str, ctx: Context) -> dict[str, Any]: The LLM should parse the raw text to extract job details. """ try: - await ensure_authenticated() - logger.info("Scraping job: %s", job_id) - browser = await get_or_create_browser() - extractor = LinkedInExtractor(browser.page) - await ctx.report_progress( progress=0, total=100, message="Starting job scrape" ) @@ -76,6 +73,7 @@ async def search_jobs( keywords: str, ctx: Context, location: str | None = None, + extractor: LinkedInExtractor = Depends(get_extractor), ) -> dict[str, Any]: """ Search for jobs on LinkedIn. @@ -90,17 +88,12 @@ async def search_jobs( The LLM should parse the raw text to extract job listings. """ try: - await ensure_authenticated() - logger.info( "Searching jobs: keywords='%s', location='%s'", keywords, location, ) - browser = await get_or_create_browser() - extractor = LinkedInExtractor(browser.page) - await ctx.report_progress( progress=0, total=100, message="Starting job search" ) diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 9f338a2d..690c2ebe 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -9,12 +9,10 @@ from typing import Any from fastmcp import Context, FastMCP +from fastmcp.dependencies import Depends from mcp.types import ToolAnnotations -from linkedin_mcp_server.drivers.browser import ( - ensure_authenticated, - get_or_create_browser, -) +from linkedin_mcp_server.dependencies import get_extractor from linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.scraping import LinkedInExtractor, parse_person_sections @@ -36,6 +34,7 @@ async def get_person_profile( linkedin_username: str, ctx: Context, sections: str | None = None, + extractor: LinkedInExtractor = Depends(get_extractor), ) -> dict[str, Any]: """ Get a specific person's LinkedIn profile. 
@@ -56,8 +55,6 @@ async def get_person_profile( The LLM should parse the raw text in each section. """ try: - await ensure_authenticated() - requested, unknown = parse_person_sections(sections) logger.info( @@ -66,9 +63,6 @@ async def get_person_profile( sections, ) - browser = await get_or_create_browser() - extractor = LinkedInExtractor(browser.page) - await ctx.report_progress( progress=0, total=100, message="Starting person profile scrape" ) @@ -97,6 +91,7 @@ async def search_people( keywords: str, ctx: Context, location: str | None = None, + extractor: LinkedInExtractor = Depends(get_extractor), ) -> dict[str, Any]: """ Search for people on LinkedIn. @@ -111,17 +106,12 @@ async def search_people( The LLM should parse the raw text to extract individual people and their profiles. """ try: - await ensure_authenticated() - logger.info( "Searching people: keywords='%s', location='%s'", keywords, location, ) - browser = await get_or_create_browser() - extractor = LinkedInExtractor(browser.page) - await ctx.report_progress( progress=0, total=100, message="Starting people search" ) diff --git a/tests/test_tools.py b/tests/test_tools.py index 338df459..7ad1fe09 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -15,24 +15,6 @@ async def get_tool_fn( return tool.fn # type: ignore[attr-defined] -@pytest.fixture -def patch_tool_deps(monkeypatch): - """Patch ensure_authenticated and get_or_create_browser for all tools.""" - mock_browser = MagicMock() - mock_browser.page = MagicMock() - - for module in ["person", "company", "job"]: - monkeypatch.setattr( - f"linkedin_mcp_server.tools.{module}.ensure_authenticated", AsyncMock() - ) - monkeypatch.setattr( - f"linkedin_mcp_server.tools.{module}.get_or_create_browser", - AsyncMock(return_value=mock_browser), - ) - - return mock_browser - - def _make_mock_extractor(scrape_result: dict) -> MagicMock: """Create a mock LinkedInExtractor that returns the given result.""" mock = MagicMock() @@ -46,18 +28,12 @@ def 
_make_mock_extractor(scrape_result: dict) -> MagicMock: class TestPersonTool: - async def test_get_person_profile_success( - self, mock_context, patch_tool_deps, monkeypatch - ): + async def test_get_person_profile_success(self, mock_context): expected = { "url": "https://www.linkedin.com/in/test-user/", "sections": {"main_profile": "John Doe\nSoftware Engineer"}, } mock_extractor = _make_mock_extractor(expected) - monkeypatch.setattr( - "linkedin_mcp_server.tools.person.LinkedInExtractor", - lambda *a, **kw: mock_extractor, - ) from linkedin_mcp_server.tools.person import register_person_tools @@ -65,15 +41,13 @@ async def test_get_person_profile_success( register_person_tools(mcp) tool_fn = await get_tool_fn(mcp, "get_person_profile") - result = await tool_fn("test-user", mock_context) + result = await tool_fn("test-user", mock_context, extractor=mock_extractor) assert result["url"] == "https://www.linkedin.com/in/test-user/" assert "main_profile" in result["sections"] assert "pages_visited" not in result assert "sections_requested" not in result - async def test_get_person_profile_with_sections( - self, mock_context, patch_tool_deps, monkeypatch - ): + async def test_get_person_profile_with_sections(self, mock_context): """Verify sections parameter is passed through.""" expected = { "url": "https://www.linkedin.com/in/test-user/", @@ -84,10 +58,6 @@ async def test_get_person_profile_with_sections( }, } mock_extractor = _make_mock_extractor(expected) - monkeypatch.setattr( - "linkedin_mcp_server.tools.person.LinkedInExtractor", - lambda *a, **kw: mock_extractor, - ) from linkedin_mcp_server.tools.person import register_person_tools @@ -96,7 +66,10 @@ async def test_get_person_profile_with_sections( tool_fn = await get_tool_fn(mcp, "get_person_profile") result = await tool_fn( - "test-user", mock_context, sections="experience,contact_info" + "test-user", + mock_context, + sections="experience,contact_info", + extractor=mock_extractor, ) assert "main_profile" in 
result["sections"] assert "experience" in result["sections"] @@ -108,18 +81,12 @@ async def test_get_person_profile_with_sections( assert "experience" in call_args[0][1] assert "contact_info" in call_args[0][1] - async def test_get_person_profile_unknown_section( - self, mock_context, patch_tool_deps, monkeypatch - ): + async def test_get_person_profile_unknown_section(self, mock_context): expected = { "url": "https://www.linkedin.com/in/test-user/", "sections": {"main_profile": "John Doe"}, } mock_extractor = _make_mock_extractor(expected) - monkeypatch.setattr( - "linkedin_mcp_server.tools.person.LinkedInExtractor", - lambda *a, **kw: mock_extractor, - ) from linkedin_mcp_server.tools.person import register_person_tools @@ -127,18 +94,21 @@ async def test_get_person_profile_unknown_section( register_person_tools(mcp) tool_fn = await get_tool_fn(mcp, "get_person_profile") - result = await tool_fn("test-user", mock_context, sections="bogus_section") + result = await tool_fn( + "test-user", + mock_context, + sections="bogus_section", + extractor=mock_extractor, + ) assert result["unknown_sections"] == ["bogus_section"] - async def test_get_person_profile_error(self, mock_context, monkeypatch): + async def test_get_person_profile_error(self, mock_context): from fastmcp.exceptions import ToolError from linkedin_mcp_server.exceptions import SessionExpiredError - monkeypatch.setattr( - "linkedin_mcp_server.tools.person.ensure_authenticated", - AsyncMock(side_effect=SessionExpiredError()), - ) + mock_extractor = MagicMock() + mock_extractor.scrape_person = AsyncMock(side_effect=SessionExpiredError()) from linkedin_mcp_server.tools.person import register_person_tools @@ -147,18 +117,14 @@ async def test_get_person_profile_error(self, mock_context, monkeypatch): tool_fn = await get_tool_fn(mcp, "get_person_profile") with pytest.raises(ToolError, match="Session expired"): - await tool_fn("test-user", mock_context) + await tool_fn("test-user", mock_context, 
extractor=mock_extractor) - async def test_search_people(self, mock_context, patch_tool_deps, monkeypatch): + async def test_search_people(self, mock_context): expected = { "url": "https://www.linkedin.com/search/results/people/?keywords=AI+engineer&location=New+York", "sections": {"search_results": "Jane Doe\nAI Engineer at Acme\nNew York"}, } mock_extractor = _make_mock_extractor(expected) - monkeypatch.setattr( - "linkedin_mcp_server.tools.person.LinkedInExtractor", - lambda *a, **kw: mock_extractor, - ) from linkedin_mcp_server.tools.person import register_person_tools @@ -166,25 +132,21 @@ async def test_search_people(self, mock_context, patch_tool_deps, monkeypatch): register_person_tools(mcp) tool_fn = await get_tool_fn(mcp, "search_people") - result = await tool_fn("AI engineer", mock_context, location="New York") + result = await tool_fn( + "AI engineer", mock_context, location="New York", extractor=mock_extractor + ) assert "search_results" in result["sections"] assert "pages_visited" not in result mock_extractor.search_people.assert_awaited_once_with("AI engineer", "New York") class TestCompanyTools: - async def test_get_company_profile( - self, mock_context, patch_tool_deps, monkeypatch - ): + async def test_get_company_profile(self, mock_context): expected = { "url": "https://www.linkedin.com/company/testcorp/", "sections": {"about": "TestCorp\nWe build things"}, } mock_extractor = _make_mock_extractor(expected) - monkeypatch.setattr( - "linkedin_mcp_server.tools.company.LinkedInExtractor", - lambda *a, **kw: mock_extractor, - ) from linkedin_mcp_server.tools.company import register_company_tools @@ -192,22 +154,16 @@ async def test_get_company_profile( register_company_tools(mcp) tool_fn = await get_tool_fn(mcp, "get_company_profile") - result = await tool_fn("testcorp", mock_context) + result = await tool_fn("testcorp", mock_context, extractor=mock_extractor) assert "about" in result["sections"] assert "pages_visited" not in result - async def 
test_get_company_profile_unknown_section( - self, mock_context, patch_tool_deps, monkeypatch - ): + async def test_get_company_profile_unknown_section(self, mock_context): expected = { "url": "https://www.linkedin.com/company/testcorp/", "sections": {"about": "TestCorp\nWe build things"}, } mock_extractor = _make_mock_extractor(expected) - monkeypatch.setattr( - "linkedin_mcp_server.tools.company.LinkedInExtractor", - lambda *a, **kw: mock_extractor, - ) from linkedin_mcp_server.tools.company import register_company_tools @@ -215,16 +171,14 @@ async def test_get_company_profile_unknown_section( register_company_tools(mcp) tool_fn = await get_tool_fn(mcp, "get_company_profile") - result = await tool_fn("testcorp", mock_context, sections="bogus") + result = await tool_fn( + "testcorp", mock_context, sections="bogus", extractor=mock_extractor + ) assert result["unknown_sections"] == ["bogus"] - async def test_get_company_posts(self, mock_context, patch_tool_deps, monkeypatch): + async def test_get_company_posts(self, mock_context): mock_extractor = MagicMock() mock_extractor.extract_page = AsyncMock(return_value="Post 1\nPost 2") - monkeypatch.setattr( - "linkedin_mcp_server.tools.company.LinkedInExtractor", - lambda *a, **kw: mock_extractor, - ) from linkedin_mcp_server.tools.company import register_company_tools @@ -232,7 +186,7 @@ async def test_get_company_posts(self, mock_context, patch_tool_deps, monkeypatc register_company_tools(mcp) tool_fn = await get_tool_fn(mcp, "get_company_posts") - result = await tool_fn("testcorp", mock_context) + result = await tool_fn("testcorp", mock_context, extractor=mock_extractor) assert "posts" in result["sections"] assert result["sections"]["posts"] == "Post 1\nPost 2" assert "pages_visited" not in result @@ -240,16 +194,12 @@ async def test_get_company_posts(self, mock_context, patch_tool_deps, monkeypatc class TestJobTools: - async def test_get_job_details(self, mock_context, patch_tool_deps, monkeypatch): + async def 
test_get_job_details(self, mock_context): expected = { "url": "https://www.linkedin.com/jobs/view/12345/", "sections": {"job_posting": "Software Engineer\nGreat opportunity"}, } mock_extractor = _make_mock_extractor(expected) - monkeypatch.setattr( - "linkedin_mcp_server.tools.job.LinkedInExtractor", - lambda *a, **kw: mock_extractor, - ) from linkedin_mcp_server.tools.job import register_job_tools @@ -257,20 +207,16 @@ async def test_get_job_details(self, mock_context, patch_tool_deps, monkeypatch) register_job_tools(mcp) tool_fn = await get_tool_fn(mcp, "get_job_details") - result = await tool_fn("12345", mock_context) + result = await tool_fn("12345", mock_context, extractor=mock_extractor) assert "job_posting" in result["sections"] assert "pages_visited" not in result - async def test_search_jobs(self, mock_context, patch_tool_deps, monkeypatch): + async def test_search_jobs(self, mock_context): expected = { "url": "https://www.linkedin.com/jobs/search/?keywords=python", "sections": {"search_results": "Job 1\nJob 2"}, } mock_extractor = _make_mock_extractor(expected) - monkeypatch.setattr( - "linkedin_mcp_server.tools.job.LinkedInExtractor", - lambda *a, **kw: mock_extractor, - ) from linkedin_mcp_server.tools.job import register_job_tools @@ -278,6 +224,8 @@ async def test_search_jobs(self, mock_context, patch_tool_deps, monkeypatch): register_job_tools(mcp) tool_fn = await get_tool_fn(mcp, "search_jobs") - result = await tool_fn("python", mock_context, location="Remote") + result = await tool_fn( + "python", mock_context, location="Remote", extractor=mock_extractor + ) assert "search_results" in result["sections"] assert "pages_visited" not in result From 8e30fd5f1432ee6efc7739d93d26ea9d307e53e8 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 00:58:57 +0100 Subject: [PATCH 430/565] fix(dependencies): Enhance error handling in get_extractor Updated the get_extractor function to route errors through raise_tool_error, ensuring that MCP 
clients receive structured ToolError responses for authentication failures. Added a test to verify that authentication errors are correctly handled and produce the expected ToolError response. --- linkedin_mcp_server/dependencies.py | 16 ++++++++++++---- tests/test_tools.py | 25 +++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 4 deletions(-) diff --git a/linkedin_mcp_server/dependencies.py b/linkedin_mcp_server/dependencies.py index ceeaa0fa..d8f2607b 100644 --- a/linkedin_mcp_server/dependencies.py +++ b/linkedin_mcp_server/dependencies.py @@ -4,11 +4,19 @@ ensure_authenticated, get_or_create_browser, ) +from linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.scraping import LinkedInExtractor async def get_extractor() -> LinkedInExtractor: - """Authenticate, acquire the singleton browser, and return a ready extractor.""" - await ensure_authenticated() - browser = await get_or_create_browser() - return LinkedInExtractor(browser.page) + """Authenticate, acquire the singleton browser, and return a ready extractor. + + Errors are routed through raise_tool_error() so MCP clients receive + the same structured ToolError responses as tool-level exceptions. 
+ """ + try: + browser = await get_or_create_browser() + await ensure_authenticated() + return LinkedInExtractor(browser.page) + except Exception as e: + raise_tool_error(e, "get_extractor") # NoReturn diff --git a/tests/test_tools.py b/tests/test_tools.py index 7ad1fe09..76a68cc4 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -119,6 +119,31 @@ async def test_get_person_profile_error(self, mock_context): with pytest.raises(ToolError, match="Session expired"): await tool_fn("test-user", mock_context, extractor=mock_extractor) + async def test_get_person_profile_auth_error(self, monkeypatch): + """Auth failures in the DI layer produce proper ToolError responses.""" + from fastmcp.exceptions import ToolError + + from linkedin_mcp_server.core.exceptions import AuthenticationError + + mock_browser = MagicMock() + mock_browser.page = MagicMock() + monkeypatch.setattr( + "linkedin_mcp_server.dependencies.get_or_create_browser", + AsyncMock(return_value=mock_browser), + ) + monkeypatch.setattr( + "linkedin_mcp_server.dependencies.ensure_authenticated", + AsyncMock(side_effect=AuthenticationError("Session expired or invalid.")), + ) + + from linkedin_mcp_server.tools.person import register_person_tools + + mcp = FastMCP("test") + register_person_tools(mcp) + + with pytest.raises(ToolError, match="Authentication failed"): + await mcp.call_tool("get_person_profile", {"linkedin_username": "test"}) + async def test_search_people(self, mock_context): expected = { "url": "https://www.linkedin.com/search/results/people/?keywords=AI+engineer&location=New+York", From 694dd88eb451a9ea6ce9cbddb8dadccf8b06d082 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 01:07:56 +0100 Subject: [PATCH 431/565] style(deps): Soften get_extractor docstring --- linkedin_mcp_server/dependencies.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/linkedin_mcp_server/dependencies.py b/linkedin_mcp_server/dependencies.py index d8f2607b..2a071c89 100644 
--- a/linkedin_mcp_server/dependencies.py +++ b/linkedin_mcp_server/dependencies.py @@ -11,8 +11,8 @@ async def get_extractor() -> LinkedInExtractor: """Authenticate, acquire the singleton browser, and return a ready extractor. - Errors are routed through raise_tool_error() so MCP clients receive - the same structured ToolError responses as tool-level exceptions. + Known LinkedIn exceptions are converted to structured ToolError responses + via raise_tool_error(); unexpected exceptions propagate as-is. """ try: browser = await get_or_create_browser() From 5bf48f4dae3a2873f4187473dab297552b0d3bc4 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 01:22:29 +0100 Subject: [PATCH 432/565] docs(deps): Fix operation order in docstring --- AGENTS.md | 2 +- linkedin_mcp_server/dependencies.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 353367ff..94900c58 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -82,7 +82,7 @@ All scraping tools return: `{url, sections: {name: raw_text}}`. When unknown sec **Dependency Injection (`dependencies.py`):** -- `get_extractor()` โ€” async factory that runs `ensure_authenticated()`, acquires the singleton browser, and returns a `LinkedInExtractor` +- `get_extractor()` โ€” async factory that acquires the singleton browser, runs `ensure_authenticated()`, and returns a `LinkedInExtractor` - Injected into tool functions via `Depends(get_extractor)` (hidden from MCP tool schema) - No cleanup needed โ€” browser lifecycle is managed by the server lifespan diff --git a/linkedin_mcp_server/dependencies.py b/linkedin_mcp_server/dependencies.py index 2a071c89..d6c0bda4 100644 --- a/linkedin_mcp_server/dependencies.py +++ b/linkedin_mcp_server/dependencies.py @@ -9,7 +9,7 @@ async def get_extractor() -> LinkedInExtractor: - """Authenticate, acquire the singleton browser, and return a ready extractor. + """Acquire the singleton browser, authenticate, and return a ready extractor. 
Known LinkedIn exceptions are converted to structured ToolError responses via raise_tool_error(); unexpected exceptions propagate as-is. From 6ec04e06eed7c51f6df9fbe5ecd8f30e64d77a28 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 01:38:49 +0100 Subject: [PATCH 433/565] chore(config): Update model and provider settings in btca.config.jsonc --- btca.config.jsonc | 152 +++++++++++++++++++++++----------------------- 1 file changed, 76 insertions(+), 76 deletions(-) diff --git a/btca.config.jsonc b/btca.config.jsonc index 27c1236c..269e4202 100644 --- a/btca.config.jsonc +++ b/btca.config.jsonc @@ -1,78 +1,78 @@ { - "$schema": "https://btca.dev/btca.schema.json", - "resources": [ - { - "name": "fastmcp", - "type": "git", - "url": "https://github.com/jlowin/fastmcp", - "branch": "main", - "specialNotes": "FastMCP server framework. Primary MCP library used in this project." - }, - { - "name": "playwright", - "type": "git", - "url": "https://github.com/microsoft/playwright-python", - "branch": "main", - "specialNotes": "Playwright Python bindings for browser automation." - }, - { - "name": "pytest", - "type": "git", - "url": "https://github.com/pytest-dev/pytest", - "branch": "main", - "specialNotes": "Python testing framework." - }, - { - "name": "ruff", - "type": "git", - "url": "https://github.com/astral-sh/ruff", - "branch": "main", - "specialNotes": "Fast Python linter and formatter written in Rust." - }, - { - "name": "ty", - "type": "git", - "url": "https://github.com/astral-sh/ty", - "branch": "main", - "specialNotes": "Fast Python type checker from Astral, written in Rust." - }, - { - "name": "uv", - "type": "git", - "url": "https://github.com/astral-sh/uv", - "branch": "main", - "specialNotes": "Fast Python package manager from Astral, written in Rust." - }, - { - "name": "inquirer", - "type": "git", - "url": "https://github.com/magmax/python-inquirer", - "branch": "master", - "specialNotes": "Python library for CLI interactive prompts." 
- }, - { - "name": "pythonDotenv", - "type": "git", - "url": "https://github.com/theskumar/python-dotenv", - "branch": "main", - "specialNotes": "Python library for loading .env files." - }, - { - "name": "pyperclip", - "type": "git", - "url": "https://github.com/asweigart/pyperclip", - "branch": "master", - "specialNotes": "Cross-platform Python clipboard module." - }, - { - "name": "preCommit", - "type": "git", - "url": "https://github.com/pre-commit/pre-commit", - "branch": "main", - "specialNotes": "Framework for managing pre-commit hooks." - } - ], - "model": "claude-haiku-4-5", - "provider": "anthropic", - "providerTimeoutMs": 300000 + "$schema": "https://btca.dev/btca.schema.json", + "providerTimeoutMs": 300000, + "resources": [ + { + "type": "git", + "name": "fastmcp", + "url": "https://github.com/jlowin/fastmcp", + "branch": "main", + "specialNotes": "FastMCP server framework. Primary MCP library used in this project." + }, + { + "type": "git", + "name": "playwright", + "url": "https://github.com/microsoft/playwright-python", + "branch": "main", + "specialNotes": "Playwright Python bindings for browser automation." + }, + { + "type": "git", + "name": "pytest", + "url": "https://github.com/pytest-dev/pytest", + "branch": "main", + "specialNotes": "Python testing framework." + }, + { + "type": "git", + "name": "ruff", + "url": "https://github.com/astral-sh/ruff", + "branch": "main", + "specialNotes": "Fast Python linter and formatter written in Rust." + }, + { + "type": "git", + "name": "ty", + "url": "https://github.com/astral-sh/ty", + "branch": "main", + "specialNotes": "Fast Python type checker from Astral, written in Rust." + }, + { + "type": "git", + "name": "uv", + "url": "https://github.com/astral-sh/uv", + "branch": "main", + "specialNotes": "Fast Python package manager from Astral, written in Rust." 
+ }, + { + "type": "git", + "name": "inquirer", + "url": "https://github.com/magmax/python-inquirer", + "branch": "master", + "specialNotes": "Python library for CLI interactive prompts." + }, + { + "type": "git", + "name": "pythonDotenv", + "url": "https://github.com/theskumar/python-dotenv", + "branch": "main", + "specialNotes": "Python library for loading .env files." + }, + { + "type": "git", + "name": "pyperclip", + "url": "https://github.com/asweigart/pyperclip", + "branch": "master", + "specialNotes": "Cross-platform Python clipboard module." + }, + { + "type": "git", + "name": "preCommit", + "url": "https://github.com/pre-commit/pre-commit", + "branch": "main", + "specialNotes": "Framework for managing pre-commit hooks." + } + ], + "model": "claude-haiku-4.5", + "provider": "github-copilot" } From c5bf5541396f8f77f05b3fe440768af612996f28 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 01:57:34 +0100 Subject: [PATCH 434/565] refactor(tools): Simplify annotations to dict syntax and add tags Replace ToolAnnotations(...) with plain dicts, move title to top-level @mcp.tool() param, and add category tags to all tools. 
Resolves: #189 --- linkedin_mcp_server/server.py | 6 +++++- linkedin_mcp_server/tools/company.py | 19 ++++++------------- linkedin_mcp_server/tools/job.py | 19 ++++++------------- linkedin_mcp_server/tools/person.py | 19 ++++++------------- 4 files changed, 23 insertions(+), 40 deletions(-) diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index 305694da..3c0f0469 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -39,7 +39,11 @@ def create_mcp_server() -> FastMCP: register_job_tools(mcp) # Register session management tool - @mcp.tool() + @mcp.tool( + title="Close Session", + annotations={"destructiveHint": True}, + tags={"session"}, + ) async def close_session() -> Dict[str, Any]: """Close the current browser session and clean up resources.""" try: diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 17f739e9..8dc42421 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -10,7 +10,6 @@ from fastmcp import Context, FastMCP from fastmcp.dependencies import Depends -from mcp.types import ToolAnnotations from linkedin_mcp_server.dependencies import get_extractor from linkedin_mcp_server.error_handler import raise_tool_error @@ -23,12 +22,9 @@ def register_company_tools(mcp: FastMCP) -> None: """Register all company-related tools with the MCP server.""" @mcp.tool( - annotations=ToolAnnotations( - title="Get Company Profile", - readOnlyHint=True, - destructiveHint=False, - openWorldHint=True, - ) + title="Get Company Profile", + annotations={"readOnlyHint": True, "openWorldHint": True}, + tags={"company", "scraping"}, ) async def get_company_profile( company_name: str, @@ -79,12 +75,9 @@ async def get_company_profile( raise_tool_error(e, "get_company_profile") # NoReturn @mcp.tool( - annotations=ToolAnnotations( - title="Get Company Posts", - readOnlyHint=True, - destructiveHint=False, - openWorldHint=True, - ) + title="Get Company 
Posts", + annotations={"readOnlyHint": True, "openWorldHint": True}, + tags={"company", "scraping"}, ) async def get_company_posts( company_name: str, diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 1a4513cd..5700c6e4 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -9,7 +9,6 @@ from fastmcp import Context, FastMCP from fastmcp.dependencies import Depends -from mcp.types import ToolAnnotations from linkedin_mcp_server.dependencies import get_extractor from linkedin_mcp_server.error_handler import raise_tool_error @@ -22,12 +21,9 @@ def register_job_tools(mcp: FastMCP) -> None: """Register all job-related tools with the MCP server.""" @mcp.tool( - annotations=ToolAnnotations( - title="Get Job Details", - readOnlyHint=True, - destructiveHint=False, - openWorldHint=True, - ) + title="Get Job Details", + annotations={"readOnlyHint": True, "openWorldHint": True}, + tags={"job", "scraping"}, ) async def get_job_details( job_id: str, @@ -62,12 +58,9 @@ async def get_job_details( raise_tool_error(e, "get_job_details") # NoReturn @mcp.tool( - annotations=ToolAnnotations( - title="Search Jobs", - readOnlyHint=True, - destructiveHint=False, - openWorldHint=True, - ) + title="Search Jobs", + annotations={"readOnlyHint": True, "openWorldHint": True}, + tags={"job", "search"}, ) async def search_jobs( keywords: str, diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 690c2ebe..833e1549 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -10,7 +10,6 @@ from fastmcp import Context, FastMCP from fastmcp.dependencies import Depends -from mcp.types import ToolAnnotations from linkedin_mcp_server.dependencies import get_extractor from linkedin_mcp_server.error_handler import raise_tool_error @@ -23,12 +22,9 @@ def register_person_tools(mcp: FastMCP) -> None: """Register all person-related tools with the MCP server.""" 
@mcp.tool( - annotations=ToolAnnotations( - title="Get Person Profile", - readOnlyHint=True, - destructiveHint=False, - openWorldHint=True, - ) + title="Get Person Profile", + annotations={"readOnlyHint": True, "openWorldHint": True}, + tags={"person", "scraping"}, ) async def get_person_profile( linkedin_username: str, @@ -80,12 +76,9 @@ async def get_person_profile( raise_tool_error(e, "get_person_profile") # NoReturn @mcp.tool( - annotations=ToolAnnotations( - title="Search People", - readOnlyHint=True, - destructiveHint=False, - openWorldHint=True, - ) + title="Search People", + annotations={"readOnlyHint": True, "openWorldHint": True}, + tags={"person", "search"}, ) async def search_people( keywords: str, From 178601fc6b6c5c98f855aefa820155c6b685848f Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 02:19:12 +0100 Subject: [PATCH 435/565] refactor(server): Split lifespan into composable browser + auth lifespans --- linkedin_mcp_server/server.py | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index 3c0f0469..5c7c0596 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -6,11 +6,12 @@ """ import logging -from contextlib import asynccontextmanager from typing import Any, AsyncIterator, Dict from fastmcp import FastMCP +from fastmcp.server.lifespan import lifespan +from linkedin_mcp_server.authentication import get_authentication_source from linkedin_mcp_server.drivers.browser import close_browser from linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.tools.company import register_company_tools @@ -20,18 +21,29 @@ logger = logging.getLogger(__name__) -@asynccontextmanager -async def lifespan(app: FastMCP) -> AsyncIterator[None]: - """Manage server lifecycle - cleanup browser on shutdown.""" +@lifespan +async def browser_lifespan(app: FastMCP) -> AsyncIterator[dict[str, Any]]: + 
"""Manage browser lifecycle โ€” cleanup on shutdown.""" logger.info("LinkedIn MCP Server starting...") - yield + yield {} logger.info("LinkedIn MCP Server shutting down...") await close_browser() +@lifespan +async def auth_lifespan(app: FastMCP) -> AsyncIterator[dict[str, Any]]: + """Validate authentication profile exists at startup.""" + get_authentication_source() + yield {} + + def create_mcp_server() -> FastMCP: """Create and configure the MCP server with all LinkedIn tools.""" - mcp = FastMCP("linkedin_scraper", lifespan=lifespan, mask_error_details=True) + mcp = FastMCP( + "linkedin_scraper", + lifespan=auth_lifespan | browser_lifespan, + mask_error_details=True, + ) # Register all tools register_person_tools(mcp) From 64ac267bf54bf461d9de4f0dd282e9a32944c88b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 02:23:14 +0100 Subject: [PATCH 436/565] style(server): Address Greptile review feedback Use lowercase dict instead of Dict, add auth validation log line --- linkedin_mcp_server/server.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index 5c7c0596..d55babbf 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -6,7 +6,7 @@ """ import logging -from typing import Any, AsyncIterator, Dict +from typing import Any, AsyncIterator from fastmcp import FastMCP from fastmcp.server.lifespan import lifespan @@ -33,6 +33,7 @@ async def browser_lifespan(app: FastMCP) -> AsyncIterator[dict[str, Any]]: @lifespan async def auth_lifespan(app: FastMCP) -> AsyncIterator[dict[str, Any]]: """Validate authentication profile exists at startup.""" + logger.info("Validating LinkedIn authentication...") get_authentication_source() yield {} @@ -56,7 +57,7 @@ def create_mcp_server() -> FastMCP: annotations={"destructiveHint": True}, tags={"session"}, ) - async def close_session() -> Dict[str, Any]: + async def close_session() -> dict[str, Any]: """Close 
the current browser session and clean up resources.""" try: await close_browser() From 4e2843ea06efc0abbd94de72bcdca81917795c22 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 02:09:37 +0100 Subject: [PATCH 437/565] feat(tools): add global 60s tool timeouts # Conflicts: # linkedin_mcp_server/server.py # linkedin_mcp_server/tools/company.py # linkedin_mcp_server/tools/job.py # linkedin_mcp_server/tools/person.py --- AGENTS.md | 1 + README.md | 5 +++++ docs/docker-hub.md | 2 ++ linkedin_mcp_server/server.py | 1 + linkedin_mcp_server/tools/company.py | 2 ++ linkedin_mcp_server/tools/job.py | 2 ++ linkedin_mcp_server/tools/person.py | 2 ++ tests/test_tools.py | 22 ++++++++++++++++++++++ 8 files changed, 37 insertions(+) diff --git a/AGENTS.md b/AGENTS.md index 94900c58..76fbd305 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -66,6 +66,7 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis **Tool Return Format:** All scraping tools return: `{url, sections: {name: raw_text}}`. When unknown section names are provided, an `unknown_sections: [name, ...]` key is also included. +All MCP tools use a fixed FastMCP execution timeout of `60` seconds. **Scraping Architecture (`scraping/`):** diff --git a/README.md b/README.md index d3bd6ca3..ec0d5cbb 100644 --- a/README.md +++ b/README.md @@ -48,6 +48,11 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company | `get_job_details` | Get detailed information about a specific job posting | Working | | `close_session` | Close browser session and clean up resources | Working | +### Execution Timeouts + +- All MCP tools have a built-in FastMCP execution timeout of `60` seconds. +- This is separate from CLI/env `--timeout` / `TIMEOUT`, which controls browser page operation timeout in milliseconds. + > [!IMPORTANT] > **Breaking change:** LinkedIn recently made some changes to prevent scraping. 
The newest version uses [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python) with persistent browser profiles instead of Playwright with session files. Old `session.json` files and `LINKEDIN_COOKIE` env vars are no longer supported. Run `--login` again to create a new profile + cookie file that can be mounted in docker. 02/2026 diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 2a288bb7..7ad17bf7 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -59,6 +59,8 @@ uvx linkedin-scraper-mcp --login | `VIEWPORT` | `1280x720` | Browser viewport size as WIDTHxHEIGHT | | `CHROME_PATH` | - | Path to Chrome/Chromium executable (rarely needed in Docker) | +> **Note:** All MCP tools also use a fixed FastMCP execution timeout of `60` seconds. This is separate from the browser `TIMEOUT` env var above. + **Example with custom timeout:** ```json diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index d55babbf..42bbe2ad 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -53,6 +53,7 @@ def create_mcp_server() -> FastMCP: # Register session management tool @mcp.tool( + timeout=90.0, title="Close Session", annotations={"destructiveHint": True}, tags={"session"}, diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 8dc42421..88f7c020 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -22,6 +22,7 @@ def register_company_tools(mcp: FastMCP) -> None: """Register all company-related tools with the MCP server.""" @mcp.tool( + timeout=90.0, title="Get Company Profile", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"company", "scraping"}, @@ -75,6 +76,7 @@ async def get_company_profile( raise_tool_error(e, "get_company_profile") # NoReturn @mcp.tool( + timeout=90.0, title="Get Company Posts", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"company", "scraping"}, diff --git 
a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 5700c6e4..9b7d976e 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -21,6 +21,7 @@ def register_job_tools(mcp: FastMCP) -> None: """Register all job-related tools with the MCP server.""" @mcp.tool( + timeout=90.0, title="Get Job Details", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"job", "scraping"}, @@ -58,6 +59,7 @@ async def get_job_details( raise_tool_error(e, "get_job_details") # NoReturn @mcp.tool( + timeout=90.0, title="Search Jobs", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"job", "search"}, diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 833e1549..015e5354 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -22,6 +22,7 @@ def register_person_tools(mcp: FastMCP) -> None: """Register all person-related tools with the MCP server.""" @mcp.tool( + timeout=90.0, title="Get Person Profile", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"person", "scraping"}, @@ -76,6 +77,7 @@ async def get_person_profile( raise_tool_error(e, "get_person_profile") # NoReturn @mcp.tool( + timeout=90.0, title="Search People", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"person", "search"}, diff --git a/tests/test_tools.py b/tests/test_tools.py index 76a68cc4..292c7bd2 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -254,3 +254,25 @@ async def test_search_jobs(self, mock_context): ) assert "search_results" in result["sections"] assert "pages_visited" not in result + + +class TestToolTimeouts: + async def test_all_tools_have_global_timeout(self): + from linkedin_mcp_server.server import create_mcp_server + + mcp = create_mcp_server() + + tool_names = ( + "get_person_profile", + "search_people", + "get_company_profile", + "get_company_posts", + "get_job_details", + "search_jobs", + 
"close_session", + ) + + for name in tool_names: + tool = await mcp.get_tool(name) + assert tool is not None + assert tool.timeout == 60.0 From cb46d7d260b025d284221c1dd89721f67f1c9b8e Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 02:10:25 +0100 Subject: [PATCH 438/565] fix(tools): raise global timeout to 90s # Conflicts: # linkedin_mcp_server/server.py # linkedin_mcp_server/tools/company.py # linkedin_mcp_server/tools/job.py # linkedin_mcp_server/tools/person.py --- AGENTS.md | 2 +- README.md | 2 +- docs/docker-hub.md | 2 +- tests/test_tools.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 76fbd305..84128769 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -66,7 +66,7 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis **Tool Return Format:** All scraping tools return: `{url, sections: {name: raw_text}}`. When unknown section names are provided, an `unknown_sections: [name, ...]` key is also included. -All MCP tools use a fixed FastMCP execution timeout of `60` seconds. +All MCP tools use a fixed FastMCP execution timeout of `90` seconds. **Scraping Architecture (`scraping/`):** diff --git a/README.md b/README.md index ec0d5cbb..a753615b 100644 --- a/README.md +++ b/README.md @@ -50,7 +50,7 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company ### Execution Timeouts -- All MCP tools have a built-in FastMCP execution timeout of `60` seconds. +- All MCP tools have a built-in FastMCP execution timeout of `90` seconds. - This is separate from CLI/env `--timeout` / `TIMEOUT`, which controls browser page operation timeout in milliseconds. 
> [!IMPORTANT] diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 7ad17bf7..be36bd29 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -59,7 +59,7 @@ uvx linkedin-scraper-mcp --login | `VIEWPORT` | `1280x720` | Browser viewport size as WIDTHxHEIGHT | | `CHROME_PATH` | - | Path to Chrome/Chromium executable (rarely needed in Docker) | -> **Note:** All MCP tools also use a fixed FastMCP execution timeout of `60` seconds. This is separate from the browser `TIMEOUT` env var above. +> **Note:** All MCP tools also use a fixed FastMCP execution timeout of `90` seconds. This is separate from the browser `TIMEOUT` env var above. **Example with custom timeout:** diff --git a/tests/test_tools.py b/tests/test_tools.py index 292c7bd2..66d48b85 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -275,4 +275,4 @@ async def test_all_tools_have_global_timeout(self): for name in tool_names: tool = await mcp.get_tool(name) assert tool is not None - assert tool.timeout == 60.0 + assert tool.timeout == 90.0 From aecd9ea00420a304768239c67d03ae50e68bc77a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 02:43:48 +0100 Subject: [PATCH 439/565] refactor(tools): centralize tool timeout constant # Conflicts: # linkedin_mcp_server/server.py --- linkedin_mcp_server/constants.py | 3 +++ linkedin_mcp_server/server.py | 3 ++- linkedin_mcp_server/tools/company.py | 5 +++-- linkedin_mcp_server/tools/job.py | 5 +++-- linkedin_mcp_server/tools/person.py | 5 +++-- tests/test_tools.py | 3 ++- 6 files changed, 16 insertions(+), 8 deletions(-) create mode 100644 linkedin_mcp_server/constants.py diff --git a/linkedin_mcp_server/constants.py b/linkedin_mcp_server/constants.py new file mode 100644 index 00000000..5f366d45 --- /dev/null +++ b/linkedin_mcp_server/constants.py @@ -0,0 +1,3 @@ +"""Project-wide constants.""" + +TOOL_TIMEOUT_SECONDS: float = 90.0 diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index 42bbe2ad..e85f08f7 100644 
--- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -11,6 +11,7 @@ from fastmcp import FastMCP from fastmcp.server.lifespan import lifespan +from linkedin_mcp_server.constants import TOOL_TIMEOUT_SECONDS from linkedin_mcp_server.authentication import get_authentication_source from linkedin_mcp_server.drivers.browser import close_browser from linkedin_mcp_server.error_handler import raise_tool_error @@ -53,7 +54,7 @@ def create_mcp_server() -> FastMCP: # Register session management tool @mcp.tool( - timeout=90.0, + timeout=TOOL_TIMEOUT_SECONDS, title="Close Session", annotations={"destructiveHint": True}, tags={"session"}, diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 88f7c020..b9539303 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -11,6 +11,7 @@ from fastmcp import Context, FastMCP from fastmcp.dependencies import Depends +from linkedin_mcp_server.constants import TOOL_TIMEOUT_SECONDS from linkedin_mcp_server.dependencies import get_extractor from linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.scraping import LinkedInExtractor, parse_company_sections @@ -22,7 +23,7 @@ def register_company_tools(mcp: FastMCP) -> None: """Register all company-related tools with the MCP server.""" @mcp.tool( - timeout=90.0, + timeout=TOOL_TIMEOUT_SECONDS, title="Get Company Profile", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"company", "scraping"}, @@ -76,7 +77,7 @@ async def get_company_profile( raise_tool_error(e, "get_company_profile") # NoReturn @mcp.tool( - timeout=90.0, + timeout=TOOL_TIMEOUT_SECONDS, title="Get Company Posts", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"company", "scraping"}, diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 9b7d976e..a7ab34ca 100644 --- a/linkedin_mcp_server/tools/job.py +++ 
b/linkedin_mcp_server/tools/job.py @@ -10,6 +10,7 @@ from fastmcp import Context, FastMCP from fastmcp.dependencies import Depends +from linkedin_mcp_server.constants import TOOL_TIMEOUT_SECONDS from linkedin_mcp_server.dependencies import get_extractor from linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.scraping import LinkedInExtractor @@ -21,7 +22,7 @@ def register_job_tools(mcp: FastMCP) -> None: """Register all job-related tools with the MCP server.""" @mcp.tool( - timeout=90.0, + timeout=TOOL_TIMEOUT_SECONDS, title="Get Job Details", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"job", "scraping"}, @@ -59,7 +60,7 @@ async def get_job_details( raise_tool_error(e, "get_job_details") # NoReturn @mcp.tool( - timeout=90.0, + timeout=TOOL_TIMEOUT_SECONDS, title="Search Jobs", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"job", "search"}, diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 015e5354..e5ffa409 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -11,6 +11,7 @@ from fastmcp import Context, FastMCP from fastmcp.dependencies import Depends +from linkedin_mcp_server.constants import TOOL_TIMEOUT_SECONDS from linkedin_mcp_server.dependencies import get_extractor from linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.scraping import LinkedInExtractor, parse_person_sections @@ -22,7 +23,7 @@ def register_person_tools(mcp: FastMCP) -> None: """Register all person-related tools with the MCP server.""" @mcp.tool( - timeout=90.0, + timeout=TOOL_TIMEOUT_SECONDS, title="Get Person Profile", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"person", "scraping"}, @@ -77,7 +78,7 @@ async def get_person_profile( raise_tool_error(e, "get_person_profile") # NoReturn @mcp.tool( - timeout=90.0, + timeout=TOOL_TIMEOUT_SECONDS, title="Search People", 
annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"person", "search"}, diff --git a/tests/test_tools.py b/tests/test_tools.py index 66d48b85..abc82154 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -258,6 +258,7 @@ async def test_search_jobs(self, mock_context): class TestToolTimeouts: async def test_all_tools_have_global_timeout(self): + from linkedin_mcp_server.constants import TOOL_TIMEOUT_SECONDS from linkedin_mcp_server.server import create_mcp_server mcp = create_mcp_server() @@ -275,4 +276,4 @@ async def test_all_tools_have_global_timeout(self): for name in tool_names: tool = await mcp.get_tool(name) assert tool is not None - assert tool.timeout == 90.0 + assert tool.timeout == TOOL_TIMEOUT_SECONDS From fed0ace14dc7f192f34182620f7d21428ddc1d98 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 02:39:50 +0100 Subject: [PATCH 440/565] docs: reduce timeout feature emphasis --- AGENTS.md | 1 - README.md | 5 ----- docs/docker-hub.md | 2 -- 3 files changed, 8 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 84128769..94900c58 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -66,7 +66,6 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis **Tool Return Format:** All scraping tools return: `{url, sections: {name: raw_text}}`. When unknown section names are provided, an `unknown_sections: [name, ...]` key is also included. -All MCP tools use a fixed FastMCP execution timeout of `90` seconds. **Scraping Architecture (`scraping/`):** diff --git a/README.md b/README.md index a753615b..d3bd6ca3 100644 --- a/README.md +++ b/README.md @@ -48,11 +48,6 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company | `get_job_details` | Get detailed information about a specific job posting | Working | | `close_session` | Close browser session and clean up resources | Working | -### Execution Timeouts - -- All MCP tools have a built-in FastMCP execution timeout of `90` seconds. 
-- This is separate from CLI/env `--timeout` / `TIMEOUT`, which controls browser page operation timeout in milliseconds. - > [!IMPORTANT] > **Breaking change:** LinkedIn recently made some changes to prevent scraping. The newest version uses [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python) with persistent browser profiles instead of Playwright with session files. Old `session.json` files and `LINKEDIN_COOKIE` env vars are no longer supported. Run `--login` again to create a new profile + cookie file that can be mounted in docker. 02/2026 diff --git a/docs/docker-hub.md b/docs/docker-hub.md index be36bd29..2a288bb7 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -59,8 +59,6 @@ uvx linkedin-scraper-mcp --login | `VIEWPORT` | `1280x720` | Browser viewport size as WIDTHxHEIGHT | | `CHROME_PATH` | - | Path to Chrome/Chromium executable (rarely needed in Docker) | -> **Note:** All MCP tools also use a fixed FastMCP execution timeout of `90` seconds. This is separate from the browser `TIMEOUT` env var above. - **Example with custom timeout:** ```json From a43bb3af6c87ee7e38573f1fc73b09acddd40a41 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 15:02:45 +0100 Subject: [PATCH 441/565] feat(tools): add job IDs, sidebar scrolling, and pagination to search_jobs Extract job IDs from href attributes (the one thing innerText can't capture), scroll the job sidebar instead of the main page, and paginate through multiple result pages with dynamic offsets. 
Resolves: #195 --- linkedin_mcp_server/core/utils.py | 49 +++++++ linkedin_mcp_server/scraping/extractor.py | 105 +++++++++++++-- linkedin_mcp_server/tools/job.py | 13 +- tests/test_scraping.py | 157 ++++++++++++++++++++-- 4 files changed, 299 insertions(+), 25 deletions(-) diff --git a/linkedin_mcp_server/core/utils.py b/linkedin_mcp_server/core/utils.py index eb52e623..9b01a0d7 100644 --- a/linkedin_mcp_server/core/utils.py +++ b/linkedin_mcp_server/core/utils.py @@ -104,6 +104,55 @@ async def scroll_to_bottom( break +async def scroll_job_sidebar( + page: Page, pause_time: float = 1.0, max_scrolls: int = 10 +) -> None: + """Scroll the job search sidebar to load all job cards. + + LinkedIn renders job search results in a scrollable sidebar container, + not the main page body. This function finds that container by locating + a job card link and walking up to its scrollable ancestor, then scrolls + it iteratively until no new content loads. + + Args: + page: Patchright page object + pause_time: Time to pause between scrolls (seconds) + max_scrolls: Maximum number of scroll attempts + """ + scrolled = await page.evaluate( + """async ({pauseTime, maxScrolls}) => { + const link = document.querySelector('a[href*="/jobs/view/"]'); + if (!link) return 0; + + let container = link.parentElement; + while (container && container !== document.body) { + const style = window.getComputedStyle(container); + const overflowY = style.overflowY; + if ((overflowY === 'auto' || overflowY === 'scroll') + && container.scrollHeight > container.clientHeight) { + break; + } + container = container.parentElement; + } + + if (!container || container === document.body) return 0; + + let scrollCount = 0; + for (let i = 0; i < maxScrolls; i++) { + const prevHeight = container.scrollHeight; + container.scrollTop = container.scrollHeight; + await new Promise(r => setTimeout(r, pauseTime * 1000)); + if (container.scrollHeight === prevHeight) break; + scrollCount++; + } + return scrollCount; + }""", 
+ {"pauseTime": pause_time, "maxScrolls": max_scrolls}, + ) + if scrolled: + logger.debug("Scrolled job sidebar %d times", scrolled) + + async def handle_modal_close(page: Page) -> bool: """Close any popup modals that might be blocking content. diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 7634c9cf..fcb87f15 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -12,6 +12,7 @@ from linkedin_mcp_server.core.utils import ( detect_rate_limit, handle_modal_close, + scroll_job_sidebar, scroll_to_bottom, ) @@ -287,28 +288,112 @@ async def scrape_job(self, job_id: str) -> dict[str, Any]: "sections": sections, } + async def _extract_job_ids(self) -> list[str]: + """Extract unique job IDs from job card links on the current page. + + Finds all `a[href*="/jobs/view/"]` links and extracts the numeric + job ID from each href. Returns deduplicated IDs in DOM order. + """ + return await self._page.evaluate( + """() => { + const links = document.querySelectorAll('a[href*="/jobs/view/"]'); + const seen = new Set(); + const ids = []; + for (const a of links) { + const match = a.href.match(/\\/jobs\\/view\\/(\\d+)/); + if (match && !seen.has(match[1])) { + seen.add(match[1]); + ids.push(match[1]); + } + } + return ids; + }""" + ) + async def search_jobs( - self, keywords: str, location: str | None = None + self, + keywords: str, + location: str | None = None, + max_pages: int = 3, ) -> dict[str, Any]: - """Search for jobs and extract the results page. + """Search for jobs with pagination and job ID extraction. + + Scrolls the job sidebar (not the main page) and paginates through + results. Stops early when a page yields no new job IDs. 
+ + Args: + keywords: Search keywords + location: Optional location filter + max_pages: Maximum pages to load (1-10, default 3) Returns: - {url, sections: {name: text}} + {url, sections: {search_results: text}, job_ids: [str]} """ + max_pages = max(1, min(10, max_pages)) + params = f"keywords={quote_plus(keywords)}" if location: params += f"&location={quote_plus(location)}" - url = f"https://www.linkedin.com/jobs/search/?{params}" - text = await self.extract_page(url) + base_url = f"https://www.linkedin.com/jobs/search/?{params}" + all_job_ids: list[str] = [] + seen_ids: set[str] = set() + page_texts: list[str] = [] - sections: dict[str, str] = {} - if text: - sections["search_results"] = text + for page_num in range(max_pages): + if page_num > 0: + await asyncio.sleep(_NAV_DELAY) + + url = base_url if page_num == 0 else f"{base_url}&start={len(seen_ids)}" + + try: + await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + await detect_rate_limit(self._page) + + try: + await self._page.wait_for_selector("main", timeout=5000) + except PlaywrightTimeoutError: + logger.debug("No
element found on %s", url) + + await handle_modal_close(self._page) + await scroll_job_sidebar(self._page, pause_time=0.5, max_scrolls=5) + + # Extract job IDs from hrefs + page_ids = await self._extract_job_ids() + new_ids = [jid for jid in page_ids if jid not in seen_ids] + + if not new_ids and page_num > 0: + logger.debug("No new job IDs on page %d, stopping", page_num + 1) + break + + for jid in new_ids: + seen_ids.add(jid) + all_job_ids.append(jid) + + # Extract innerText + raw = await self._page.evaluate( + """() => { + const main = document.querySelector('main'); + return main ? main.innerText : document.body.innerText; + }""" + ) + if raw: + cleaned = strip_linkedin_noise(raw) + if cleaned: + page_texts.append(cleaned) + + except LinkedInScraperException: + raise + except Exception as e: + logger.warning("Error on search page %d: %s", page_num + 1, e) + break return { - "url": url, - "sections": sections, + "url": base_url, + "sections": {"search_results": "\n---\n".join(page_texts)} + if page_texts + else {}, + "job_ids": all_job_ids, } async def search_people( diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index a7ab34ca..f4dc7432 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -69,32 +69,37 @@ async def search_jobs( keywords: str, ctx: Context, location: str | None = None, + max_pages: int = 3, extractor: LinkedInExtractor = Depends(get_extractor), ) -> dict[str, Any]: """ Search for jobs on LinkedIn. + Returns job_ids that can be passed to get_job_details for full info. + Args: keywords: Search keywords (e.g., "software engineer", "data scientist") ctx: FastMCP context for progress reporting location: Optional location filter (e.g., "San Francisco", "Remote") + max_pages: Maximum number of result pages to load (1-10, default 3) Returns: - Dict with url and sections (name -> raw text). - The LLM should parse the raw text to extract job listings. 
+ Dict with url, sections (name -> raw text), and job_ids (list of + numeric job ID strings usable with get_job_details). """ try: logger.info( - "Searching jobs: keywords='%s', location='%s'", + "Searching jobs: keywords='%s', location='%s', max_pages=%d", keywords, location, + max_pages, ) await ctx.report_progress( progress=0, total=100, message="Starting job search" ) - result = await extractor.search_jobs(keywords, location) + result = await extractor.search_jobs(keywords, location, max_pages) await ctx.report_progress(progress=100, total=100, message="Complete") diff --git a/tests/test_scraping.py b/tests/test_scraping.py index c97440dd..1115bb1f 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -433,21 +433,156 @@ async def test_scrape_job(self, mock_page): assert "pages_visited" not in result assert "sections_requested" not in result - async def test_search_jobs(self, mock_page): - extractor = LinkedInExtractor(mock_page) - with patch.object( - extractor, - "extract_page", - new_callable=AsyncMock, - return_value="Job 1\nJob 2", + +class TestSearchJobs: + """Tests for search_jobs with job ID extraction and pagination.""" + + @pytest.fixture + def _patch_search_deps(self): + """Patch all external dependencies used by search_jobs.""" + with ( + patch( + "linkedin_mcp_server.scraping.extractor.detect_rate_limit", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.handle_modal_close", + new_callable=AsyncMock, + return_value=False, + ), + patch( + "linkedin_mcp_server.scraping.extractor.scroll_job_sidebar", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), ): - result = await extractor.search_jobs("python", "Remote") + yield + + async def test_returns_job_ids(self, mock_page, _patch_search_deps): + """search_jobs should return a job_ids list extracted from hrefs.""" + mock_page.evaluate = AsyncMock( + side_effect=[ + ["111", "222", 
"333"], # _extract_job_ids + "Job 1\nJob 2\nJob 3", # innerText + ] + ) + extractor = LinkedInExtractor(mock_page) + result = await extractor.search_jobs("python", max_pages=1) + + assert result["job_ids"] == ["111", "222", "333"] + assert "search_results" in result["sections"] + + async def test_pagination_uses_dynamic_start(self, mock_page, _patch_search_deps): + """Pages after the first should use &start= based on unique IDs seen.""" + call_count = 0 + + async def evaluate_side_effect(*args, **kwargs): + nonlocal call_count + call_count += 1 + if call_count % 2 == 1: + if call_count == 1: + return ["100", "200", "300"] + return ["400", "500"] + return f"Page text {call_count // 2}" + + mock_page.evaluate = AsyncMock(side_effect=evaluate_side_effect) + extractor = LinkedInExtractor(mock_page) + result = await extractor.search_jobs("python", max_pages=2) + + assert result["job_ids"] == ["100", "200", "300", "400", "500"] + goto_calls = mock_page.goto.call_args_list + assert len(goto_calls) == 2 + assert "&start=3" in goto_calls[1].args[0] + async def test_deduplication_across_pages(self, mock_page, _patch_search_deps): + """Duplicate job IDs across pages should be deduplicated.""" + call_count = 0 + + async def evaluate_side_effect(*args, **kwargs): + nonlocal call_count + call_count += 1 + if call_count % 2 == 1: + if call_count == 1: + return ["100", "200"] + return ["200", "300"] # 200 is duplicate + return "text" + + mock_page.evaluate = AsyncMock(side_effect=evaluate_side_effect) + extractor = LinkedInExtractor(mock_page) + result = await extractor.search_jobs("python", max_pages=2) + + assert result["job_ids"] == ["100", "200", "300"] + + async def test_early_stop_no_new_ids(self, mock_page, _patch_search_deps): + """Should stop early when a page yields no new job IDs.""" + call_count = 0 + + async def evaluate_side_effect(*args, **kwargs): + nonlocal call_count + call_count += 1 + if call_count % 2 == 1: + if call_count == 1: + return ["100", "200"] + 
return ["100", "200"] # All duplicates + return "text" + + mock_page.evaluate = AsyncMock(side_effect=evaluate_side_effect) + extractor = LinkedInExtractor(mock_page) + result = await extractor.search_jobs("python", max_pages=5) + + assert result["job_ids"] == ["100", "200"] + assert mock_page.goto.await_count == 2 + + async def test_max_pages_clamped(self, mock_page, _patch_search_deps): + """max_pages should be clamped to 1-10 range.""" + mock_page.evaluate = AsyncMock(side_effect=[[], "text"]) + extractor = LinkedInExtractor(mock_page) + + result = await extractor.search_jobs("python", max_pages=0) + assert "job_ids" in result + assert mock_page.goto.await_count == 1 + + async def test_single_page(self, mock_page, _patch_search_deps): + """max_pages=1 should only visit one page.""" + mock_page.evaluate = AsyncMock(side_effect=[["42"], "Job posting text"]) + extractor = LinkedInExtractor(mock_page) + result = await extractor.search_jobs("python", "Remote", max_pages=1) + + assert result["job_ids"] == ["42"] assert "keywords=python" in result["url"] assert "location=Remote" in result["url"] - assert "search_results" in result["sections"] - assert "pages_visited" not in result - assert "sections_requested" not in result + assert mock_page.goto.await_count == 1 + + async def test_page_texts_joined_with_separator( + self, mock_page, _patch_search_deps + ): + """Multiple pages should join text with --- separator.""" + call_count = 0 + + async def evaluate_side_effect(*args, **kwargs): + nonlocal call_count + call_count += 1 + if call_count % 2 == 1: + return [str(call_count * 100)] + return f"Page {call_count // 2} content" + + mock_page.evaluate = AsyncMock(side_effect=evaluate_side_effect) + extractor = LinkedInExtractor(mock_page) + result = await extractor.search_jobs("python", max_pages=2) + + assert "\n---\n" in result["sections"]["search_results"] + + async def test_empty_results(self, mock_page, _patch_search_deps): + """Should handle empty results 
gracefully.""" + mock_page.evaluate = AsyncMock(side_effect=[[], ""]) + extractor = LinkedInExtractor(mock_page) + result = await extractor.search_jobs("nonexistent_xyz") + + assert result["job_ids"] == [] + assert result["sections"] == {} class TestStripLinkedInNoise: From f24dd88944ef064c1965b214a21d1e0b2e691eca Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 15:22:45 +0100 Subject: [PATCH 442/565] fix(tools): address Copilot review feedback on search_jobs - Use fixed 25-per-page offset instead of dynamic ID count - Read "Page X of Y" from pagination state to cap pagination - Add soft rate-limit retry via _extract_search_page helper - Use keyword arguments in tool wrapper for clarity --- linkedin_mcp_server/scraping/extractor.py | 125 ++++++-- linkedin_mcp_server/tools/job.py | 4 +- tests/test_scraping.py | 371 ++++++++++++++++------ 3 files changed, 385 insertions(+), 115 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index fcb87f15..743d9b2f 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -310,6 +310,82 @@ async def _extract_job_ids(self) -> list[str]: }""" ) + async def _extract_search_page(self, url: str) -> str: + """Extract innerText from a job search page with soft rate-limit retry. + + Mirrors the noise-only detection and single-retry behavior of + ``extract_page`` / ``_extract_page_once`` so that callers get a + ``_RATE_LIMITED_MSG`` sentinel instead of silent empty results. 
+ """ + try: + result = await self._extract_search_page_once(url) + if result != _RATE_LIMITED_MSG: + return result + + logger.info( + "Retrying search page %s after %.0fs backoff", + url, + _RATE_LIMIT_RETRY_DELAY, + ) + await asyncio.sleep(_RATE_LIMIT_RETRY_DELAY) + return await self._extract_search_page_once(url) + + except LinkedInScraperException: + raise + except Exception as e: + logger.warning("Failed to extract search page %s: %s", url, e) + return "" + + async def _extract_search_page_once(self, url: str) -> str: + """Single attempt to navigate, scroll sidebar, and extract innerText.""" + await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + await detect_rate_limit(self._page) + + try: + await self._page.wait_for_selector("main", timeout=5000) + except PlaywrightTimeoutError: + logger.debug("No
element found on %s", url) + + await handle_modal_close(self._page) + await scroll_job_sidebar(self._page, pause_time=0.5, max_scrolls=5) + + raw = await self._page.evaluate( + """() => { + const main = document.querySelector('main'); + return main ? main.innerText : document.body.innerText; + }""" + ) + + if not raw: + return "" + cleaned = strip_linkedin_noise(raw) + if not cleaned and raw.strip(): + logger.warning( + "Search page %s returned only LinkedIn chrome (likely rate-limited)", + url, + ) + return _RATE_LIMITED_MSG + return cleaned + + async def _get_total_search_pages(self) -> int | None: + """Read total page count from LinkedIn's pagination state element. + + Parses the "Page X of Y" text from ``.jobs-search-pagination__page-state``. + Returns ``None`` when the element is absent or unparseable. + """ + text = await self._page.evaluate( + """() => { + const el = document.querySelector( + '.jobs-search-pagination__page-state' + ); + return el ? el.innerText.trim() : null; + }""" + ) + if not text: + return None + match = re.search(r"of\s+(\d+)", text) + return int(match.group(1)) if match else None + async def search_jobs( self, keywords: str, @@ -319,7 +395,8 @@ async def search_jobs( """Search for jobs with pagination and job ID extraction. Scrolls the job sidebar (not the main page) and paginates through - results. Stops early when a page yields no new job IDs. + results. Uses LinkedIn's "Page X of Y" indicator to cap pagination, + and stops early when a page yields no new job IDs. 
Args: keywords: Search keywords @@ -329,6 +406,9 @@ async def search_jobs( Returns: {url, sections: {search_results: text}, job_ids: [str]} """ + # LinkedIn shows 25 results per page + _PAGE_SIZE = 25 + max_pages = max(1, min(10, max_pages)) params = f"keywords={quote_plus(keywords)}" @@ -339,26 +419,34 @@ async def search_jobs( all_job_ids: list[str] = [] seen_ids: set[str] = set() page_texts: list[str] = [] + total_pages: int | None = None for page_num in range(max_pages): + # Stop if we already know we've reached the last page + if total_pages is not None and page_num >= total_pages: + logger.debug( + "Reached last page (%d of %d), stopping", + page_num, + total_pages, + ) + break + if page_num > 0: await asyncio.sleep(_NAV_DELAY) - url = base_url if page_num == 0 else f"{base_url}&start={len(seen_ids)}" + offset = page_num * _PAGE_SIZE + url = base_url if page_num == 0 else f"{base_url}&start={offset}" try: - await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) - await detect_rate_limit(self._page) - - try: - await self._page.wait_for_selector("main", timeout=5000) - except PlaywrightTimeoutError: - logger.debug("No
element found on %s", url) + text = await self._extract_search_page(url) - await handle_modal_close(self._page) - await scroll_job_sidebar(self._page, pause_time=0.5, max_scrolls=5) + # Read total pages from pagination state (e.g. "Page 1 of 40") + if total_pages is None: + total_pages = await self._get_total_search_pages() + if total_pages is not None: + logger.debug("LinkedIn reports %d total pages", total_pages) - # Extract job IDs from hrefs + # Extract job IDs from hrefs (page is already loaded) page_ids = await self._extract_job_ids() new_ids = [jid for jid in page_ids if jid not in seen_ids] @@ -370,17 +458,8 @@ async def search_jobs( seen_ids.add(jid) all_job_ids.append(jid) - # Extract innerText - raw = await self._page.evaluate( - """() => { - const main = document.querySelector('main'); - return main ? main.innerText : document.body.innerText; - }""" - ) - if raw: - cleaned = strip_linkedin_noise(raw) - if cleaned: - page_texts.append(cleaned) + if text and text != _RATE_LIMITED_MSG: + page_texts.append(text) except LinkedInScraperException: raise diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index f4dc7432..20b4ea1f 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -99,7 +99,9 @@ async def search_jobs( progress=0, total=100, message="Starting job search" ) - result = await extractor.search_jobs(keywords, location, max_pages) + result = await extractor.search_jobs( + keywords, location=location, max_pages=max_pages + ) await ctx.report_progress(progress=100, total=100, message="Complete") diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 1115bb1f..3cea2e86 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -437,153 +437,342 @@ async def test_scrape_job(self, mock_page): class TestSearchJobs: """Tests for search_jobs with job ID extraction and pagination.""" - @pytest.fixture - def _patch_search_deps(self): - """Patch all external dependencies used 
by search_jobs.""" + async def test_returns_job_ids(self, mock_page): + """search_jobs should return a job_ids list extracted from hrefs.""" + extractor = LinkedInExtractor(mock_page) with ( - patch( - "linkedin_mcp_server.scraping.extractor.detect_rate_limit", + patch.object( + extractor, + "_extract_search_page", new_callable=AsyncMock, + return_value="Job 1\nJob 2\nJob 3", ), - patch( - "linkedin_mcp_server.scraping.extractor.handle_modal_close", + patch.object( + extractor, + "_extract_job_ids", new_callable=AsyncMock, - return_value=False, + return_value=["111", "222", "333"], ), - patch( - "linkedin_mcp_server.scraping.extractor.scroll_job_sidebar", + patch.object( + extractor, + "_get_total_search_pages", new_callable=AsyncMock, + return_value=None, ), patch( "linkedin_mcp_server.scraping.extractor.asyncio.sleep", new_callable=AsyncMock, ), ): - yield - - async def test_returns_job_ids(self, mock_page, _patch_search_deps): - """search_jobs should return a job_ids list extracted from hrefs.""" - mock_page.evaluate = AsyncMock( - side_effect=[ - ["111", "222", "333"], # _extract_job_ids - "Job 1\nJob 2\nJob 3", # innerText - ] - ) - extractor = LinkedInExtractor(mock_page) - result = await extractor.search_jobs("python", max_pages=1) + result = await extractor.search_jobs("python", max_pages=1) assert result["job_ids"] == ["111", "222", "333"] assert "search_results" in result["sections"] - async def test_pagination_uses_dynamic_start(self, mock_page, _patch_search_deps): - """Pages after the first should use &start= based on unique IDs seen.""" - call_count = 0 + async def test_pagination_uses_fixed_page_size(self, mock_page): + """Pages use &start= with fixed 25-per-page offset.""" + extractor = LinkedInExtractor(mock_page) + page1_ids = ["100", "200", "300"] + page2_ids = ["400", "500"] + id_pages = iter([page1_ids, page2_ids]) + text_pages = iter(["Page 1 text", "Page 2 text"]) + urls_visited: list[str] = [] - async def evaluate_side_effect(*args, 
**kwargs): - nonlocal call_count - call_count += 1 - if call_count % 2 == 1: - if call_count == 1: - return ["100", "200", "300"] - return ["400", "500"] - return f"Page text {call_count // 2}" + async def mock_extract(url): + urls_visited.append(url) + return next(text_pages) - mock_page.evaluate = AsyncMock(side_effect=evaluate_side_effect) - extractor = LinkedInExtractor(mock_page) - result = await extractor.search_jobs("python", max_pages=2) + with ( + patch.object(extractor, "_extract_search_page", side_effect=mock_extract), + patch.object( + extractor, + "_extract_job_ids", + new_callable=AsyncMock, + side_effect=lambda: next(id_pages), + ), + patch.object( + extractor, + "_get_total_search_pages", + new_callable=AsyncMock, + return_value=5, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.search_jobs("python", max_pages=2) assert result["job_ids"] == ["100", "200", "300", "400", "500"] - goto_calls = mock_page.goto.call_args_list - assert len(goto_calls) == 2 - assert "&start=3" in goto_calls[1].args[0] + assert len(urls_visited) == 2 + assert "&start=25" in urls_visited[1] - async def test_deduplication_across_pages(self, mock_page, _patch_search_deps): + async def test_deduplication_across_pages(self, mock_page): """Duplicate job IDs across pages should be deduplicated.""" - call_count = 0 - - async def evaluate_side_effect(*args, **kwargs): - nonlocal call_count - call_count += 1 - if call_count % 2 == 1: - if call_count == 1: - return ["100", "200"] - return ["200", "300"] # 200 is duplicate - return "text" - - mock_page.evaluate = AsyncMock(side_effect=evaluate_side_effect) extractor = LinkedInExtractor(mock_page) - result = await extractor.search_jobs("python", max_pages=2) + id_pages = iter([["100", "200"], ["200", "300"]]) + with ( + patch.object( + extractor, + "_extract_search_page", + new_callable=AsyncMock, + return_value="text", + ), + patch.object( + 
extractor, + "_extract_job_ids", + new_callable=AsyncMock, + side_effect=lambda: next(id_pages), + ), + patch.object( + extractor, + "_get_total_search_pages", + new_callable=AsyncMock, + return_value=None, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.search_jobs("python", max_pages=2) assert result["job_ids"] == ["100", "200", "300"] - async def test_early_stop_no_new_ids(self, mock_page, _patch_search_deps): + async def test_early_stop_no_new_ids(self, mock_page): """Should stop early when a page yields no new job IDs.""" - call_count = 0 + extractor = LinkedInExtractor(mock_page) + # Page 2 returns same IDs as page 1 + id_pages = iter([["100", "200"], ["100", "200"]]) + extract_call_count = 0 - async def evaluate_side_effect(*args, **kwargs): - nonlocal call_count - call_count += 1 - if call_count % 2 == 1: - if call_count == 1: - return ["100", "200"] - return ["100", "200"] # All duplicates + async def mock_extract(url): + nonlocal extract_call_count + extract_call_count += 1 return "text" - mock_page.evaluate = AsyncMock(side_effect=evaluate_side_effect) - extractor = LinkedInExtractor(mock_page) - result = await extractor.search_jobs("python", max_pages=5) + with ( + patch.object(extractor, "_extract_search_page", side_effect=mock_extract), + patch.object( + extractor, + "_extract_job_ids", + new_callable=AsyncMock, + side_effect=lambda: next(id_pages), + ), + patch.object( + extractor, + "_get_total_search_pages", + new_callable=AsyncMock, + return_value=None, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.search_jobs("python", max_pages=5) assert result["job_ids"] == ["100", "200"] - assert mock_page.goto.await_count == 2 + assert extract_call_count == 2 + + async def test_stops_at_total_pages(self, mock_page): + """Should stop when total_pages from pagination state is reached.""" 
+ extractor = LinkedInExtractor(mock_page) + with ( + patch.object( + extractor, + "_extract_search_page", + new_callable=AsyncMock, + return_value="text", + ) as mock_extract, + patch.object( + extractor, + "_extract_job_ids", + new_callable=AsyncMock, + return_value=["100"], + ), + patch.object( + extractor, + "_get_total_search_pages", + new_callable=AsyncMock, + return_value=2, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.search_jobs("python", max_pages=10) - async def test_max_pages_clamped(self, mock_page, _patch_search_deps): + # Should only visit 2 pages despite max_pages=10 + assert mock_extract.await_count == 2 + assert "job_ids" in result + + async def test_max_pages_clamped(self, mock_page): """max_pages should be clamped to 1-10 range.""" - mock_page.evaluate = AsyncMock(side_effect=[[], "text"]) extractor = LinkedInExtractor(mock_page) + with ( + patch.object( + extractor, + "_extract_search_page", + new_callable=AsyncMock, + return_value="text", + ) as mock_extract, + patch.object( + extractor, + "_extract_job_ids", + new_callable=AsyncMock, + return_value=[], + ), + patch.object( + extractor, + "_get_total_search_pages", + new_callable=AsyncMock, + return_value=None, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.search_jobs("python", max_pages=0) - result = await extractor.search_jobs("python", max_pages=0) assert "job_ids" in result - assert mock_page.goto.await_count == 1 + assert mock_extract.await_count == 1 - async def test_single_page(self, mock_page, _patch_search_deps): + async def test_single_page(self, mock_page): """max_pages=1 should only visit one page.""" - mock_page.evaluate = AsyncMock(side_effect=[["42"], "Job posting text"]) extractor = LinkedInExtractor(mock_page) - result = await extractor.search_jobs("python", "Remote", max_pages=1) + with ( + 
patch.object( + extractor, + "_extract_search_page", + new_callable=AsyncMock, + return_value="Job posting text", + ) as mock_extract, + patch.object( + extractor, + "_extract_job_ids", + new_callable=AsyncMock, + return_value=["42"], + ), + patch.object( + extractor, + "_get_total_search_pages", + new_callable=AsyncMock, + return_value=None, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.search_jobs("python", "Remote", max_pages=1) assert result["job_ids"] == ["42"] assert "keywords=python" in result["url"] assert "location=Remote" in result["url"] - assert mock_page.goto.await_count == 1 + assert mock_extract.await_count == 1 - async def test_page_texts_joined_with_separator( - self, mock_page, _patch_search_deps - ): + async def test_page_texts_joined_with_separator(self, mock_page): """Multiple pages should join text with --- separator.""" - call_count = 0 - - async def evaluate_side_effect(*args, **kwargs): - nonlocal call_count - call_count += 1 - if call_count % 2 == 1: - return [str(call_count * 100)] - return f"Page {call_count // 2} content" - - mock_page.evaluate = AsyncMock(side_effect=evaluate_side_effect) extractor = LinkedInExtractor(mock_page) - result = await extractor.search_jobs("python", max_pages=2) + text_pages = iter(["Page 1 content", "Page 2 content"]) + id_pages = iter([["100"], ["200"]]) + with ( + patch.object( + extractor, + "_extract_search_page", + new_callable=AsyncMock, + side_effect=lambda url: next(text_pages), + ), + patch.object( + extractor, + "_extract_job_ids", + new_callable=AsyncMock, + side_effect=lambda: next(id_pages), + ), + patch.object( + extractor, + "_get_total_search_pages", + new_callable=AsyncMock, + return_value=None, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.search_jobs("python", max_pages=2) assert "\n---\n" in 
result["sections"]["search_results"] + assert "Page 1 content" in result["sections"]["search_results"] + assert "Page 2 content" in result["sections"]["search_results"] - async def test_empty_results(self, mock_page, _patch_search_deps): + async def test_empty_results(self, mock_page): """Should handle empty results gracefully.""" - mock_page.evaluate = AsyncMock(side_effect=[[], ""]) extractor = LinkedInExtractor(mock_page) - result = await extractor.search_jobs("nonexistent_xyz") + with ( + patch.object( + extractor, + "_extract_search_page", + new_callable=AsyncMock, + return_value="", + ), + patch.object( + extractor, + "_extract_job_ids", + new_callable=AsyncMock, + return_value=[], + ), + patch.object( + extractor, + "_get_total_search_pages", + new_callable=AsyncMock, + return_value=None, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.search_jobs("nonexistent_xyz") assert result["job_ids"] == [] assert result["sections"] == {} + async def test_rate_limited_text_excluded(self, mock_page): + """Rate-limited pages should not appear in sections text.""" + extractor = LinkedInExtractor(mock_page) + with ( + patch.object( + extractor, + "_extract_search_page", + new_callable=AsyncMock, + return_value=_RATE_LIMITED_MSG, + ), + patch.object( + extractor, + "_extract_job_ids", + new_callable=AsyncMock, + return_value=["100"], + ), + patch.object( + extractor, + "_get_total_search_pages", + new_callable=AsyncMock, + return_value=None, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.search_jobs("python", max_pages=1) + + assert result["job_ids"] == ["100"] + assert result["sections"] == {} + class TestStripLinkedInNoise: def test_strips_footer(self): From 751995e4933293c5f70da7de3365f963432d4d55 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 15:31:39 +0100 Subject: [PATCH 
443/565] fix(tools): address Greptile review feedback - Stop on page 0 when no job IDs found (avoid useless page 1) - Fix test_stops_at_total_pages to use distinct IDs per page so only the total_pages guard stops pagination --- linkedin_mcp_server/scraping/extractor.py | 7 +++++-- tests/test_scraping.py | 6 ++++-- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 743d9b2f..40c3b9a0 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -450,8 +450,11 @@ async def search_jobs( page_ids = await self._extract_job_ids() new_ids = [jid for jid in page_ids if jid not in seen_ids] - if not new_ids and page_num > 0: - logger.debug("No new job IDs on page %d, stopping", page_num + 1) + if not new_ids: + if page_num > 0: + logger.debug( + "No new job IDs on page %d, stopping", page_num + 1 + ) break for jid in new_ids: diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 3cea2e86..822b3b5a 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -578,6 +578,8 @@ async def mock_extract(url): async def test_stops_at_total_pages(self, mock_page): """Should stop when total_pages from pagination state is reached.""" extractor = LinkedInExtractor(mock_page) + # Distinct IDs per page so the no-new-IDs guard never fires + id_pages = iter([["100"], ["200"]]) with ( patch.object( extractor, @@ -589,7 +591,7 @@ async def test_stops_at_total_pages(self, mock_page): extractor, "_extract_job_ids", new_callable=AsyncMock, - return_value=["100"], + side_effect=lambda: next(id_pages), ), patch.object( extractor, @@ -606,7 +608,7 @@ async def test_stops_at_total_pages(self, mock_page): # Should only visit 2 pages despite max_pages=10 assert mock_extract.await_count == 2 - assert "job_ids" in result + assert result["job_ids"] == ["100", "200"] async def test_max_pages_clamped(self, mock_page): """max_pages should be 
clamped to 1-10 range.""" From 42cf3b8647590c36ffdfc7c49cefaa13de0b0fbb Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 16:08:14 +0100 Subject: [PATCH 444/565] feat(tools): add search filters and fix Greptile review items Add date_posted, job_type, experience_level, work_type, easy_apply, and sort_by filters to search_jobs with human-readable normalization. Fix Greptile review: always log no-results break, move _PAGE_SIZE to module level, add Field(ge=1, le=10) on max_pages, skip ID extraction on empty text. Resolves: #174 --- linkedin_mcp_server/scraping/extractor.py | 105 +++++++++++++++++++--- linkedin_mcp_server/tools/job.py | 27 +++++- tests/test_scraping.py | 105 ++++++++++++++++++++-- 3 files changed, 212 insertions(+), 25 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 40c3b9a0..bbf4dd66 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -31,6 +31,30 @@ # Patterns that mark the start of LinkedIn page chrome (sidebar/footer). # Everything from the earliest match onwards is stripped. 
+# LinkedIn shows 25 results per page +_PAGE_SIZE = 25 + +# Normalization maps for job search filters +_DATE_POSTED_MAP = { + "past_hour": "r3600", + "past_24_hours": "r86400", + "past_week": "r604800", + "past_month": "r2592000", +} + +_EXPERIENCE_LEVEL_MAP = { + "internship": "1", + "entry": "2", + "associate": "3", + "mid_senior": "4", + "director": "5", + "executive": "6", +} + +_WORK_TYPE_MAP = {"on_site": "1", "remote": "2", "hybrid": "3"} + +_SORT_BY_MAP = {"date": "DD", "relevance": "R"} + _NOISE_MARKERS: list[re.Pattern[str]] = [ # Footer nav links: "About" immediately followed by "Accessibility" or "Talent Solutions" re.compile(r"^About\n+(?:Accessibility|Talent Solutions)", re.MULTILINE), @@ -386,11 +410,57 @@ async def _get_total_search_pages(self) -> int | None: match = re.search(r"of\s+(\d+)", text) return int(match.group(1)) if match else None + @staticmethod + def _build_job_search_url( + keywords: str, + location: str | None = None, + date_posted: str | None = None, + job_type: str | None = None, + experience_level: str | None = None, + work_type: str | None = None, + easy_apply: bool = False, + sort_by: str | None = None, + ) -> str: + """Build a LinkedIn job search URL with optional filters. + + Human-readable names are normalized to LinkedIn URL codes. + Comma-separated values are normalized individually. + Unknown values pass through unchanged. 
+ """ + params = f"keywords={quote_plus(keywords)}" + if location: + params += f"&location={quote_plus(location)}" + + def _normalize_csv(value: str, mapping: dict[str, str]) -> str: + parts = [v.strip() for v in value.split(",")] + return ",".join(mapping.get(p, p) for p in parts) + + if date_posted: + params += f"&f_TPR={_normalize_csv(date_posted, _DATE_POSTED_MAP)}" + if job_type: + params += f"&f_JT={quote_plus(job_type)}" + if experience_level: + params += f"&f_E={_normalize_csv(experience_level, _EXPERIENCE_LEVEL_MAP)}" + if work_type: + params += f"&f_WT={_normalize_csv(work_type, _WORK_TYPE_MAP)}" + if easy_apply: + params += "&f_EA=true" + if sort_by: + params += f"&sortBy={_normalize_csv(sort_by, _SORT_BY_MAP)}" + + return f"https://www.linkedin.com/jobs/search/?{params}" + async def search_jobs( self, keywords: str, location: str | None = None, max_pages: int = 3, + date_posted: str | None = None, + job_type: str | None = None, + experience_level: str | None = None, + work_type: str | None = None, + easy_apply: bool = False, + sort_by: str | None = None, ) -> dict[str, Any]: """Search for jobs with pagination and job ID extraction. 
@@ -402,20 +472,26 @@ async def search_jobs( keywords: Search keywords location: Optional location filter max_pages: Maximum pages to load (1-10, default 3) + date_posted: Filter by date posted (past_hour, past_24_hours, past_week, past_month) + job_type: Filter by job type (LinkedIn f_JT code) + experience_level: Filter by experience level (internship, entry, associate, mid_senior, director, executive) + work_type: Filter by work type (on_site, remote, hybrid) + easy_apply: Only show Easy Apply jobs + sort_by: Sort results (date, relevance) Returns: {url, sections: {search_results: text}, job_ids: [str]} """ - # LinkedIn shows 25 results per page - _PAGE_SIZE = 25 - - max_pages = max(1, min(10, max_pages)) - - params = f"keywords={quote_plus(keywords)}" - if location: - params += f"&location={quote_plus(location)}" - - base_url = f"https://www.linkedin.com/jobs/search/?{params}" + base_url = self._build_job_search_url( + keywords, + location=location, + date_posted=date_posted, + job_type=job_type, + experience_level=experience_level, + work_type=work_type, + easy_apply=easy_apply, + sort_by=sort_by, + ) all_job_ids: list[str] = [] seen_ids: set[str] = set() page_texts: list[str] = [] @@ -440,6 +516,10 @@ async def search_jobs( try: text = await self._extract_search_page(url) + if not text: + # Navigation may have failed; skip ID extraction to avoid stale DOM + break + # Read total pages from pagination state (e.g. 
"Page 1 of 40") if total_pages is None: total_pages = await self._get_total_search_pages() @@ -451,10 +531,7 @@ async def search_jobs( new_ids = [jid for jid in page_ids if jid not in seen_ids] if not new_ids: - if page_num > 0: - logger.debug( - "No new job IDs on page %d, stopping", page_num + 1 - ) + logger.debug("No new job IDs on page %d, stopping", page_num + 1) break for jid in new_ids: diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 20b4ea1f..f63ef514 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -5,10 +5,11 @@ """ import logging -from typing import Any +from typing import Annotated, Any from fastmcp import Context, FastMCP from fastmcp.dependencies import Depends +from pydantic import Field from linkedin_mcp_server.constants import TOOL_TIMEOUT_SECONDS from linkedin_mcp_server.dependencies import get_extractor @@ -69,7 +70,13 @@ async def search_jobs( keywords: str, ctx: Context, location: str | None = None, - max_pages: int = 3, + max_pages: Annotated[int, Field(ge=1, le=10)] = 3, + date_posted: str | None = None, + job_type: str | None = None, + experience_level: str | None = None, + work_type: str | None = None, + easy_apply: bool = False, + sort_by: str | None = None, extractor: LinkedInExtractor = Depends(get_extractor), ) -> dict[str, Any]: """ @@ -82,6 +89,12 @@ async def search_jobs( ctx: FastMCP context for progress reporting location: Optional location filter (e.g., "San Francisco", "Remote") max_pages: Maximum number of result pages to load (1-10, default 3) + date_posted: Filter by posting date (past_hour, past_24_hours, past_week, past_month) + job_type: Filter by job type (LinkedIn f_JT code, e.g., "F" for full-time) + experience_level: Filter by experience level, comma-separated (internship, entry, associate, mid_senior, director, executive) + work_type: Filter by work type, comma-separated (on_site, remote, hybrid) + easy_apply: Only show Easy Apply jobs 
(default false) + sort_by: Sort results (date, relevance) Returns: Dict with url, sections (name -> raw text), and job_ids (list of @@ -100,7 +113,15 @@ async def search_jobs( ) result = await extractor.search_jobs( - keywords, location=location, max_pages=max_pages + keywords, + location=location, + max_pages=max_pages, + date_posted=date_posted, + job_type=job_type, + experience_level=experience_level, + work_type=work_type, + easy_apply=easy_apply, + sort_by=sort_by, ) await ctx.report_progress(progress=100, total=100, message="Complete") diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 822b3b5a..fcb8f156 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -11,6 +11,83 @@ ) +class TestBuildJobSearchUrl: + """Tests for _build_job_search_url URL construction.""" + + def test_keywords_only(self): + url = LinkedInExtractor._build_job_search_url("python developer") + assert url == "https://www.linkedin.com/jobs/search/?keywords=python+developer" + + def test_with_location(self): + url = LinkedInExtractor._build_job_search_url("python", location="Remote") + assert "keywords=python" in url + assert "location=Remote" in url + + def test_date_posted_normalization(self): + url = LinkedInExtractor._build_job_search_url("python", date_posted="past_week") + assert "f_TPR=r604800" in url + + def test_date_posted_passthrough(self): + url = LinkedInExtractor._build_job_search_url("python", date_posted="r3600") + assert "f_TPR=r3600" in url + + def test_experience_level_normalization(self): + url = LinkedInExtractor._build_job_search_url( + "python", experience_level="entry" + ) + assert "f_E=2" in url + + def test_experience_level_csv(self): + url = LinkedInExtractor._build_job_search_url( + "python", experience_level="entry,director" + ) + assert "f_E=2,5" in url + + def test_work_type_normalization(self): + url = LinkedInExtractor._build_job_search_url("python", work_type="remote") + assert "f_WT=2" in url + + def test_work_type_csv(self): + 
url = LinkedInExtractor._build_job_search_url( + "python", work_type="on_site,hybrid" + ) + assert "f_WT=1,3" in url + + def test_easy_apply(self): + url = LinkedInExtractor._build_job_search_url("python", easy_apply=True) + assert "f_EA=true" in url + + def test_easy_apply_false_omitted(self): + url = LinkedInExtractor._build_job_search_url("python", easy_apply=False) + assert "f_EA" not in url + + def test_sort_by_normalization(self): + url = LinkedInExtractor._build_job_search_url("python", sort_by="date") + assert "sortBy=DD" in url + + def test_job_type_passthrough(self): + url = LinkedInExtractor._build_job_search_url("python", job_type="F") + assert "f_JT=F" in url + + def test_all_filters_combined(self): + url = LinkedInExtractor._build_job_search_url( + "python", + location="Berlin", + date_posted="past_week", + experience_level="entry,mid_senior", + work_type="remote", + easy_apply=True, + sort_by="date", + ) + assert "keywords=python" in url + assert "location=Berlin" in url + assert "f_TPR=r604800" in url + assert "f_E=2,4" in url + assert "f_WT=2" in url + assert "f_EA=true" in url + assert "sortBy=DD" in url + + @pytest.fixture def mock_page(): """Create a mock Patchright page.""" @@ -610,8 +687,8 @@ async def test_stops_at_total_pages(self, mock_page): assert mock_extract.await_count == 2 assert result["job_ids"] == ["100", "200"] - async def test_max_pages_clamped(self, mock_page): - """max_pages should be clamped to 1-10 range.""" + async def test_zero_max_pages_fetches_nothing(self, mock_page): + """max_pages=0 should fetch zero pages (validation at tool boundary).""" extractor = LinkedInExtractor(mock_page) with ( patch.object( @@ -639,11 +716,11 @@ async def test_max_pages_clamped(self, mock_page): ): result = await extractor.search_jobs("python", max_pages=0) - assert "job_ids" in result - assert mock_extract.await_count == 1 + assert result["job_ids"] == [] + assert mock_extract.await_count == 0 async def test_single_page(self, mock_page): - 
"""max_pages=1 should only visit one page.""" + """max_pages=1 should only visit one page; filters appear in URL.""" extractor = LinkedInExtractor(mock_page) with ( patch.object( @@ -669,11 +746,21 @@ async def test_single_page(self, mock_page): new_callable=AsyncMock, ), ): - result = await extractor.search_jobs("python", "Remote", max_pages=1) + result = await extractor.search_jobs( + "python", + "Remote", + max_pages=1, + date_posted="past_week", + work_type="remote", + easy_apply=True, + ) assert result["job_ids"] == ["42"] assert "keywords=python" in result["url"] assert "location=Remote" in result["url"] + assert "f_TPR=r604800" in result["url"] + assert "f_WT=2" in result["url"] + assert "f_EA=true" in result["url"] assert mock_extract.await_count == 1 async def test_page_texts_joined_with_separator(self, mock_page): @@ -712,7 +799,7 @@ async def test_page_texts_joined_with_separator(self, mock_page): assert "Page 2 content" in result["sections"]["search_results"] async def test_empty_results(self, mock_page): - """Should handle empty results gracefully.""" + """Should handle empty results gracefully and skip ID extraction.""" extractor = LinkedInExtractor(mock_page) with ( patch.object( @@ -726,7 +813,7 @@ async def test_empty_results(self, mock_page): "_extract_job_ids", new_callable=AsyncMock, return_value=[], - ), + ) as mock_ids, patch.object( extractor, "_get_total_search_pages", @@ -742,6 +829,8 @@ async def test_empty_results(self, mock_page): assert result["job_ids"] == [] assert result["sections"] == {} + # Empty text should skip ID extraction to avoid stale DOM + mock_ids.assert_not_awaited() async def test_rate_limited_text_excluded(self, mock_page): """Rate-limited pages should not appear in sections text.""" From 403664553664ad2a7f11ef70a2cd37717ab59b61 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 16:16:53 +0100 Subject: [PATCH 445/565] fix(tools): normalize job_type with _JOB_TYPE_MAP instead of quote_plus Use 
_normalize_csv for job_type to preserve raw commas in multi-value filters and add human-readable names (full_time, contract, etc.). --- linkedin_mcp_server/scraping/extractor.py | 14 ++++++++++++-- linkedin_mcp_server/tools/job.py | 2 +- tests/test_scraping.py | 10 ++++++++++ 3 files changed, 23 insertions(+), 3 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index bbf4dd66..1833ef75 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -51,6 +51,16 @@ "executive": "6", } +_JOB_TYPE_MAP = { + "full_time": "F", + "part_time": "P", + "contract": "C", + "temporary": "T", + "volunteer": "V", + "internship": "I", + "other": "O", +} + _WORK_TYPE_MAP = {"on_site": "1", "remote": "2", "hybrid": "3"} _SORT_BY_MAP = {"date": "DD", "relevance": "R"} @@ -438,7 +448,7 @@ def _normalize_csv(value: str, mapping: dict[str, str]) -> str: if date_posted: params += f"&f_TPR={_normalize_csv(date_posted, _DATE_POSTED_MAP)}" if job_type: - params += f"&f_JT={quote_plus(job_type)}" + params += f"&f_JT={_normalize_csv(job_type, _JOB_TYPE_MAP)}" if experience_level: params += f"&f_E={_normalize_csv(experience_level, _EXPERIENCE_LEVEL_MAP)}" if work_type: @@ -473,7 +483,7 @@ async def search_jobs( location: Optional location filter max_pages: Maximum pages to load (1-10, default 3) date_posted: Filter by date posted (past_hour, past_24_hours, past_week, past_month) - job_type: Filter by job type (LinkedIn f_JT code) + job_type: Filter by job type (full_time, part_time, contract, temporary, volunteer, internship, other) experience_level: Filter by experience level (internship, entry, associate, mid_senior, director, executive) work_type: Filter by work type (on_site, remote, hybrid) easy_apply: Only show Easy Apply jobs diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index f63ef514..90d190a5 100644 --- a/linkedin_mcp_server/tools/job.py +++ 
b/linkedin_mcp_server/tools/job.py @@ -90,7 +90,7 @@ async def search_jobs( location: Optional location filter (e.g., "San Francisco", "Remote") max_pages: Maximum number of result pages to load (1-10, default 3) date_posted: Filter by posting date (past_hour, past_24_hours, past_week, past_month) - job_type: Filter by job type (LinkedIn f_JT code, e.g., "F" for full-time) + job_type: Filter by job type, comma-separated (full_time, part_time, contract, temporary, volunteer, internship, other) experience_level: Filter by experience level, comma-separated (internship, entry, associate, mid_senior, director, executive) work_type: Filter by work type, comma-separated (on_site, remote, hybrid) easy_apply: Only show Easy Apply jobs (default false) diff --git a/tests/test_scraping.py b/tests/test_scraping.py index fcb8f156..3f9f51b8 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -65,6 +65,16 @@ def test_sort_by_normalization(self): url = LinkedInExtractor._build_job_search_url("python", sort_by="date") assert "sortBy=DD" in url + def test_job_type_normalization(self): + url = LinkedInExtractor._build_job_search_url("python", job_type="full_time") + assert "f_JT=F" in url + + def test_job_type_csv(self): + url = LinkedInExtractor._build_job_search_url( + "python", job_type="full_time,contract" + ) + assert "f_JT=F,C" in url + def test_job_type_passthrough(self): url = LinkedInExtractor._build_job_search_url("python", job_type="F") assert "f_JT=F" in url From e8f7c9a81898f3f597658bfd340611c81787a095 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 16:30:45 +0100 Subject: [PATCH 446/565] fix(tools): skip job ID extraction on rate-limited pages Break early when _extract_search_page returns _RATE_LIMITED_MSG to avoid extracting IDs from unreliable DOM state. Remove redundant truthiness check now guarded by the early break. 
--- linkedin_mcp_server/scraping/extractor.py | 6 +++--- tests/test_scraping.py | 9 +++++---- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 1833ef75..acaf504e 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -526,8 +526,8 @@ async def search_jobs( try: text = await self._extract_search_page(url) - if not text: - # Navigation may have failed; skip ID extraction to avoid stale DOM + if not text or text == _RATE_LIMITED_MSG: + # Navigation failed or rate-limited; skip ID extraction break # Read total pages from pagination state (e.g. "Page 1 of 40") @@ -548,7 +548,7 @@ async def search_jobs( seen_ids.add(jid) all_job_ids.append(jid) - if text and text != _RATE_LIMITED_MSG: + if text != _RATE_LIMITED_MSG: page_texts.append(text) except LinkedInScraperException: diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 3f9f51b8..f098a910 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -842,8 +842,8 @@ async def test_empty_results(self, mock_page): # Empty text should skip ID extraction to avoid stale DOM mock_ids.assert_not_awaited() - async def test_rate_limited_text_excluded(self, mock_page): - """Rate-limited pages should not appear in sections text.""" + async def test_rate_limited_skips_ids_and_text(self, mock_page): + """Rate-limited pages should yield no IDs or text.""" extractor = LinkedInExtractor(mock_page) with ( patch.object( @@ -857,7 +857,7 @@ async def test_rate_limited_text_excluded(self, mock_page): "_extract_job_ids", new_callable=AsyncMock, return_value=["100"], - ), + ) as mock_ids, patch.object( extractor, "_get_total_search_pages", @@ -871,8 +871,9 @@ async def test_rate_limited_text_excluded(self, mock_page): ): result = await extractor.search_jobs("python", max_pages=1) - assert result["job_ids"] == ["100"] + assert result["job_ids"] == [] assert 
result["sections"] == {} + mock_ids.assert_not_awaited() class TestStripLinkedInNoise: From 196b1515c9632794ceef62a6e78aa5c933be3ad7 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 16:48:56 +0100 Subject: [PATCH 447/565] fix(tools): move _normalize_csv to module level, wait for job cards Move _normalize_csv out of _build_job_search_url to module level for reusability. Wait for job card links before sidebar scrolling to handle async rendering. Document DOM-independence principle in CONTRIBUTING.md and AGENTS.md. --- AGENTS.md | 1 + CONTRIBUTING.md | 10 ++++++++++ linkedin_mcp_server/core/utils.py | 7 +++++++ linkedin_mcp_server/scraping/extractor.py | 11 +++++++---- 4 files changed, 25 insertions(+), 4 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 94900c58..48e79ebf 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -72,6 +72,7 @@ All scraping tools return: `{url, sections: {name: raw_text}}`. When unknown sec - `fields.py` - `PERSON_SECTIONS` and `COMPANY_SECTIONS` config dicts mapping section name to `(url_suffix, is_overlay)` - `extractor.py` - `LinkedInExtractor` class using navigate-scroll-innerText pattern - **One section = one navigation.** Each entry in `PERSON_SECTIONS` / `COMPANY_SECTIONS` maps to exactly one page navigation. Never combine multiple URLs behind a single section. +- **Minimize DOM dependence.** Prefer innerText and URL navigation over DOM selectors. When DOM access is unavoidable (e.g. extracting `href` attributes, finding scrollable containers), use minimal generic selectors (`a[href*="/jobs/view/"]`) โ€” never class names tied to LinkedIn's layout. **Core Subpackage (`core/`):** diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9d1d8107..14ddca04 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -124,6 +124,16 @@ When adding an entirely new MCP tool (e.g., `search_companies`): 4. Open a PR โ€” AI agents review first, then manual review 5. 
Don't squash commits on merge +## Scraping Philosophy: Minimize DOM Dependence + +This project favours **innerText extraction and URL navigation** over DOM selectors. LinkedIn's markup changes frequently โ€” class names, `data-` attributes, and component structure are unstable. Our scraping engine is deliberately built to survive those changes: + +- **Prefer `innerText`** over `querySelector` / DOM walking for data extraction. +- **Prefer URL navigation** (e.g. `/details/experience/`) over clicking UI elements. +- **When DOM access is unavoidable** (e.g. extracting `href` attributes that don't appear in innerText, finding a scrollable container), keep selectors minimal and generic. Favour tag + attribute patterns (`a[href*="/jobs/view/"]`) over class names (`.jobs-search-results-list`). +- **Never scope queries to layout-specific containers** like `.jobs-search-results-list` โ€” these break silently when LinkedIn redesigns. Use `main` as the broadest acceptable scope. +- **Document any DOM dependency** with a comment explaining why innerText/URL navigation isn't sufficient. 
+ ## Code Style - **Commits:** conventional commits โ€” `type(scope): subject` (see [CLAUDE.md](CLAUDE.md) for details) diff --git a/linkedin_mcp_server/core/utils.py b/linkedin_mcp_server/core/utils.py index 9b01a0d7..1ad72454 100644 --- a/linkedin_mcp_server/core/utils.py +++ b/linkedin_mcp_server/core/utils.py @@ -119,6 +119,13 @@ async def scroll_job_sidebar( pause_time: Time to pause between scrolls (seconds) max_scrolls: Maximum number of scroll attempts """ + # Wait for at least one job card link to render before scrolling + try: + await page.wait_for_selector('a[href*="/jobs/view/"]', timeout=5000) + except PlaywrightTimeoutError: + logger.debug("No job card links found, skipping sidebar scroll") + return + scrolled = await page.evaluate( """async ({pauseTime, maxScrolls}) => { const link = document.querySelector('a[href*="/jobs/view/"]'); diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index acaf504e..6c27c375 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -65,6 +65,13 @@ _SORT_BY_MAP = {"date": "DD", "relevance": "R"} + +def _normalize_csv(value: str, mapping: dict[str, str]) -> str: + """Normalize a comma-separated filter value using the provided mapping.""" + parts = [v.strip() for v in value.split(",")] + return ",".join(mapping.get(p, p) for p in parts) + + _NOISE_MARKERS: list[re.Pattern[str]] = [ # Footer nav links: "About" immediately followed by "Accessibility" or "Talent Solutions" re.compile(r"^About\n+(?:Accessibility|Talent Solutions)", re.MULTILINE), @@ -441,10 +448,6 @@ def _build_job_search_url( if location: params += f"&location={quote_plus(location)}" - def _normalize_csv(value: str, mapping: dict[str, str]) -> str: - parts = [v.strip() for v in value.split(",")] - return ",".join(mapping.get(p, p) for p in parts) - if date_posted: params += f"&f_TPR={_normalize_csv(date_posted, _DATE_POSTED_MAP)}" if job_type: From 
08c8643e6c8d9134327b56cf74858e9f85cfcaca Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 18:54:55 +0100 Subject: [PATCH 448/565] docs(scraping): explain DOM exception in _get_total_search_pages The pagination state element has display:none so innerText cannot capture it. Document why the class-based selector is necessary and that it degrades gracefully to max_pages if LinkedIn renames it. --- linkedin_mcp_server/scraping/extractor.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 6c27c375..b22882ba 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -413,6 +413,11 @@ async def _get_total_search_pages(self) -> int | None: Parses the "Page X of Y" text from ``.jobs-search-pagination__page-state``. Returns ``None`` when the element is absent or unparseable. + + NOTE: This is a deliberate DOM exception. The element has ``display: none`` + (screen-reader only), so the text never appears in ``innerText``. A class-based + selector is the only reliable way to read it. Gracefully returns ``None`` if + LinkedIn renames the class โ€” pagination just falls back to ``max_pages``. """ text = await self._page.evaluate( """() => { From 4ed371250f04e7b0009b5bab016a9545a5492eb0 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 19:07:34 +0100 Subject: [PATCH 449/565] fix(tools): single-select filters, dead code, and pagination query Use direct .get() lookup for date_posted and sort_by (single-select filters). Remove unreachable _RATE_LIMITED_MSG check after early break. Query _get_total_search_pages only once per search to avoid repeated evaluate() calls when the element is absent. 
--- linkedin_mcp_server/scraping/extractor.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index b22882ba..39cbaa48 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -454,7 +454,7 @@ def _build_job_search_url( params += f"&location={quote_plus(location)}" if date_posted: - params += f"&f_TPR={_normalize_csv(date_posted, _DATE_POSTED_MAP)}" + params += f"&f_TPR={_DATE_POSTED_MAP.get(date_posted.strip(), date_posted)}" if job_type: params += f"&f_JT={_normalize_csv(job_type, _JOB_TYPE_MAP)}" if experience_level: @@ -464,7 +464,7 @@ def _build_job_search_url( if easy_apply: params += "&f_EA=true" if sort_by: - params += f"&sortBy={_normalize_csv(sort_by, _SORT_BY_MAP)}" + params += f"&sortBy={_SORT_BY_MAP.get(sort_by.strip(), sort_by)}" return f"https://www.linkedin.com/jobs/search/?{params}" @@ -514,6 +514,7 @@ async def search_jobs( seen_ids: set[str] = set() page_texts: list[str] = [] total_pages: int | None = None + total_pages_queried = False for page_num in range(max_pages): # Stop if we already know we've reached the last page @@ -538,8 +539,9 @@ async def search_jobs( # Navigation failed or rate-limited; skip ID extraction break - # Read total pages from pagination state (e.g. 
"Page 1 of 40") - if total_pages is None: + # Read total pages from pagination state (once only) + if not total_pages_queried: + total_pages_queried = True total_pages = await self._get_total_search_pages() if total_pages is not None: logger.debug("LinkedIn reports %d total pages", total_pages) @@ -556,8 +558,7 @@ async def search_jobs( seen_ids.add(jid) all_job_ids.append(jid) - if text != _RATE_LIMITED_MSG: - page_texts.append(text) + page_texts.append(text) except LinkedInScraperException: raise From 9fd33acd2cd9a8bf4bc41b80f64e9ef51b96ec99 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 19:42:44 +0100 Subject: [PATCH 450/565] fix(tools): URL-encode single-select filters, fix log indexing Apply quote_plus to date_posted and sort_by passthrough values to prevent malformed URLs from unexpected input. Use consistent 1-indexed page numbers in all debug log messages. --- linkedin_mcp_server/scraping/extractor.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 39cbaa48..1346af72 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -454,7 +454,8 @@ def _build_job_search_url( params += f"&location={quote_plus(location)}" if date_posted: - params += f"&f_TPR={_DATE_POSTED_MAP.get(date_posted.strip(), date_posted)}" + mapped = _DATE_POSTED_MAP.get(date_posted.strip(), date_posted) + params += f"&f_TPR={quote_plus(mapped)}" if job_type: params += f"&f_JT={_normalize_csv(job_type, _JOB_TYPE_MAP)}" if experience_level: @@ -464,7 +465,8 @@ def _build_job_search_url( if easy_apply: params += "&f_EA=true" if sort_by: - params += f"&sortBy={_SORT_BY_MAP.get(sort_by.strip(), sort_by)}" + mapped = _SORT_BY_MAP.get(sort_by.strip(), sort_by) + params += f"&sortBy={quote_plus(mapped)}" return f"https://www.linkedin.com/jobs/search/?{params}" @@ -521,7 +523,7 @@ async def search_jobs( if 
total_pages is not None and page_num >= total_pages: logger.debug( "Reached last page (%d of %d), stopping", - page_num, + page_num + 1, total_pages, ) break From 9b3955dd6f21b6e59c453dafa499ea15cabf935e Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 19:50:52 +0100 Subject: [PATCH 451/565] fix(tools): log rate-limit retry outcome and missing scroll container Warn when search page rate-limit retry also fails. Add console.debug in scroll_job_sidebar when no scrollable container is found. --- linkedin_mcp_server/core/utils.py | 5 ++++- linkedin_mcp_server/scraping/extractor.py | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/linkedin_mcp_server/core/utils.py b/linkedin_mcp_server/core/utils.py index 1ad72454..f07b1f1b 100644 --- a/linkedin_mcp_server/core/utils.py +++ b/linkedin_mcp_server/core/utils.py @@ -142,7 +142,10 @@ async def scroll_job_sidebar( container = container.parentElement; } - if (!container || container === document.body) return 0; + if (!container || container === document.body) { + console.debug('[scroll_job_sidebar] no scrollable container found'); + return 0; + } let scrollCount = 0; for (let i = 0; i < maxScrolls; i++) { diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 1346af72..096bdde8 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -369,7 +369,10 @@ async def _extract_search_page(self, url: str) -> str: _RATE_LIMIT_RETRY_DELAY, ) await asyncio.sleep(_RATE_LIMIT_RETRY_DELAY) - return await self._extract_search_page_once(url) + result = await self._extract_search_page_once(url) + if result == _RATE_LIMITED_MSG: + logger.warning("Search page %s still rate-limited after retry", url) + return result except LinkedInScraperException: raise From ae40b0bb379f16439047de9150db3ac37491ce12 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 20:00:02 +0100 Subject: [PATCH 452/565] 
fix(tools): skip scroll without main, fix log message, harden test Skip sidebar scrolling when
is absent to avoid 5s timeout on edge-case pages. Fix off-by-one in total_pages log message. Add page count assertion to test_deduplication_across_pages. --- linkedin_mcp_server/scraping/extractor.py | 11 +++++------ tests/test_scraping.py | 3 ++- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 096bdde8..c09fb400 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -385,13 +385,16 @@ async def _extract_search_page_once(self, url: str) -> str: await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) await detect_rate_limit(self._page) + main_found = True try: await self._page.wait_for_selector("main", timeout=5000) except PlaywrightTimeoutError: logger.debug("No
element found on %s", url) + main_found = False await handle_modal_close(self._page) - await scroll_job_sidebar(self._page, pause_time=0.5, max_scrolls=5) + if main_found: + await scroll_job_sidebar(self._page, pause_time=0.5, max_scrolls=5) raw = await self._page.evaluate( """() => { @@ -524,11 +527,7 @@ async def search_jobs( for page_num in range(max_pages): # Stop if we already know we've reached the last page if total_pages is not None and page_num >= total_pages: - logger.debug( - "Reached last page (%d of %d), stopping", - page_num + 1, - total_pages, - ) + logger.debug("All %d pages fetched, stopping", total_pages) break if page_num > 0: diff --git a/tests/test_scraping.py b/tests/test_scraping.py index f098a910..2e9bf1d1 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -604,7 +604,7 @@ async def test_deduplication_across_pages(self, mock_page): "_extract_search_page", new_callable=AsyncMock, return_value="text", - ), + ) as mock_extract, patch.object( extractor, "_extract_job_ids", @@ -625,6 +625,7 @@ async def test_deduplication_across_pages(self, mock_page): result = await extractor.search_jobs("python", max_pages=2) assert result["job_ids"] == ["100", "200", "300"] + assert mock_extract.await_count == 2 async def test_early_stop_no_new_ids(self, mock_page): """Should stop early when a page yields no new job IDs.""" From fa4a37009a412440f2c43b0e76d6502e17d9b7fb Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 20:15:04 +0100 Subject: [PATCH 453/565] fix(tools): preserve page text on zero-results search Append text to page_texts before breaking on no new IDs so the LLM can read LinkedIn's feedback (e.g. "No jobs found") instead of receiving empty sections. 
--- linkedin_mcp_server/scraping/extractor.py | 1 + 1 file changed, 1 insertion(+) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index c09fb400..b591df9b 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -555,6 +555,7 @@ async def search_jobs( new_ids = [jid for jid in page_ids if jid not in seen_ids] if not new_ids: + page_texts.append(text) logger.debug("No new job IDs on page %d, stopping", page_num + 1) break From 0b5dc369b86048cdaaf38180c5350c49fabf498c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 20:57:46 +0100 Subject: [PATCH 454/565] test(scraping): Assert await_count in page texts test Add await_count == 2 assertion to test_page_texts_joined_with_separator matching the pattern already used in test_deduplication_across_pages. --- tests/test_scraping.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 2e9bf1d1..d4e85ab0 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -785,7 +785,7 @@ async def test_page_texts_joined_with_separator(self, mock_page): "_extract_search_page", new_callable=AsyncMock, side_effect=lambda url: next(text_pages), - ), + ) as mock_extract, patch.object( extractor, "_extract_job_ids", @@ -808,6 +808,7 @@ async def test_page_texts_joined_with_separator(self, mock_page): assert "\n---\n" in result["sections"]["search_results"] assert "Page 1 content" in result["sections"]["search_results"] assert "Page 2 content" in result["sections"]["search_results"] + assert mock_extract.await_count == 2 async def test_empty_results(self, mock_page): """Should handle empty results gracefully and skip ID extraction.""" From 81fb9afe1f7af705650b9b2a0a0f21ad33399a40 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 21:50:20 +0100 Subject: [PATCH 455/565] fix(scraping): Use textContent for hidden pagination element 
Switch from innerText to textContent in _get_total_search_pages so the "Page X of Y" text is readable regardless of CSS visibility. --- linkedin_mcp_server/scraping/extractor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index b591df9b..90bfed2f 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -430,7 +430,7 @@ async def _get_total_search_pages(self) -> int | None: const el = document.querySelector( '.jobs-search-pagination__page-state' ); - return el ? el.innerText.trim() : null; + return el ? el.textContent.trim() : null; }""" ) if not text: From 0236331a78f59e7282b3355a55976c745c9bb41a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 22:22:42 +0100 Subject: [PATCH 456/565] fix(scraping): Surface JS debug log, isolate pagination error - Replace console.debug in scroll_job_sidebar JS with sentinel return so the message is logged via Python logger instead - Wrap _get_total_search_pages in its own try/except to prevent an exception from discarding already-fetched page text and job IDs - Inline offset calculation into URL ternary for clarity --- linkedin_mcp_server/core/utils.py | 7 ++++--- linkedin_mcp_server/scraping/extractor.py | 19 +++++++++++++------ 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/linkedin_mcp_server/core/utils.py b/linkedin_mcp_server/core/utils.py index f07b1f1b..1a54cc8f 100644 --- a/linkedin_mcp_server/core/utils.py +++ b/linkedin_mcp_server/core/utils.py @@ -143,8 +143,7 @@ async def scroll_job_sidebar( } if (!container || container === document.body) { - console.debug('[scroll_job_sidebar] no scrollable container found'); - return 0; + return -1; } let scrollCount = 0; @@ -159,7 +158,9 @@ async def scroll_job_sidebar( }""", {"pauseTime": pause_time, "maxScrolls": max_scrolls}, ) - if scrolled: + if scrolled == -1: + logger.debug("No scrollable 
container found for job sidebar") + elif scrolled: logger.debug("Scrolled job sidebar %d times", scrolled) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 90bfed2f..79a960bf 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -533,8 +533,11 @@ async def search_jobs( if page_num > 0: await asyncio.sleep(_NAV_DELAY) - offset = page_num * _PAGE_SIZE - url = base_url if page_num == 0 else f"{base_url}&start={offset}" + url = ( + base_url + if page_num == 0 + else f"{base_url}&start={page_num * _PAGE_SIZE}" + ) try: text = await self._extract_search_page(url) @@ -543,12 +546,16 @@ async def search_jobs( # Navigation failed or rate-limited; skip ID extraction break - # Read total pages from pagination state (once only) + # Read total pages from pagination state (once only, best-effort) if not total_pages_queried: total_pages_queried = True - total_pages = await self._get_total_search_pages() - if total_pages is not None: - logger.debug("LinkedIn reports %d total pages", total_pages) + try: + total_pages = await self._get_total_search_pages() + except Exception as e: + logger.debug("Could not read total pages: %s", e) + else: + if total_pages is not None: + logger.debug("LinkedIn reports %d total pages", total_pages) # Extract job IDs from hrefs (page is already loaded) page_ids = await self._extract_job_ids() From 7dd0e4e6ba112730f27cf82c17c8a110a94c3257 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 22:32:52 +0100 Subject: [PATCH 457/565] fix(scraping): Log zero-scroll and body-fallback edge cases - Add debug log when sidebar container is found but no new content loads (scrolled == 0) - Add debug log when
is absent and body fallback is used on search pages --- linkedin_mcp_server/core/utils.py | 2 ++ linkedin_mcp_server/scraping/extractor.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/linkedin_mcp_server/core/utils.py b/linkedin_mcp_server/core/utils.py index 1a54cc8f..2c70b453 100644 --- a/linkedin_mcp_server/core/utils.py +++ b/linkedin_mcp_server/core/utils.py @@ -162,6 +162,8 @@ async def scroll_job_sidebar( logger.debug("No scrollable container found for job sidebar") elif scrolled: logger.debug("Scrolled job sidebar %d times", scrolled) + else: + logger.debug("Job sidebar container found but no new content loaded") async def handle_modal_close(page: Page) -> bool: diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 79a960bf..2116df6d 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -402,6 +402,8 @@ async def _extract_search_page_once(self, url: str) -> str: return main ? main.innerText : document.body.innerText; }""" ) + if not main_found: + logger.debug("No
at evaluation time on %s, using body fallback", url) if not raw: return "" From 23dc25db84efa78bfeb579890302abeac02ce2fb Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 22:51:01 +0100 Subject: [PATCH 458/565] fix(scraping): Disambiguate scroll sentinels and body fallback log - Use -2 sentinel for "job card link vanished" vs -1 for "no scrollable container" vs 0 for "no new content loaded" - Return {source, text} from search page JS evaluate so the body fallback log fires based on actual DOM state, not the pre-evaluate wait_for_selector flag --- linkedin_mcp_server/core/utils.py | 6 ++++-- linkedin_mcp_server/scraping/extractor.py | 9 ++++++--- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/linkedin_mcp_server/core/utils.py b/linkedin_mcp_server/core/utils.py index 2c70b453..09ad8544 100644 --- a/linkedin_mcp_server/core/utils.py +++ b/linkedin_mcp_server/core/utils.py @@ -129,7 +129,7 @@ async def scroll_job_sidebar( scrolled = await page.evaluate( """async ({pauseTime, maxScrolls}) => { const link = document.querySelector('a[href*="/jobs/view/"]'); - if (!link) return 0; + if (!link) return -2; let container = link.parentElement; while (container && container !== document.body) { @@ -158,7 +158,9 @@ async def scroll_job_sidebar( }""", {"pauseTime": pause_time, "maxScrolls": max_scrolls}, ) - if scrolled == -1: + if scrolled == -2: + logger.debug("Job card link disappeared before evaluate, skipping scroll") + elif scrolled == -1: logger.debug("No scrollable container found for job sidebar") elif scrolled: logger.debug("Scrolled job sidebar %d times", scrolled) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 2116df6d..e5ede18c 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -396,13 +396,16 @@ async def _extract_search_page_once(self, url: str) -> str: if main_found: await scroll_job_sidebar(self._page, pause_time=0.5, 
max_scrolls=5) - raw = await self._page.evaluate( + raw_result = await self._page.evaluate( """() => { const main = document.querySelector('main'); - return main ? main.innerText : document.body.innerText; + return main + ? { source: 'main', text: main.innerText } + : { source: 'body', text: document.body.innerText }; }""" ) - if not main_found: + raw = raw_result["text"] + if raw_result["source"] == "body": logger.debug("No
at evaluation time on %s, using body fallback", url) if not raw: From 4f110b1108b988e7c482d40464191be575d42751 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 23:03:36 +0100 Subject: [PATCH 459/565] fix(scraping): URL guard for stale DOM, test zero-ID page text - Add URL sanity check before _extract_job_ids to prevent extracting IDs from a stale page after a swallowed navigation failure - Add test_no_ids_on_first_page_captures_text to pin the behavior where non-empty text with zero job IDs is returned in sections - Change total_pages mock to None in test_pagination_uses_fixed_page_size since max_pages=2 caps the loop before total_pages is relevant --- linkedin_mcp_server/scraping/extractor.py | 10 ++++++ tests/test_scraping.py | 38 ++++++++++++++++++++++- 2 files changed, 47 insertions(+), 1 deletion(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index e5ede18c..cfa3d78c 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -563,6 +563,16 @@ async def search_jobs( logger.debug("LinkedIn reports %d total pages", total_pages) # Extract job IDs from hrefs (page is already loaded) + if not self._page.url.startswith( + "https://www.linkedin.com/jobs/search/" + ): + logger.debug( + "Unexpected page URL after extraction: %s โ€” " + "skipping job ID extraction", + self._page.url, + ) + page_texts.append(text) + break page_ids = await self._extract_job_ids() new_ids = [jid for jid in page_ids if jid not in seen_ids] diff --git a/tests/test_scraping.py b/tests/test_scraping.py index d4e85ab0..029508f7 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -524,6 +524,10 @@ async def test_scrape_job(self, mock_page): class TestSearchJobs: """Tests for search_jobs with job ID extraction and pagination.""" + @pytest.fixture(autouse=True) + def _set_search_url(self, mock_page): + mock_page.url = 
"https://www.linkedin.com/jobs/search/?keywords=python" + async def test_returns_job_ids(self, mock_page): """search_jobs should return a job_ids list extracted from hrefs.""" extractor = LinkedInExtractor(mock_page) @@ -581,7 +585,7 @@ async def mock_extract(url): extractor, "_get_total_search_pages", new_callable=AsyncMock, - return_value=5, + return_value=None, ), patch( "linkedin_mcp_server.scraping.extractor.asyncio.sleep", @@ -844,6 +848,38 @@ async def test_empty_results(self, mock_page): # Empty text should skip ID extraction to avoid stale DOM mock_ids.assert_not_awaited() + async def test_no_ids_on_first_page_captures_text(self, mock_page): + """Non-empty text with zero job IDs should be returned in sections.""" + extractor = LinkedInExtractor(mock_page) + with ( + patch.object( + extractor, + "_extract_search_page", + new_callable=AsyncMock, + return_value="No matching jobs found", + ), + patch.object( + extractor, + "_extract_job_ids", + new_callable=AsyncMock, + return_value=[], + ), + patch.object( + extractor, + "_get_total_search_pages", + new_callable=AsyncMock, + return_value=None, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.search_jobs("xyzzy123", max_pages=1) + + assert result["job_ids"] == [] + assert result["sections"]["search_results"] == "No matching jobs found" + async def test_rate_limited_skips_ids_and_text(self, mock_page): """Rate-limited pages should yield no IDs or text.""" extractor = LinkedInExtractor(mock_page) From 1a647b62ab32cb6466f2cb4934cb967bbe4fd989 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 23:17:12 +0100 Subject: [PATCH 460/565] fix(scraping): Move displaced comment, log race condition, test URL guard - Move _NOISE_MARKERS comment to directly precede the list it describes - Log when
appears after wait_for_selector timeout but before evaluate (sidebar scroll skipped on late-appearing element) - Add test_url_redirect_skips_id_extraction to exercise the URL sanity guard that prevents extracting IDs from a stale/redirect page --- linkedin_mcp_server/scraping/extractor.py | 9 ++++-- tests/test_scraping.py | 34 +++++++++++++++++++++++ 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index cfa3d78c..9208712a 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -29,8 +29,6 @@ # Returned as section text when LinkedIn rate-limits the page _RATE_LIMITED_MSG = "[Rate limited] LinkedIn blocked this section. Try again later or request fewer sections." -# Patterns that mark the start of LinkedIn page chrome (sidebar/footer). -# Everything from the earliest match onwards is stripped. # LinkedIn shows 25 results per page _PAGE_SIZE = 25 @@ -72,6 +70,8 @@ def _normalize_csv(value: str, mapping: dict[str, str]) -> str: return ",".join(mapping.get(p, p) for p in parts) +# Patterns that mark the start of LinkedIn page chrome (sidebar/footer). +# Everything from the earliest match onwards is stripped. _NOISE_MARKERS: list[re.Pattern[str]] = [ # Footer nav links: "About" immediately followed by "Accessibility" or "Talent Solutions" re.compile(r"^About\n+(?:Accessibility|Talent Solutions)", re.MULTILINE), @@ -407,6 +407,11 @@ async def _extract_search_page_once(self, url: str) -> str: raw = raw_result["text"] if raw_result["source"] == "body": logger.debug("No
at evaluation time on %s, using body fallback", url) + elif not main_found: + logger.debug( + "
appeared after wait timeout on %s, sidebar scroll was skipped", + url, + ) if not raw: return "" diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 029508f7..4225953a 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -880,6 +880,40 @@ async def test_no_ids_on_first_page_captures_text(self, mock_page): assert result["job_ids"] == [] assert result["sections"]["search_results"] == "No matching jobs found" + async def test_url_redirect_skips_id_extraction(self, mock_page): + """Unexpected page URL should skip ID extraction but capture text.""" + extractor = LinkedInExtractor(mock_page) + mock_page.url = "https://www.linkedin.com/uas/login" + with ( + patch.object( + extractor, + "_extract_search_page", + new_callable=AsyncMock, + return_value="Login page content", + ), + patch.object( + extractor, + "_extract_job_ids", + new_callable=AsyncMock, + return_value=[], + ) as mock_ids, + patch.object( + extractor, + "_get_total_search_pages", + new_callable=AsyncMock, + return_value=None, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.search_jobs("python", max_pages=2) + + mock_ids.assert_not_awaited() + assert result["job_ids"] == [] + assert result["sections"]["search_results"] == "Login page content" + async def test_rate_limited_skips_ids_and_text(self, mock_page): """Rate-limited pages should yield no IDs or text.""" extractor = LinkedInExtractor(mock_page) From 1ba4039e5102e636e714f1b2d7c63cb2e133eb8b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 23:30:48 +0100 Subject: [PATCH 461/565] test(scraping): Assert total_pages queried only once Capture _get_total_search_pages mock in test_stops_at_total_pages and verify await_count == 1 to pin the query-once optimization. 
--- tests/test_scraping.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 4225953a..09b32ed7 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -690,7 +690,7 @@ async def test_stops_at_total_pages(self, mock_page): "_get_total_search_pages", new_callable=AsyncMock, return_value=2, - ), + ) as mock_total_pages, patch( "linkedin_mcp_server.scraping.extractor.asyncio.sleep", new_callable=AsyncMock, @@ -700,6 +700,7 @@ async def test_stops_at_total_pages(self, mock_page): # Should only visit 2 pages despite max_pages=10 assert mock_extract.await_count == 2 + assert mock_total_pages.await_count == 1 assert result["job_ids"] == ["100", "200"] async def test_zero_max_pages_fetches_nothing(self, mock_page): From 6bc1e9623efd59be95b5e100667e4e709ad63ac7 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 5 Mar 2026 23:56:59 +0100 Subject: [PATCH 462/565] docs(AGENTS): add section for verifying bug reports with detailed curl commands --- AGENTS.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/AGENTS.md b/AGENTS.md index 48e79ebf..2be713b0 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -129,6 +129,28 @@ All scraping tools return: `{url, sections: {name: raw_text}}`. When unknown sec - Types: feat, fix, docs, style, refactor, test, chore, perf, ci - Keep subject <50 chars, imperative mood +## Verifying Bug Reports + +Always verify scraping bugs end-to-end against live LinkedIn, not just code analysis. Assume a valid login profile already exists at `~/.linkedin-mcp/profile/`. 
Start the server with HTTP transport, then call the tool via curl: + +```bash +# Start server +uv run -m linkedin_mcp_server --transport streamable-http --log-level DEBUG + +# Initialize MCP session (grab Mcp-Session-Id from response headers) +curl -s -D /tmp/mcp-headers -X POST http://127.0.0.1:8000/mcp \ + -H "Content-Type: application/json" \ + -H "Accept: application/json, text/event-stream" \ + -d '{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"protocolVersion":"2025-03-26","capabilities":{},"clientInfo":{"name":"test","version":"1.0"}}}' + +# Call a tool (use Mcp-Session-Id from previous response) +curl -s -X POST http://127.0.0.1:8000/mcp \ + -H "Content-Type: application/json" \ + -H "Accept: application/json, text/event-stream" \ + -H "Mcp-Session-Id: " \ + -d '{"jsonrpc":"2.0","id":2,"method":"tools/call","params":{"name":"get_person_profile","arguments":{"linkedin_username":"williamhgates","sections":"posts"}}}' +``` + ## Important Development Notes ### Development Workflow From b235a46a1dad41e2ced88a40de1c61fdf1029b0f Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Fri, 6 Mar 2026 00:03:24 +0100 Subject: [PATCH 463/565] Update AGENTS.md Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com> --- AGENTS.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/AGENTS.md b/AGENTS.md index 2be713b0..284f25d3 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -143,11 +143,14 @@ curl -s -D /tmp/mcp-headers -X POST http://127.0.0.1:8000/mcp \ -H "Accept: application/json, text/event-stream" \ -d '{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"protocolVersion":"2025-03-26","capabilities":{},"clientInfo":{"name":"test","version":"1.0"}}}' +# Extract the session ID from saved headers +SESSION_ID=$(grep -i 'Mcp-Session-Id' /tmp/mcp-headers | awk '{print $2}' | tr -d '\r') + # Call a tool (use Mcp-Session-Id from previous response) curl -s -X POST 
http://127.0.0.1:8000/mcp \ -H "Content-Type: application/json" \ -H "Accept: application/json, text/event-stream" \ - -H "Mcp-Session-Id: " \ + -H "Mcp-Session-Id: $SESSION_ID" \ -d '{"jsonrpc":"2.0","id":2,"method":"tools/call","params":{"name":"get_person_profile","arguments":{"linkedin_username":"williamhgates","sections":"posts"}}}' ``` From e8e8eb9ddf402bb074f4d542c9fbaa5d95024a63 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Fri, 6 Mar 2026 00:03:43 +0100 Subject: [PATCH 464/565] Update AGENTS.md Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- AGENTS.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AGENTS.md b/AGENTS.md index 284f25d3..1d438ead 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -131,7 +131,7 @@ All scraping tools return: `{url, sections: {name: raw_text}}`. When unknown sec ## Verifying Bug Reports -Always verify scraping bugs end-to-end against live LinkedIn, not just code analysis. Assume a valid login profile already exists at `~/.linkedin-mcp/profile/`. Start the server with HTTP transport, then call the tool via curl: +Always verify scraping bugs end-to-end against live LinkedIn, not just code analysis. Assume a valid login profile already exists at `~/.linkedin-mcp/profile/`. Start the server with HTTP transport in one terminal (this process is long-running and will block the shell), then in a second terminal call the tool via curl: ```bash # Start server From b76460d6307a7c39f87db1e80613c445644209f7 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 6 Mar 2026 00:19:57 +0100 Subject: [PATCH 465/565] fix(scraping): Wait for activity feed content before extracting Activity feed pages lazy-load post content after tab headers render. Add wait_for_function check and slower scroll params for /recent-activity/ URLs so posts section returns actual content instead of just tab headers. 
Resolves: #201 --- linkedin_mcp_server/scraping/extractor.py | 20 ++++- tests/test_scraping.py | 99 +++++++++++++++++++++++ 2 files changed, 118 insertions(+), 1 deletion(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 9208712a..7771a7c7 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -145,8 +145,26 @@ async def _extract_page_once(self, url: str) -> str: # Dismiss any modals blocking content await handle_modal_close(self._page) + # Activity feed pages lazy-load post content after the tab header + is_activity = "/recent-activity/" in url + if is_activity: + try: + await self._page.wait_for_function( + """() => { + const main = document.querySelector('main'); + if (!main) return false; + return main.innerText.length > 200; + }""", + timeout=10000, + ) + except PlaywrightTimeoutError: + logger.debug("Activity feed content did not appear on %s", url) + # Scroll to trigger lazy loading - await scroll_to_bottom(self._page, pause_time=0.5, max_scrolls=5) + if is_activity: + await scroll_to_bottom(self._page, pause_time=1.0, max_scrolls=10) + else: + await scroll_to_bottom(self._page, pause_time=0.5, max_scrolls=5) # Extract text from main content area raw = await self._page.evaluate( diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 09b32ed7..2090dbbb 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -992,3 +992,102 @@ def test_real_footer_with_languages(self): "Select language\nEnglish (English)\nDeutsch (German)" ) assert strip_linkedin_noise(text) == "Company info" + + +class TestActivityFeedExtraction: + """Tests for activity page detection and wait behavior in _extract_page_once.""" + + async def test_activity_page_waits_for_content_and_uses_slow_scroll( + self, mock_page + ): + """Activity URLs should call wait_for_function and use slower scroll params.""" + mock_page.evaluate = AsyncMock(return_value="Post content 
" * 50) + mock_page.wait_for_function = AsyncMock() + extractor = LinkedInExtractor(mock_page) + with ( + patch( + "linkedin_mcp_server.scraping.extractor.scroll_to_bottom", + new_callable=AsyncMock, + ) as mock_scroll, + patch( + "linkedin_mcp_server.scraping.extractor.detect_rate_limit", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.handle_modal_close", + new_callable=AsyncMock, + return_value=False, + ), + ): + result = await extractor._extract_page_once( + "https://www.linkedin.com/in/billgates/recent-activity/all/" + ) + + mock_page.wait_for_function.assert_awaited_once() + mock_scroll.assert_awaited_once() + _, kwargs = mock_scroll.call_args + assert kwargs["pause_time"] == 1.0 + assert kwargs["max_scrolls"] == 10 + assert len(result) > 200 + + async def test_non_activity_page_skips_wait_and_uses_fast_scroll(self, mock_page): + """Non-activity URLs should not call wait_for_function and use fast scroll.""" + mock_page.evaluate = AsyncMock(return_value="Profile text") + mock_page.wait_for_function = AsyncMock() + extractor = LinkedInExtractor(mock_page) + with ( + patch( + "linkedin_mcp_server.scraping.extractor.scroll_to_bottom", + new_callable=AsyncMock, + ) as mock_scroll, + patch( + "linkedin_mcp_server.scraping.extractor.detect_rate_limit", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.handle_modal_close", + new_callable=AsyncMock, + return_value=False, + ), + ): + await extractor._extract_page_once( + "https://www.linkedin.com/in/billgates/details/experience/" + ) + + mock_page.wait_for_function.assert_not_awaited() + mock_scroll.assert_awaited_once() + _, kwargs = mock_scroll.call_args + assert kwargs["pause_time"] == 0.5 + assert kwargs["max_scrolls"] == 5 + + async def test_activity_page_timeout_proceeds_gracefully(self, mock_page): + """When activity feed content never loads, extraction proceeds with available text.""" + from patchright.async_api import TimeoutError as 
PlaywrightTimeoutError + + tab_headers = "All activity\nPosts\nComments\nVideos\nImages" + mock_page.evaluate = AsyncMock(return_value=tab_headers) + mock_page.wait_for_function = AsyncMock( + side_effect=PlaywrightTimeoutError("Timeout") + ) + extractor = LinkedInExtractor(mock_page) + with ( + patch( + "linkedin_mcp_server.scraping.extractor.scroll_to_bottom", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.detect_rate_limit", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.handle_modal_close", + new_callable=AsyncMock, + return_value=False, + ), + ): + result = await extractor._extract_page_once( + "https://www.linkedin.com/in/billgates/recent-activity/all/" + ) + + # Should return whatever text is available, not crash + assert result == tab_headers From 420ca0089241932f764103da089ba8913fdd18bb Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 6 Mar 2026 00:29:35 +0100 Subject: [PATCH 466/565] chore: Bump version to 4.2.0 --- pyproject.toml | 2 +- uv.lock | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 92583f9c..fba315ea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "4.1.2" +version = "4.2.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." 
readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index e144ed14..af1f7185 100644 --- a/uv.lock +++ b/uv.lock @@ -212,10 +212,16 @@ sdist = { url = "https://files.pythonhosted.org/packages/92/88/b8527e1b00c1811db wheels = [ { url = "https://files.pythonhosted.org/packages/d3/25/79c98ebe12df31548ba4eaf44db11b7cad6b3e7b4203718335620939083c/caio-0.9.25-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fb7ff95af4c31ad3f03179149aab61097a71fd85e05f89b4786de0359dffd044", size = 36983, upload-time = "2025-12-26T15:21:36.075Z" }, { url = "https://files.pythonhosted.org/packages/a3/2b/21288691f16d479945968a0a4f2856818c1c5be56881d51d4dac9b255d26/caio-0.9.25-cp312-cp312-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:97084e4e30dfa598449d874c4d8e0c8d5ea17d2f752ef5e48e150ff9d240cd64", size = 82012, upload-time = "2025-12-26T15:22:20.983Z" }, + { url = "https://files.pythonhosted.org/packages/03/c4/8a1b580875303500a9c12b9e0af58cb82e47f5bcf888c2457742a138273c/caio-0.9.25-cp312-cp312-manylinux_2_34_aarch64.whl", hash = "sha256:4fa69eba47e0f041b9d4f336e2ad40740681c43e686b18b191b6c5f4c5544bfb", size = 81502, upload-time = "2026-03-04T22:08:22.381Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1c/0fe770b8ffc8362c48134d1592d653a81a3d8748d764bec33864db36319d/caio-0.9.25-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:6bebf6f079f1341d19f7386db9b8b1f07e8cc15ae13bfdaff573371ba0575d69", size = 80200, upload-time = "2026-03-04T22:08:23.382Z" }, { url = "https://files.pythonhosted.org/packages/31/57/5e6ff127e6f62c9f15d989560435c642144aa4210882f9494204bc892305/caio-0.9.25-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d6c2a3411af97762a2b03840c3cec2f7f728921ff8adda53d7ea2315a8563451", size = 36979, upload-time = "2025-12-26T15:21:35.484Z" }, { url = 
"https://files.pythonhosted.org/packages/a3/9f/f21af50e72117eb528c422d4276cbac11fb941b1b812b182e0a9c70d19c5/caio-0.9.25-cp313-cp313-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0998210a4d5cd5cb565b32ccfe4e53d67303f868a76f212e002a8554692870e6", size = 81900, upload-time = "2025-12-26T15:22:21.919Z" }, + { url = "https://files.pythonhosted.org/packages/9c/12/c39ae2a4037cb10ad5eb3578eb4d5f8c1a2575c62bba675f3406b7ef0824/caio-0.9.25-cp313-cp313-manylinux_2_34_aarch64.whl", hash = "sha256:1a177d4777141b96f175fe2c37a3d96dec7911ed9ad5f02bac38aaa1c936611f", size = 81523, upload-time = "2026-03-04T22:08:25.187Z" }, + { url = "https://files.pythonhosted.org/packages/22/59/f8f2e950eb4f1a5a3883e198dca514b9d475415cb6cd7b78b9213a0dd45a/caio-0.9.25-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:9ed3cfb28c0e99fec5e208c934e5c157d0866aa9c32aa4dc5e9b6034af6286b7", size = 80243, upload-time = "2026-03-04T22:08:26.449Z" }, { url = "https://files.pythonhosted.org/packages/69/ca/a08fdc7efdcc24e6a6131a93c85be1f204d41c58f474c42b0670af8c016b/caio-0.9.25-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fab6078b9348e883c80a5e14b382e6ad6aabbc4429ca034e76e730cf464269db", size = 36978, upload-time = "2025-12-26T15:21:41.055Z" }, { url = "https://files.pythonhosted.org/packages/5e/6c/d4d24f65e690213c097174d26eda6831f45f4734d9d036d81790a27e7b78/caio-0.9.25-cp314-cp314-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:44a6b58e52d488c75cfaa5ecaa404b2b41cc965e6c417e03251e868ecd5b6d77", size = 81832, upload-time = "2025-12-26T15:22:22.757Z" }, + { url = "https://files.pythonhosted.org/packages/87/a4/e534cf7d2d0e8d880e25dd61e8d921ffcfe15bd696734589826f5a2df727/caio-0.9.25-cp314-cp314-manylinux_2_34_aarch64.whl", hash = "sha256:628a630eb7fb22381dd8e3c8ab7f59e854b9c806639811fc3f4310c6bd711d79", size = 81565, upload-time = "2026-03-04T22:08:27.483Z" }, + { url = 
"https://files.pythonhosted.org/packages/3f/ed/bf81aeac1d290017e5e5ac3e880fd56ee15e50a6d0353986799d1bc5cfd5/caio-0.9.25-cp314-cp314-manylinux_2_34_x86_64.whl", hash = "sha256:0ba16aa605ccb174665357fc729cf500679c2d94d5f1458a6f0d5ca48f2060a7", size = 80071, upload-time = "2026-03-04T22:08:28.751Z" }, { url = "https://files.pythonhosted.org/packages/86/93/1f76c8d1bafe3b0614e06b2195784a3765bbf7b0a067661af9e2dd47fc33/caio-0.9.25-py3-none-any.whl", hash = "sha256:06c0bb02d6b929119b1cfbe1ca403c768b2013a369e2db46bfa2a5761cf82e40", size = 19087, upload-time = "2025-12-26T15:22:00.221Z" }, ] @@ -945,7 +951,7 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "4.1.2" +version = "4.2.0" source = { editable = "." } dependencies = [ { name = "fastmcp" }, From 26f37f5e7dfe11b2178070cf21c3473049f1c012 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 5 Mar 2026 23:32:17 +0000 Subject: [PATCH 467/565] chore: update manifest.json and docker-compose.yml to v4.2.0 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 5436362c..7b85aceb 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:4.1.2 + image: stickerdaniel/linkedin-mcp-server:4.2.0 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index f73655d5..78e54322 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "4.1.2", + "version": "4.2.0", "description": "Connect Claude to LinkedIn for profiles, companies, job details, and people search", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n- **People Search**: Search for people by keywords and location\n- **Company Posts**: Get recent posts from a company's LinkedIn feed\n- **Person Posts**: Get recent activity and posts from a person's profile\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.1.2", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n- **People Search**: Search for people by keywords and location\n- **Company Posts**: Get recent posts from a company's LinkedIn feed\n- **Person Posts**: Get recent activity and posts from a person's profile\n\n## First-Time Setup\n\n### 1. 
Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.2.0", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:4.1.2" + "stickerdaniel/linkedin-mcp-server:4.2.0" ] } }, From bb8448cfd0cdfae8f63f9f48ca176d6930b9f7fa Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 8 Mar 2026 16:19:47 +0100 Subject: [PATCH 468/565] feat(scraping): add compact references --- AGENTS.md | 21 +- CONTRIBUTING.md | 6 + README.md | 2 + docs/docker-hub.md | 1 + linkedin_mcp_server/scraping/extractor.py | 302 +++++++++++---- linkedin_mcp_server/scraping/link_metadata.py | 359 ++++++++++++++++++ linkedin_mcp_server/tools/company.py | 18 +- linkedin_mcp_server/tools/job.py | 6 +- linkedin_mcp_server/tools/person.py | 4 +- tests/test_link_metadata.py | 194 ++++++++++ tests/test_scraping.py | 247 +++++++++--- tests/test_tools.py | 10 +- 12 files changed, 1030 insertions(+), 140 deletions(-) create mode 100644 linkedin_mcp_server/scraping/link_metadata.py create mode 100644 tests/test_link_metadata.py diff --git a/AGENTS.md b/AGENTS.md index 1d438ead..a0bed729 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -8,7 +8,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co - Use `uv` for dependency management: `uv sync` (installs all dependencies) - Development dependencies: `uv sync --group dev` -- Bump version: `uv version --bump minor` (or `major`, `patch`) - this is the **only manual step** for a release. The GitHub Actions release workflow (`.github/workflows/release.yml`) automatically handles: manifest.json/docker-compose.yml version updates, git tag, Docker build & push, DXT extension, GitHub release, and PyPI publish. 
After the workflow completes, manually file a PR in the MCP registry to update the version. +- Bump version: see [Release Process](#release-process) below - Install browser: `uv run patchright install chromium` - Run server locally: `uv run -m linkedin_mcp_server --no-headless` - Run via uvx (PyPI): `uvx linkedin-scraper-mcp` @@ -65,7 +65,13 @@ This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assis **Tool Return Format:** -All scraping tools return: `{url, sections: {name: raw_text}}`. When unknown section names are provided, an `unknown_sections: [name, ...]` key is also included. +All scraping tools return: `{url, sections: {name: raw_text}}`. + +Tools may also include: + +- `references: {section_name: [{kind, url, text?, context?}, ...]}` โ€” compact typed link targets for graph expansion. LinkedIn URLs are relative paths such as `/in/stickerdaniel/`; external URLs remain absolute. +- `unknown_sections: [name, ...]` when unknown section names were passed. +- `job_ids: [id, ...]` for `search_jobs`. **Scraping Architecture (`scraping/`):** @@ -154,6 +160,17 @@ curl -s -X POST http://127.0.0.1:8000/mcp \ -d '{"jsonrpc":"2.0","id":2,"method":"tools/call","params":{"name":"get_person_profile","arguments":{"linkedin_username":"williamhgates","sections":"posts"}}}' ``` +## Release Process + +```bash +git checkout main && git pull +uv version --bump minor # or: major, patch โ€” updates pyproject.toml AND uv.lock +gt create -m "chore: Bump version to X.Y.Z" +gt submit # merge PR to trigger release workflow +``` + +After the workflow completes, file a PR in the MCP registry to update the version. 
+ ## Important Development Notes ### Development Workflow diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 14ddca04..2678943a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -54,10 +54,16 @@ for section_name, (suffix, is_overlay) in PERSON_SECTIONS.items(): ```python {"url": str, "sections": {name: raw_text}} +# Optional compact link metadata: +{"url": str, "sections": {name: raw_text}, "references": {section: [{kind, url, text?, context?}, ...]}} # When unknown section names are provided: {"url": str, "sections": {name: raw_text}, "unknown_sections": [name, ...]} +# search_jobs also returns: +{"url": str, "sections": {name: raw_text}, "job_ids": [id, ...]} ``` +`sections` remains the main readable payload. `references` is a compact supplement for entity/article traversal. LinkedIn references are emitted as relative paths to minimize token use. + ## Checklist: Adding a New Section When adding a section to an existing tool (e.g., adding "certifications" to `get_person_profile`): diff --git a/README.md b/README.md index d3bd6ca3..188a0da5 100644 --- a/README.md +++ b/README.md @@ -48,6 +48,8 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company | `get_job_details` | Get detailed information about a specific job posting | Working | | `close_session` | Close browser session and clean up resources | Working | +Tool responses keep readable `sections` text and may also include a compact `references` map keyed by section. Each reference includes a typed target, a relative LinkedIn path (or absolute external URL), and a short label/context when available. + > [!IMPORTANT] > **Breaking change:** LinkedIn recently made some changes to prevent scraping. The newest version uses [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python) with persistent browser profiles instead of Playwright with session files. Old `session.json` files and `LINKEDIN_COOKIE` env vars are no longer supported. 
Run `--login` again to create a new profile + cookie file that can be mounted in docker. 02/2026 diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 2a288bb7..691b0fc2 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -11,6 +11,7 @@ A Model Context Protocol (MCP) server that connects AI assistants to LinkedIn. A - **People Search**: Search for people by keywords and location - **Person Posts**: Get recent activity/posts from a person's profile - **Company Posts**: Get recent posts from a company's LinkedIn feed +- **Compact References**: Return typed per-section links alongside readable text without shipping full-page markdown ## Quick Start diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 7771a7c7..66b43a2c 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -1,6 +1,7 @@ """Core extraction engine using innerText instead of DOM selectors.""" import asyncio +from dataclasses import dataclass import logging import re from typing import Any @@ -15,6 +16,7 @@ scroll_job_sidebar, scroll_to_bottom, ) +from linkedin_mcp_server.scraping.link_metadata import Reference, build_references from .fields import COMPANY_SECTIONS, PERSON_SECTIONS @@ -81,8 +83,27 @@ def _normalize_csv(value: str, mapping: dict[str, str]) -> str: re.compile(r"^Explore premium profiles$", re.MULTILINE), # InMail upsell in contact info overlay re.compile(r"^Get up to .+ replies when you message with InMail$", re.MULTILINE), + # Footer nav links in profile/posts pages + re.compile(r"^Careers$", re.MULTILINE), + re.compile(r"^Privacy & Terms$", re.MULTILINE), + re.compile(r"^Questions\?$", re.MULTILINE), + re.compile(r"^Select language$", re.MULTILINE), ] +_NOISE_LINES: list[re.Pattern[str]] = [ + re.compile(r"^(?:Play|Pause|Playback speed|Turn fullscreen on|Fullscreen)$"), + re.compile(r"^(?:Show captions|Close modal window|Media player modal window)$"), + 
re.compile(r"^(?:Loaded:.*|Remaining time.*|Stream Type.*)$"), +] + + +@dataclass +class ExtractedSection: + """Text and compact references extracted from a loaded LinkedIn section.""" + + text: str + references: list[Reference] + def strip_linkedin_noise(text: str) -> str: """Remove LinkedIn page chrome (footer, sidebar recommendations) from innerText. @@ -95,7 +116,13 @@ def strip_linkedin_noise(text: str) -> str: if match and match.start() < earliest: earliest = match.start() - return text[:earliest].strip() + cleaned = text[:earliest].strip() + filtered_lines = [ + line + for line in cleaned.splitlines() + if not any(pattern.match(line.strip()) for pattern in _NOISE_LINES) + ] + return "\n".join(filtered_lines).strip() class LinkedInExtractor: @@ -104,7 +131,11 @@ class LinkedInExtractor: def __init__(self, page: Page): self._page = page - async def extract_page(self, url: str) -> str: + async def extract_page( + self, + url: str, + section_name: str | None = None, + ) -> ExtractedSection: """Navigate to a URL, scroll to load lazy content, and extract innerText. Retries once after a backoff when the page returns only LinkedIn chrome @@ -116,22 +147,26 @@ async def extract_page(self, url: str) -> str: Returns empty string for unexpected non-domain failures (error isolation). 
""" try: - result = await self._extract_page_once(url) - if result != _RATE_LIMITED_MSG: + result = await self._extract_page_once(url, section_name) + if result.text != _RATE_LIMITED_MSG: return result # Retry once after backoff logger.info("Retrying %s after %.0fs backoff", url, _RATE_LIMIT_RETRY_DELAY) await asyncio.sleep(_RATE_LIMIT_RETRY_DELAY) - return await self._extract_page_once(url) + return await self._extract_page_once(url, section_name) except LinkedInScraperException: raise except Exception as e: logger.warning("Failed to extract page %s: %s", url, e) - return "" + return ExtractedSection(text="", references=[]) - async def _extract_page_once(self, url: str) -> str: + async def _extract_page_once( + self, + url: str, + section_name: str | None = None, + ) -> ExtractedSection: """Single attempt to navigate, scroll, and extract innerText.""" await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) await detect_rate_limit(self._page) @@ -167,24 +202,27 @@ async def _extract_page_once(self, url: str) -> str: await scroll_to_bottom(self._page, pause_time=0.5, max_scrolls=5) # Extract text from main content area - raw = await self._page.evaluate( - """() => { - const main = document.querySelector('main'); - return main ? 
main.innerText : document.body.innerText; - }""" - ) + raw_result = await self._extract_root_content(["main"]) + raw = raw_result["text"] if not raw: - return "" + return ExtractedSection(text="", references=[]) cleaned = strip_linkedin_noise(raw) if not cleaned and raw.strip(): logger.warning( "Page %s returned only LinkedIn chrome (likely rate-limited)", url ) - return _RATE_LIMITED_MSG - return cleaned + return ExtractedSection(text=_RATE_LIMITED_MSG, references=[]) + return ExtractedSection( + text=cleaned, + references=build_references(raw_result["references"], section_name or ""), + ) - async def _extract_overlay(self, url: str) -> str: + async def _extract_overlay( + self, + url: str, + section_name: str | None = None, + ) -> ExtractedSection: """Extract content from an overlay/modal page (e.g. contact info). LinkedIn renders contact info as a native element. @@ -194,8 +232,8 @@ async def _extract_overlay(self, url: str) -> str: chrome (noise), mirroring `extract_page` behavior. """ try: - result = await self._extract_overlay_once(url) - if result != _RATE_LIMITED_MSG: + result = await self._extract_overlay_once(url, section_name) + if result.text != _RATE_LIMITED_MSG: return result logger.info( @@ -204,15 +242,19 @@ async def _extract_overlay(self, url: str) -> str: _RATE_LIMIT_RETRY_DELAY, ) await asyncio.sleep(_RATE_LIMIT_RETRY_DELAY) - return await self._extract_overlay_once(url) + return await self._extract_overlay_once(url, section_name) except LinkedInScraperException: raise except Exception as e: logger.warning("Failed to extract overlay %s: %s", url, e) - return "" + return ExtractedSection(text="", references=[]) - async def _extract_overlay_once(self, url: str) -> str: + async def _extract_overlay_once( + self, + url: str, + section_name: str | None = None, + ) -> ExtractedSection: """Single attempt to extract content from an overlay/modal page.""" await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) await 
detect_rate_limit(self._page) @@ -229,27 +271,24 @@ async def _extract_overlay_once(self, url: str) -> str: # overlay *is* a dialog/modal. Dismissing it would destroy the # content before the JS evaluation below can read it. - raw = await self._page.evaluate( - """() => { - const dialog = document.querySelector('dialog[open]'); - if (dialog) return dialog.innerText.trim(); - const modal = document.querySelector('.artdeco-modal__content'); - if (modal) return modal.innerText.trim(); - const main = document.querySelector('main'); - return main ? main.innerText.trim() : document.body.innerText.trim(); - }""" + raw_result = await self._extract_root_content( + ["dialog[open]", ".artdeco-modal__content", "main"], ) + raw = raw_result["text"] if not raw: - return "" + return ExtractedSection(text="", references=[]) cleaned = strip_linkedin_noise(raw) if not cleaned and raw.strip(): logger.warning( "Overlay %s returned only LinkedIn chrome (likely rate-limited)", url, ) - return _RATE_LIMITED_MSG - return cleaned + return ExtractedSection(text=_RATE_LIMITED_MSG, references=[]) + return ExtractedSection( + text=cleaned, + references=build_references(raw_result["references"], section_name or ""), + ) async def scrape_person(self, username: str, requested: set[str]) -> dict[str, Any]: """Scrape a person profile with configurable sections. 
@@ -260,6 +299,7 @@ async def scrape_person(self, username: str, requested: set[str]) -> dict[str, A requested = requested | {"main_profile"} base_url = f"https://www.linkedin.com/in/{username}" sections: dict[str, str] = {} + references: dict[str, list[Reference]] = {} first = True for section_name, (suffix, is_overlay) in PERSON_SECTIONS.items(): @@ -273,21 +313,28 @@ async def scrape_person(self, username: str, requested: set[str]) -> dict[str, A url = base_url + suffix try: if is_overlay: - text = await self._extract_overlay(url) + extracted = await self._extract_overlay( + url, section_name=section_name + ) else: - text = await self.extract_page(url) + extracted = await self.extract_page(url, section_name=section_name) - if text: - sections[section_name] = text + if extracted.text: + sections[section_name] = extracted.text + if extracted.references: + references[section_name] = extracted.references except LinkedInScraperException: raise except Exception as e: logger.warning("Error scraping section %s: %s", section_name, e) - return { + result: dict[str, Any] = { "url": f"{base_url}/", "sections": sections, } + if references: + result["references"] = references + return result async def scrape_company( self, company_name: str, requested: set[str] @@ -300,6 +347,7 @@ async def scrape_company( requested = requested | {"about"} base_url = f"https://www.linkedin.com/company/{company_name}" sections: dict[str, str] = {} + references: dict[str, list[Reference]] = {} first = True for section_name, (suffix, is_overlay) in COMPANY_SECTIONS.items(): @@ -313,21 +361,28 @@ async def scrape_company( url = base_url + suffix try: if is_overlay: - text = await self._extract_overlay(url) + extracted = await self._extract_overlay( + url, section_name=section_name + ) else: - text = await self.extract_page(url) + extracted = await self.extract_page(url, section_name=section_name) - if text: - sections[section_name] = text + if extracted.text: + sections[section_name] = 
extracted.text + if extracted.references: + references[section_name] = extracted.references except LinkedInScraperException: raise except Exception as e: logger.warning("Error scraping section %s: %s", section_name, e) - return { + result: dict[str, Any] = { "url": f"{base_url}/", "sections": sections, } + if references: + result["references"] = references + return result async def scrape_job(self, job_id: str) -> dict[str, Any]: """Scrape a single job posting. @@ -336,16 +391,22 @@ async def scrape_job(self, job_id: str) -> dict[str, Any]: {url, sections: {name: text}} """ url = f"https://www.linkedin.com/jobs/view/{job_id}/" - text = await self.extract_page(url) + extracted = await self.extract_page(url, section_name="job_posting") sections: dict[str, str] = {} - if text: - sections["job_posting"] = text + references: dict[str, list[Reference]] = {} + if extracted.text: + sections["job_posting"] = extracted.text + if extracted.references: + references["job_posting"] = extracted.references - return { + result: dict[str, Any] = { "url": url, "sections": sections, } + if references: + result["references"] = references + return result async def _extract_job_ids(self) -> list[str]: """Extract unique job IDs from job card links on the current page. @@ -369,7 +430,11 @@ async def _extract_job_ids(self) -> list[str]: }""" ) - async def _extract_search_page(self, url: str) -> str: + async def _extract_search_page( + self, + url: str, + section_name: str, + ) -> ExtractedSection: """Extract innerText from a job search page with soft rate-limit retry. Mirrors the noise-only detection and single-retry behavior of @@ -377,8 +442,8 @@ async def _extract_search_page(self, url: str) -> str: ``_RATE_LIMITED_MSG`` sentinel instead of silent empty results. 
""" try: - result = await self._extract_search_page_once(url) - if result != _RATE_LIMITED_MSG: + result = await self._extract_search_page_once(url, section_name) + if result.text != _RATE_LIMITED_MSG: return result logger.info( @@ -387,8 +452,8 @@ async def _extract_search_page(self, url: str) -> str: _RATE_LIMIT_RETRY_DELAY, ) await asyncio.sleep(_RATE_LIMIT_RETRY_DELAY) - result = await self._extract_search_page_once(url) - if result == _RATE_LIMITED_MSG: + result = await self._extract_search_page_once(url, section_name) + if result.text == _RATE_LIMITED_MSG: logger.warning("Search page %s still rate-limited after retry", url) return result @@ -396,9 +461,13 @@ async def _extract_search_page(self, url: str) -> str: raise except Exception as e: logger.warning("Failed to extract search page %s: %s", url, e) - return "" + return ExtractedSection(text="", references=[]) - async def _extract_search_page_once(self, url: str) -> str: + async def _extract_search_page_once( + self, + url: str, + section_name: str = "", + ) -> ExtractedSection: """Single attempt to navigate, scroll sidebar, and extract innerText.""" await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) await detect_rate_limit(self._page) @@ -414,14 +483,7 @@ async def _extract_search_page_once(self, url: str) -> str: if main_found: await scroll_job_sidebar(self._page, pause_time=0.5, max_scrolls=5) - raw_result = await self._page.evaluate( - """() => { - const main = document.querySelector('main'); - return main - ? { source: 'main', text: main.innerText } - : { source: 'body', text: document.body.innerText }; - }""" - ) + raw_result = await self._extract_root_content(["main"]) raw = raw_result["text"] if raw_result["source"] == "body": logger.debug("No
at evaluation time on %s, using body fallback", url) @@ -432,15 +494,18 @@ async def _extract_search_page_once(self, url: str) -> str: ) if not raw: - return "" + return ExtractedSection(text="", references=[]) cleaned = strip_linkedin_noise(raw) if not cleaned and raw.strip(): logger.warning( "Search page %s returned only LinkedIn chrome (likely rate-limited)", url, ) - return _RATE_LIMITED_MSG - return cleaned + return ExtractedSection(text=_RATE_LIMITED_MSG, references=[]) + return ExtractedSection( + text=cleaned, + references=build_references(raw_result["references"], section_name), + ) async def _get_total_search_pages(self) -> int | None: """Read total page count from LinkedIn's pagination state element. @@ -549,6 +614,7 @@ async def search_jobs( all_job_ids: list[str] = [] seen_ids: set[str] = set() page_texts: list[str] = [] + page_references: list[Reference] = [] total_pages: int | None = None total_pages_queried = False @@ -568,9 +634,11 @@ async def search_jobs( ) try: - text = await self._extract_search_page(url) + extracted = await self._extract_search_page( + url, section_name="search_results" + ) - if not text or text == _RATE_LIMITED_MSG: + if not extracted.text or extracted.text == _RATE_LIMITED_MSG: # Navigation failed or rate-limited; skip ID extraction break @@ -594,13 +662,13 @@ async def search_jobs( "skipping job ID extraction", self._page.url, ) - page_texts.append(text) + page_texts.append(extracted.text) break page_ids = await self._extract_job_ids() new_ids = [jid for jid in page_ids if jid not in seen_ids] if not new_ids: - page_texts.append(text) + page_texts.append(extracted.text) logger.debug("No new job IDs on page %d, stopping", page_num + 1) break @@ -608,7 +676,9 @@ async def search_jobs( seen_ids.add(jid) all_job_ids.append(jid) - page_texts.append(text) + page_texts.append(extracted.text) + if extracted.references: + page_references.extend(extracted.references) except LinkedInScraperException: raise @@ -616,13 +686,23 @@ async 
def search_jobs( logger.warning("Error on search page %d: %s", page_num + 1, e) break - return { + result: dict[str, Any] = { "url": base_url, "sections": {"search_results": "\n---\n".join(page_texts)} if page_texts else {}, "job_ids": all_job_ids, } + if page_references: + deduped_references: list[Reference] = [] + seen_reference_urls: set[str] = set() + for reference in page_references: + if reference["url"] in seen_reference_urls: + continue + seen_reference_urls.add(reference["url"]) + deduped_references.append(reference) + result["references"] = {"search_results": deduped_references[:15]} + return result async def search_people( self, @@ -639,13 +719,83 @@ async def search_people( params += f"&location={quote_plus(location)}" url = f"https://www.linkedin.com/search/results/people/?{params}" - text = await self.extract_page(url) + extracted = await self.extract_page(url, section_name="search_results") sections: dict[str, str] = {} - if text: - sections["search_results"] = text + references: dict[str, list[Reference]] = {} + if extracted.text: + sections["search_results"] = extracted.text + if extracted.references: + references["search_results"] = extracted.references - return { + result: dict[str, Any] = { "url": url, "sections": sections, } + if references: + result["references"] = references + return result + + async def _extract_root_content( + self, + selectors: list[str], + ) -> dict[str, Any]: + """Extract innerText and raw anchor metadata from the first matching root.""" + result = await self._page.evaluate( + """({ selectors }) => { + const normalize = value => (value || '').replace(/\\s+/g, ' ').trim(); + + const findHeading = (element, root) => { + let current = element; + while (current && current !== root) { + if (current.matches && current.matches('section, article, li, div')) { + const ownHeading = current.querySelector(':scope > h1, :scope > h2, :scope > h3'); + if (ownHeading) { + const text = normalize(ownHeading.innerText || 
ownHeading.textContent); + if (text) return text; + } + } + + let sibling = current.previousElementSibling; + while (sibling) { + const heading = + sibling.matches && sibling.matches('h1, h2, h3') + ? sibling + : sibling.querySelector + ? sibling.querySelector('h1, h2, h3') + : null; + if (heading) { + const text = normalize(heading.innerText || heading.textContent); + if (text) return text; + } + sibling = sibling.previousElementSibling; + } + current = current.parentElement; + } + return ''; + }; + + const root = selectors + .map(selector => document.querySelector(selector)) + .find(Boolean); + const source = root ? 'root' : 'body'; + const container = root || document.body; + const text = container ? (container.innerText || '').trim() : ''; + + const references = Array.from(container.querySelectorAll('a[href]')).map(anchor => ({ + href: anchor.href || anchor.getAttribute('href') || '', + text: normalize(anchor.innerText || anchor.textContent), + aria_label: normalize(anchor.getAttribute('aria-label')), + title: normalize(anchor.getAttribute('title')), + heading: findHeading(anchor, container), + in_article: Boolean(anchor.closest('article')), + in_list: Boolean(anchor.closest('li')), + in_nav: Boolean(anchor.closest('nav')), + in_footer: Boolean(anchor.closest('footer')), + })); + + return { source, text, references }; + }""", + {"selectors": selectors}, + ) + return result diff --git a/linkedin_mcp_server/scraping/link_metadata.py b/linkedin_mcp_server/scraping/link_metadata.py new file mode 100644 index 00000000..63d13c21 --- /dev/null +++ b/linkedin_mcp_server/scraping/link_metadata.py @@ -0,0 +1,359 @@ +"""Helpers for extracting compact, typed references from LinkedIn DOM links.""" + +from __future__ import annotations + +import re +from typing import Literal, TypedDict +from urllib.parse import parse_qs, unquote, urlparse, urlunparse + +ReferenceKind = Literal[ + "person", + "company", + "job", + "feed_post", + "article", + "newsletter", + "school", + 
"external", +] + + +class Reference(TypedDict, total=False): + """Compact reference payload returned to MCP clients.""" + + kind: ReferenceKind + url: str + text: str + context: str + + +class RawReference(TypedDict, total=False): + """Raw anchor data collected from the browser DOM.""" + + href: str + text: str + aria_label: str + title: str + heading: str + in_article: bool + in_list: bool + in_nav: bool + in_footer: bool + + +_GENERIC_LABELS = { + "show all", + "follow", + "following", + "connect", + "send", + "like", + "comment", + "repost", + "post", + "play", + "pause", + "fullscreen", + "close", + "manage notifications", + "view my newsletter", + "my newsletter", +} + +_CONTEXT_LABELS = { + "about", + "experience", + "education", + "interests", + "honors", + "languages", + "featured", + "contact info", +} + +_SECTION_CONTEXTS = { + "experience": "experience", + "education": "education", + "interests": "interests", + "honors": "honors", + "languages": "languages", + "contact_info": "contact info", + "job_posting": "job result", +} + +_REFERENCE_CAPS = { + "main_profile": 12, + "about": 12, + "posts": 12, + "search_results": 15, + "job_posting": 8, + "contact_info": 8, +} + +_URL_LIKE_RE = re.compile(r"^(?:https?://|/)\S+$", re.IGNORECASE) +_DUPLICATE_HALVES_RE = re.compile(r"^(?P.+?)\s+(?P=value)$") +_WHITESPACE_RE = re.compile(r"\s+") +_CONNECTIONS_FOLLOW_RE = re.compile(r"\bconnections follow this page\b", re.IGNORECASE) +_COMPANY_PATH_RE = re.compile(r"^/company/([^/?#]+)") +_PERSON_PATH_RE = re.compile(r"^/in/([^/?#]+)") +_SCHOOL_PATH_RE = re.compile(r"^/school/([^/?#]+)") +_JOB_PATH_RE = re.compile(r"^/jobs/view/(\d+)") +_NEWSLETTER_PATH_RE = re.compile(r"^/newsletters/([^/?#]+)") +_PULSE_PATH_RE = re.compile(r"^/pulse/([^/?#]+)") +_FEED_PATH_RE = re.compile(r"^/feed/update/([^/?#]+)") + + +def build_references( + raw_references: list[RawReference], + section_name: str, +) -> list[Reference]: + """Filter and normalize raw DOM anchors into compact 
references.""" + deduped: dict[str, Reference] = {} + ordered_urls: list[str] = [] + cap = _REFERENCE_CAPS.get(section_name, 12) + + for raw in raw_references: + normalized = normalize_reference(raw, section_name) + if normalized is None: + continue + + url = normalized["url"] + existing = deduped.get(url) + if existing is None: + deduped[url] = normalized + ordered_urls.append(url) + continue + + deduped[url] = _choose_better_reference(existing, normalized) + + return [deduped[url] for url in ordered_urls[:cap]] + + +def normalize_reference( + raw: RawReference, + section_name: str, +) -> Reference | None: + """Normalize one raw DOM anchor into a compact reference.""" + if raw.get("in_nav") or raw.get("in_footer"): + return None + + href = normalize_url(raw.get("href", "")) + if href is None: + return None + + kind_url = classify_link(href) + if kind_url is None: + return None + kind, normalized_url = kind_url + + text = choose_reference_text(raw, kind) + if text is None and kind not in {"feed_post", "external"}: + return None + + context = derive_context(section_name, raw, kind) + + reference: Reference = { + "kind": kind, + "url": normalized_url, + } + if text: + reference["text"] = text + if context: + reference["context"] = context + return reference + + +def normalize_url(href: str) -> str | None: + """Normalize a raw href and unwrap LinkedIn redirect URLs.""" + href = href.strip() + if not href or href.startswith("#"): + return None + + parsed = urlparse(href) + if parsed.scheme.lower() in {"blob", "javascript", "mailto", "tel"}: + return None + + if "linkedin.com" in parsed.netloc.lower() and parsed.path == "/redir/redirect/": + target = unquote((parse_qs(parsed.query).get("url") or [""])[0]).strip() + if not target: + return None + return normalize_url(target) + + if not parsed.scheme: + return None + + return urlunparse((parsed.scheme, parsed.netloc, parsed.path, "", parsed.query, "")) + + +def classify_link(href: str) -> tuple[ReferenceKind, str] | None: 
+ """Classify and canonicalize one normalized URL.""" + parsed = urlparse(href) + host = parsed.netloc.lower() + path = parsed.path or "/" + + if "linkedin.com" not in host: + return "external", urlunparse( + (parsed.scheme, parsed.netloc, parsed.path or "/", "", parsed.query, "") + ) + + if _is_linkedin_chrome(path): + return None + + if match := _PERSON_PATH_RE.match(path): + if "/overlay/" in path or "/details/" in path or "/recent-activity/" in path: + return None + return "person", f"/in/{match.group(1)}/" + + if match := _COMPANY_PATH_RE.match(path): + return "company", f"/company/{match.group(1)}/" + + if match := _SCHOOL_PATH_RE.match(path): + return "school", f"/school/{match.group(1)}/" + + if match := _JOB_PATH_RE.match(path): + return "job", f"/jobs/view/{match.group(1)}/" + + if match := _NEWSLETTER_PATH_RE.match(path): + return "newsletter", f"/newsletters/{match.group(1)}/" + + if match := _PULSE_PATH_RE.match(path): + return "article", f"/pulse/{match.group(1)}" + + if match := _FEED_PATH_RE.match(path): + return "feed_post", f"/feed/update/{match.group(1)}" + + return None + + +def choose_reference_text( + raw: RawReference, + kind: ReferenceKind, +) -> str | None: + """Choose the best compact human-readable label for a reference.""" + candidates: list[tuple[int, str]] = [] + for priority, candidate in enumerate( + ( + raw.get("text", ""), + raw.get("aria_label", ""), + raw.get("title", ""), + ) + ): + cleaned = clean_label(candidate, kind) + if cleaned: + candidates.append((priority, cleaned)) + + if not candidates: + return None + + candidates.sort(key=lambda item: (len(item[1]), item[0])) + return candidates[0][1] + + +def clean_label(value: str, kind: ReferenceKind) -> str | None: + """Normalize and compact a candidate label.""" + value = _WHITESPACE_RE.sub(" ", value).strip() + if not value: + return None + + value = re.sub(r"^(?:View:|View|Open article:)\s*", "", value, flags=re.IGNORECASE) + value = re.sub(r"[โ€™']s\s+graphic link$", "", 
value, flags=re.IGNORECASE) + value = re.sub(r"\s+graphic link$", "", value, flags=re.IGNORECASE) + value = value.strip(" :-") + + if " by " in value and kind in {"article", "external"}: + value = value.split(" by ", 1)[0].strip() + + for separator in (" โ€ข ", " ยท ", " | "): + if separator in value: + value = value.split(separator, 1)[0].strip() + + duplicate_match = _DUPLICATE_HALVES_RE.match(value) + if duplicate_match: + value = duplicate_match.group("value").strip() + + if _URL_LIKE_RE.match(value): + return None + if _CONNECTIONS_FOLLOW_RE.search(value): + return None + if value.lower() in _GENERIC_LABELS: + return None + if len(value) > 80: + return None + if not re.search(r"[A-Za-z0-9]", value): + return None + + return value + + +def derive_context( + section_name: str, + raw: RawReference, + kind: ReferenceKind, +) -> str | None: + """Build a compact context hint for one retained reference.""" + if section_name in _SECTION_CONTEXTS: + return _SECTION_CONTEXTS[section_name] + + heading = clean_heading(raw.get("heading", "")) + + if section_name == "search_results": + return "job result" if kind == "job" else "search result" + + if section_name == "posts": + if kind == "person": + return "post author" + if raw.get("in_article"): + return "post attachment" + return "company post" if kind == "feed_post" else "post attachment" + + if section_name in {"main_profile", "about"}: + if heading in _CONTEXT_LABELS: + return heading + if raw.get("in_article"): + return "featured" + return "top card" + + return heading if heading in _CONTEXT_LABELS else None + + +def clean_heading(value: str) -> str | None: + """Normalize a raw heading into a short supported context label.""" + value = _WHITESPACE_RE.sub(" ", value).strip().lower() + if not value: + return None + return value if value in _CONTEXT_LABELS else None + + +def _choose_better_reference(existing: Reference, new: Reference) -> Reference: + """Keep the cleaner, richer of two duplicate-url references.""" + 
existing_score = _reference_score(existing) + new_score = _reference_score(new) + return new if new_score > existing_score else existing + + +def _reference_score(reference: Reference) -> tuple[int, int, int]: + text = reference.get("text") + context = reference.get("context") + return ( + 1 if text else 0, + 1 if context else 0, + -(len(text) if text else 999), + ) + + +def _is_linkedin_chrome(path: str) -> bool: + return any( + fragment in path + for fragment in ( + "/help/", + "/legal", + "/about/", + "/accessibility", + "/mypreferences/", + "/preferences/", + "/search/results/", + "/overlay/background-photo/", + "/overlay/browsemap-recommendations/", + "/preload/custom-invite/", + ) + ) diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index b9539303..85713d90 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -47,7 +47,7 @@ async def get_company_profile( Default (None) scrapes only the about page. Returns: - Dict with url and sections (name -> raw text). + Dict with url, sections (name -> raw text), and optional references. Includes unknown_sections list when unrecognised names are passed. The LLM should parse the raw text in each section. """ @@ -95,7 +95,7 @@ async def get_company_posts( ctx: FastMCP context for progress reporting Returns: - Dict with url and sections (name -> raw text). + Dict with url, sections (name -> raw text), and optional references. The LLM should parse the raw text to extract individual posts. 
""" try: @@ -106,18 +106,24 @@ async def get_company_posts( ) url = f"https://www.linkedin.com/company/{company_name}/posts/" - text = await extractor.extract_page(url) + extracted = await extractor.extract_page(url, section_name="posts") sections: dict[str, str] = {} - if text: - sections["posts"] = text + references: dict[str, Any] = {} + if extracted.text: + sections["posts"] = extracted.text + if extracted.references: + references["posts"] = extracted.references await ctx.report_progress(progress=100, total=100, message="Complete") - return { + result = { "url": url, "sections": sections, } + if references: + result["references"] = references + return result except Exception as e: raise_tool_error(e, "get_company_posts") # NoReturn diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index 90d190a5..dcf365a1 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -41,7 +41,7 @@ async def get_job_details( ctx: FastMCP context for progress reporting Returns: - Dict with url and sections (name -> raw text). + Dict with url, sections (name -> raw text), and optional references. The LLM should parse the raw text to extract job details. """ try: @@ -97,8 +97,8 @@ async def search_jobs( sort_by: Sort results (date, relevance) Returns: - Dict with url, sections (name -> raw text), and job_ids (list of - numeric job ID strings usable with get_job_details). + Dict with url, sections (name -> raw text), job_ids (list of + numeric job ID strings usable with get_job_details), and optional references. """ try: logger.info( diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index e5ffa409..afa058bb 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -47,7 +47,7 @@ async def get_person_profile( Default (None) scrapes only the main profile page. Returns: - Dict with url and sections (name -> raw text). 
+ Dict with url, sections (name -> raw text), and optional references. Sections may be absent if extraction yielded no content for that page. Includes unknown_sections list when unrecognised names are passed. The LLM should parse the raw text in each section. @@ -98,7 +98,7 @@ async def search_people( location: Optional location filter (e.g., "New York", "Remote") Returns: - Dict with url and sections (name -> raw text). + Dict with url, sections (name -> raw text), and optional references. The LLM should parse the raw text to extract individual people and their profiles. """ try: diff --git a/tests/test_link_metadata.py b/tests/test_link_metadata.py new file mode 100644 index 00000000..317c0c77 --- /dev/null +++ b/tests/test_link_metadata.py @@ -0,0 +1,194 @@ +"""Tests for compact LinkedIn reference extraction helpers.""" + +from linkedin_mcp_server.scraping.link_metadata import RawReference, build_references + + +class TestBuildReferences: + def test_canonicalizes_and_types_linkedin_urls(self): + references = build_references( + [ + { + "href": "https://www.linkedin.com/in/williamhgates?miniProfileUrn=123", + "text": "Bill Gates", + "heading": "Featured", + }, + { + "href": "https://www.linkedin.com/company/gates-foundation/posts/", + "text": "Gates Foundation", + "heading": "Experience", + }, + { + "href": "https://www.linkedin.com/pulse/phone-call-saves-lives-bill-gates-yspvc?trackingId=123", + "text": "A phone call that saves lives", + }, + ], + "main_profile", + ) + + assert references == [ + { + "kind": "person", + "url": "/in/williamhgates/", + "text": "Bill Gates", + "context": "featured", + }, + { + "kind": "company", + "url": "/company/gates-foundation/", + "text": "Gates Foundation", + "context": "experience", + }, + { + "kind": "article", + "url": "/pulse/phone-call-saves-lives-bill-gates-yspvc", + "text": "A phone call that saves lives", + "context": "top card", + }, + ] + + def test_unwraps_redirect_and_drops_junk(self): + references = 
build_references( + [ + { + "href": "https://www.linkedin.com/redir/redirect/?url=https%3A%2F%2Fgatesnot.es%2Ftgn&urlhash=abc", + "text": "Gates Notes", + }, + { + "href": "blob:https://www.linkedin.com/123", + "text": "Video", + }, + { + "href": "#caret-small", + "text": "", + }, + { + "href": "https://www.linkedin.com/help/linkedin/", + "text": "Questions?", + }, + ], + "posts", + ) + + assert references == [ + { + "kind": "external", + "url": "https://gatesnot.es/tgn", + "text": "Gates Notes", + "context": "post attachment", + } + ] + + def test_prefers_cleaner_duplicate_label(self): + references = build_references( + [ + { + "href": "https://www.linkedin.com/newsletters/gates-notes-123/", + "text": "View my newsletter", + "aria_label": "Gates Notes", + }, + { + "href": "https://www.linkedin.com/newsletters/gates-notes-123/", + "text": "Gates Notes Gates Notes", + }, + ], + "posts", + ) + + assert references == [ + { + "kind": "newsletter", + "url": "/newsletters/gates-notes-123/", + "text": "Gates Notes", + "context": "post attachment", + } + ] + + def test_prefers_shorter_clean_label_over_merged_visible_text(self): + references = build_references( + [ + { + "href": "https://www.linkedin.com/pulse/test-post?trackingId=123", + "text": "Gates Notes Gates Notes A phone call that saves lives Bill Gates", + "aria_label": "Open article: A phone call that saves lives by Bill Gates โ€ข 3 min read", + } + ], + "posts", + ) + + assert references == [ + { + "kind": "article", + "url": "/pulse/test-post", + "text": "A phone call that saves lives", + "context": "post attachment", + } + ] + + def test_drops_social_proof_company_labels(self): + references = build_references( + [ + { + "href": "https://www.linkedin.com/company/gates-foundation/", + "text": "Falguni & 8 other connections follow this page", + }, + { + "href": "https://www.linkedin.com/company/gates-foundation/", + "text": "Gates Foundation", + }, + ], + "about", + ) + + assert references == [ + { + "kind": 
"company", + "url": "/company/gates-foundation/", + "text": "Gates Foundation", + "context": "top card", + } + ] + + def test_caps_results_per_section(self): + raw: list[RawReference] = [ + { + "href": f"https://www.linkedin.com/company/test-{idx}/", + "text": f"Company {idx}", + } + for idx in range(20) + ] + + references = build_references(raw, "about") + + assert len(references) == 12 + assert references[0]["url"] == "/company/test-0/" + assert references[-1]["url"] == "/company/test-11/" + + def test_uses_search_result_contexts(self): + references = build_references( + [ + { + "href": "https://www.linkedin.com/jobs/view/12345/", + "text": "Senior Engineer", + }, + { + "href": "https://www.linkedin.com/in/stickerdaniel/", + "text": "Daniel Sticker", + }, + ], + "search_results", + ) + + assert references == [ + { + "kind": "job", + "url": "/jobs/view/12345/", + "text": "Senior Engineer", + "context": "job result", + }, + { + "kind": "person", + "url": "/in/stickerdaniel/", + "text": "Daniel Sticker", + "context": "search result", + }, + ] diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 2090dbbb..2d3aa9d2 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -5,10 +5,20 @@ import pytest from linkedin_mcp_server.scraping.extractor import ( + ExtractedSection, LinkedInExtractor, _RATE_LIMITED_MSG, strip_linkedin_noise, ) +from linkedin_mcp_server.scraping.link_metadata import Reference + + +def extracted( + text: str, + references: list[Reference] | None = None, +) -> ExtractedSection: + """Create an ExtractedSection for tests.""" + return ExtractedSection(text=text, references=references or []) class TestBuildJobSearchUrl: @@ -104,7 +114,9 @@ def mock_page(): page = MagicMock() page.goto = AsyncMock() page.wait_for_selector = AsyncMock() - page.evaluate = AsyncMock(return_value="Sample page text") + page.evaluate = AsyncMock( + return_value={"source": "root", "text": "Sample page text", "references": []} + ) page.url = 
"https://www.linkedin.com/in/testuser/" page.locator = MagicMock() # Default: no modals, no CAPTCHA @@ -120,12 +132,11 @@ def mock_page(): class TestExtractPage: async def test_extract_page_returns_text(self, mock_page): mock_page.evaluate = AsyncMock( - side_effect=[ - "Sample profile text", # main.innerText - 100, # scrollHeight (first check) - None, # scrollTo - 100, # scrollHeight (unchanged = stop) - ] + return_value={ + "source": "root", + "text": "Sample profile text", + "references": [], + } ) extractor = LinkedInExtractor(mock_page) # Patch scroll_to_bottom and detect_rate_limit to avoid complex mock chains @@ -148,7 +159,8 @@ async def test_extract_page_returns_text(self, mock_page): "https://www.linkedin.com/in/testuser/" ) - assert result == "Sample profile text" + assert result.text == "Sample profile text" + assert result.references == [] mock_page.goto.assert_awaited_once() async def test_extract_page_returns_empty_on_failure(self, mock_page): @@ -156,7 +168,8 @@ async def test_extract_page_returns_empty_on_failure(self, mock_page): extractor = LinkedInExtractor(mock_page) result = await extractor.extract_page("https://www.linkedin.com/in/bad/") - assert result == "" + assert result.text == "" + assert result.references == [] async def test_rate_limit_detected(self, mock_page): from linkedin_mcp_server.core.exceptions import RateLimitError @@ -179,7 +192,9 @@ async def test_returns_rate_limited_msg_after_retry(self, mock_page): "You've approached your profile search limit\n\n" "About\nAccessibility\nTalent Solutions" ) - mock_page.evaluate = AsyncMock(return_value=noise_only) + mock_page.evaluate = AsyncMock( + return_value={"source": "root", "text": noise_only, "references": []} + ) extractor = LinkedInExtractor(mock_page) with ( patch( @@ -204,7 +219,7 @@ async def test_returns_rate_limited_msg_after_retry(self, mock_page): "https://www.linkedin.com/in/testuser/details/experience/" ) - assert result == _RATE_LIMITED_MSG + assert result.text == 
_RATE_LIMITED_MSG # goto called twice (initial + retry) assert mock_page.goto.await_count == 2 @@ -220,7 +235,14 @@ async def evaluate_side_effect(*args, **kwargs): return noise_only return "Education\nHarvard University\n1973 โ€“ 1975" - mock_page.evaluate = AsyncMock(side_effect=evaluate_side_effect) + async def root_content_side_effect(*args, **kwargs): + return { + "source": "root", + "text": await evaluate_side_effect(), + "references": [], + } + + mock_page.evaluate = AsyncMock(side_effect=root_content_side_effect) extractor = LinkedInExtractor(mock_page) with ( patch( @@ -245,7 +267,7 @@ async def evaluate_side_effect(*args, **kwargs): "https://www.linkedin.com/in/testuser/details/education/" ) - assert result == "Education\nHarvard University\n1973 โ€“ 1975" + assert result.text == "Education\nHarvard University\n1973 โ€“ 1975" class TestScrapePersonUrls: @@ -256,13 +278,16 @@ async def test_baseline_always_included(self, mock_page): extractor = LinkedInExtractor(mock_page) with ( patch.object( - extractor, "extract_page", new_callable=AsyncMock, return_value="text" + extractor, + "extract_page", + new_callable=AsyncMock, + return_value=extracted("text"), ) as mock_extract, patch.object( extractor, "_extract_overlay", new_callable=AsyncMock, - return_value="", + return_value=extracted(""), ), patch( "linkedin_mcp_server.scraping.extractor.asyncio.sleep", @@ -283,13 +308,13 @@ async def test_basic_info_only_visits_main_profile(self, mock_page): extractor, "extract_page", new_callable=AsyncMock, - return_value="profile text", + return_value=extracted("profile text"), ) as mock_extract, patch.object( extractor, "_extract_overlay", new_callable=AsyncMock, - return_value="", + return_value=extracted(""), ), patch( "linkedin_mcp_server.scraping.extractor.asyncio.sleep", @@ -307,13 +332,16 @@ async def test_experience_education_visits_correct_urls(self, mock_page): extractor = LinkedInExtractor(mock_page) with ( patch.object( - extractor, "extract_page", 
new_callable=AsyncMock, return_value="text" + extractor, + "extract_page", + new_callable=AsyncMock, + return_value=extracted("text"), ) as mock_extract, patch.object( extractor, "_extract_overlay", new_callable=AsyncMock, - return_value="", + return_value=extracted(""), ), patch( "linkedin_mcp_server.scraping.extractor.asyncio.sleep", @@ -345,13 +373,16 @@ async def test_all_sections_visit_all_urls(self, mock_page): } with ( patch.object( - extractor, "extract_page", new_callable=AsyncMock, return_value="text" + extractor, + "extract_page", + new_callable=AsyncMock, + return_value=extracted("text"), ) as mock_extract, patch.object( extractor, "_extract_overlay", new_callable=AsyncMock, - return_value="contact text", + return_value=extracted("contact text"), ) as mock_overlay, patch( "linkedin_mcp_server.scraping.extractor.asyncio.sleep", @@ -384,13 +415,13 @@ async def test_posts_visits_recent_activity(self, mock_page): extractor, "extract_page", new_callable=AsyncMock, - return_value="Post 1\nPost 2", + return_value=extracted("Post 1\nPost 2"), ) as mock_extract, patch.object( extractor, "_extract_overlay", new_callable=AsyncMock, - return_value="", + return_value=extracted(""), ), patch( "linkedin_mcp_server.scraping.extractor.asyncio.sleep", @@ -403,13 +434,65 @@ async def test_posts_visits_recent_activity(self, mock_page): assert any("/recent-activity/all/" in url for url in urls) assert "posts" in result["sections"] + async def test_references_are_grouped_by_section(self, mock_page): + extractor = LinkedInExtractor(mock_page) + with ( + patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + side_effect=[ + extracted( + "profile text", + [ + { + "kind": "person", + "url": "/in/testuser/", + "text": "Test User", + } + ], + ), + extracted( + "post text", + [ + { + "kind": "article", + "url": "/pulse/test-post", + "text": "Test post", + } + ], + ), + ], + ), + patch.object( + extractor, + "_extract_overlay", + new_callable=AsyncMock, + 
return_value=extracted(""), + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.scrape_person("testuser", {"posts"}) + + assert result["references"] == { + "main_profile": [ + {"kind": "person", "url": "/in/testuser/", "text": "Test User"} + ], + "posts": [ + {"kind": "article", "url": "/pulse/test-post", "text": "Test post"} + ], + } + async def test_error_isolation(self, mock_page): """One section failing doesn't block others.""" - async def extract_with_failure(url): + async def extract_with_failure(url, *args, **kwargs): if "experience" in url: raise Exception("Simulated failure") - return f"text for {url}" + return extracted(f"text for {url}") extractor = LinkedInExtractor(mock_page) with ( @@ -422,7 +505,7 @@ async def extract_with_failure(url): extractor, "_extract_overlay", new_callable=AsyncMock, - return_value="", + return_value=extracted(""), ), patch( "linkedin_mcp_server.scraping.extractor.asyncio.sleep", @@ -445,7 +528,10 @@ async def test_company_baseline_always_included(self, mock_page): extractor = LinkedInExtractor(mock_page) with ( patch.object( - extractor, "extract_page", new_callable=AsyncMock, return_value="text" + extractor, + "extract_page", + new_callable=AsyncMock, + return_value=extracted("text"), ) as mock_extract, patch( "linkedin_mcp_server.scraping.extractor.asyncio.sleep", @@ -467,7 +553,7 @@ async def test_about_only_visits_about(self, mock_page): extractor, "extract_page", new_callable=AsyncMock, - return_value="about text", + return_value=extracted("about text"), ) as mock_extract, patch( "linkedin_mcp_server.scraping.extractor.asyncio.sleep", @@ -485,7 +571,10 @@ async def test_all_sections_visit_correct_urls(self, mock_page): extractor = LinkedInExtractor(mock_page) with ( patch.object( - extractor, "extract_page", new_callable=AsyncMock, return_value="text" + extractor, + "extract_page", + new_callable=AsyncMock, + 
return_value=extracted("text"), ) as mock_extract, patch( "linkedin_mcp_server.scraping.extractor.asyncio.sleep", @@ -511,7 +600,7 @@ async def test_scrape_job(self, mock_page): extractor, "extract_page", new_callable=AsyncMock, - return_value="Job: Software Engineer", + return_value=extracted("Job: Software Engineer"), ): result = await extractor.scrape_job("12345") @@ -536,7 +625,7 @@ async def test_returns_job_ids(self, mock_page): extractor, "_extract_search_page", new_callable=AsyncMock, - return_value="Job 1\nJob 2\nJob 3", + return_value=extracted("Job 1\nJob 2\nJob 3"), ), patch.object( extractor, @@ -560,6 +649,43 @@ async def test_returns_job_ids(self, mock_page): assert result["job_ids"] == ["111", "222", "333"] assert "search_results" in result["sections"] + async def test_returns_references(self, mock_page): + extractor = LinkedInExtractor(mock_page) + with ( + patch.object( + extractor, + "_extract_search_page", + new_callable=AsyncMock, + return_value=extracted( + "Job 1", + [{"kind": "job", "url": "/jobs/view/111/", "text": "Job 1"}], + ), + ), + patch.object( + extractor, + "_extract_job_ids", + new_callable=AsyncMock, + return_value=["111"], + ), + patch.object( + extractor, + "_get_total_search_pages", + new_callable=AsyncMock, + return_value=None, + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.search_jobs("python", max_pages=1) + + assert result["references"] == { + "search_results": [ + {"kind": "job", "url": "/jobs/view/111/", "text": "Job 1"} + ] + } + async def test_pagination_uses_fixed_page_size(self, mock_page): """Pages use &start= with fixed 25-per-page offset.""" extractor = LinkedInExtractor(mock_page) @@ -569,9 +695,9 @@ async def test_pagination_uses_fixed_page_size(self, mock_page): text_pages = iter(["Page 1 text", "Page 2 text"]) urls_visited: list[str] = [] - async def mock_extract(url): + async def mock_extract(url, *args, **kwargs): 
urls_visited.append(url) - return next(text_pages) + return extracted(next(text_pages)) with ( patch.object(extractor, "_extract_search_page", side_effect=mock_extract), @@ -607,7 +733,7 @@ async def test_deduplication_across_pages(self, mock_page): extractor, "_extract_search_page", new_callable=AsyncMock, - return_value="text", + return_value=extracted("text"), ) as mock_extract, patch.object( extractor, @@ -638,10 +764,10 @@ async def test_early_stop_no_new_ids(self, mock_page): id_pages = iter([["100", "200"], ["100", "200"]]) extract_call_count = 0 - async def mock_extract(url): + async def mock_extract(url, *args, **kwargs): nonlocal extract_call_count extract_call_count += 1 - return "text" + return extracted("text") with ( patch.object(extractor, "_extract_search_page", side_effect=mock_extract), @@ -677,7 +803,7 @@ async def test_stops_at_total_pages(self, mock_page): extractor, "_extract_search_page", new_callable=AsyncMock, - return_value="text", + return_value=extracted("text"), ) as mock_extract, patch.object( extractor, @@ -711,7 +837,7 @@ async def test_zero_max_pages_fetches_nothing(self, mock_page): extractor, "_extract_search_page", new_callable=AsyncMock, - return_value="text", + return_value=extracted("text"), ) as mock_extract, patch.object( extractor, @@ -743,7 +869,7 @@ async def test_single_page(self, mock_page): extractor, "_extract_search_page", new_callable=AsyncMock, - return_value="Job posting text", + return_value=extracted("Job posting text"), ) as mock_extract, patch.object( extractor, @@ -789,7 +915,7 @@ async def test_page_texts_joined_with_separator(self, mock_page): extractor, "_extract_search_page", new_callable=AsyncMock, - side_effect=lambda url: next(text_pages), + side_effect=lambda url, *args, **kwargs: extracted(next(text_pages)), ) as mock_extract, patch.object( extractor, @@ -823,7 +949,7 @@ async def test_empty_results(self, mock_page): extractor, "_extract_search_page", new_callable=AsyncMock, - return_value="", + 
return_value=extracted(""), ), patch.object( extractor, @@ -857,7 +983,7 @@ async def test_no_ids_on_first_page_captures_text(self, mock_page): extractor, "_extract_search_page", new_callable=AsyncMock, - return_value="No matching jobs found", + return_value=extracted("No matching jobs found"), ), patch.object( extractor, @@ -890,7 +1016,7 @@ async def test_url_redirect_skips_id_extraction(self, mock_page): extractor, "_extract_search_page", new_callable=AsyncMock, - return_value="Login page content", + return_value=extracted("Login page content"), ), patch.object( extractor, @@ -923,7 +1049,7 @@ async def test_rate_limited_skips_ids_and_text(self, mock_page): extractor, "_extract_search_page", new_callable=AsyncMock, - return_value=_RATE_LIMITED_MSG, + return_value=extracted(_RATE_LIMITED_MSG), ), patch.object( extractor, @@ -993,6 +1119,19 @@ def test_real_footer_with_languages(self): ) assert strip_linkedin_noise(text) == "Company info" + def test_strips_media_controls_lines(self): + text = ( + "Feed post number 1\n" + "Play\n" + "Loaded: 100.00%\n" + "Remaining time 0:07\n" + "Playback speed\n" + "Actual post content\n" + "Show captions\n" + "Close modal window" + ) + assert strip_linkedin_noise(text) == "Feed post number 1\nActual post content" + class TestActivityFeedExtraction: """Tests for activity page detection and wait behavior in _extract_page_once.""" @@ -1001,7 +1140,13 @@ async def test_activity_page_waits_for_content_and_uses_slow_scroll( self, mock_page ): """Activity URLs should call wait_for_function and use slower scroll params.""" - mock_page.evaluate = AsyncMock(return_value="Post content " * 50) + mock_page.evaluate = AsyncMock( + return_value={ + "source": "root", + "text": "Post content " * 50, + "references": [], + } + ) mock_page.wait_for_function = AsyncMock() extractor = LinkedInExtractor(mock_page) with ( @@ -1028,11 +1173,13 @@ async def test_activity_page_waits_for_content_and_uses_slow_scroll( _, kwargs = mock_scroll.call_args 
assert kwargs["pause_time"] == 1.0 assert kwargs["max_scrolls"] == 10 - assert len(result) > 200 + assert len(result.text) > 200 async def test_non_activity_page_skips_wait_and_uses_fast_scroll(self, mock_page): """Non-activity URLs should not call wait_for_function and use fast scroll.""" - mock_page.evaluate = AsyncMock(return_value="Profile text") + mock_page.evaluate = AsyncMock( + return_value={"source": "root", "text": "Profile text", "references": []} + ) mock_page.wait_for_function = AsyncMock() extractor = LinkedInExtractor(mock_page) with ( @@ -1065,7 +1212,9 @@ async def test_activity_page_timeout_proceeds_gracefully(self, mock_page): from patchright.async_api import TimeoutError as PlaywrightTimeoutError tab_headers = "All activity\nPosts\nComments\nVideos\nImages" - mock_page.evaluate = AsyncMock(return_value=tab_headers) + mock_page.evaluate = AsyncMock( + return_value={"source": "root", "text": tab_headers, "references": []} + ) mock_page.wait_for_function = AsyncMock( side_effect=PlaywrightTimeoutError("Timeout") ) @@ -1090,4 +1239,4 @@ async def test_activity_page_timeout_proceeds_gracefully(self, mock_page): ) # Should return whatever text is available, not crash - assert result == tab_headers + assert result.text == tab_headers diff --git a/tests/test_tools.py b/tests/test_tools.py index abc82154..94ac2a77 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -4,6 +4,8 @@ import pytest from fastmcp import FastMCP +from linkedin_mcp_server.scraping.extractor import ExtractedSection + async def get_tool_fn( mcp: FastMCP, name: str @@ -23,7 +25,9 @@ def _make_mock_extractor(scrape_result: dict) -> MagicMock: mock.scrape_job = AsyncMock(return_value=scrape_result) mock.search_jobs = AsyncMock(return_value=scrape_result) mock.search_people = AsyncMock(return_value=scrape_result) - mock.extract_page = AsyncMock(return_value="some text") + mock.extract_page = AsyncMock( + return_value=ExtractedSection(text="some text", references=[]) + ) return 
mock @@ -203,7 +207,9 @@ async def test_get_company_profile_unknown_section(self, mock_context): async def test_get_company_posts(self, mock_context): mock_extractor = MagicMock() - mock_extractor.extract_page = AsyncMock(return_value="Post 1\nPost 2") + mock_extractor.extract_page = AsyncMock( + return_value=ExtractedSection(text="Post 1\nPost 2", references=[]) + ) from linkedin_mcp_server.tools.company import register_company_tools From 8b8aae7a7bb1e446dad1dd3194aabadd93d6f2ab Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 8 Mar 2026 16:45:14 +0100 Subject: [PATCH 469/565] fix(scraping): address review feedback --- linkedin_mcp_server/scraping/extractor.py | 46 ++++---- linkedin_mcp_server/scraping/link_metadata.py | 100 ++++++++++++------ tests/test_link_metadata.py | 72 ++++++++++++- 3 files changed, 162 insertions(+), 56 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 66b43a2c..5c576adf 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -16,7 +16,11 @@ scroll_job_sidebar, scroll_to_bottom, ) -from linkedin_mcp_server.scraping.link_metadata import Reference, build_references +from linkedin_mcp_server.scraping.link_metadata import ( + Reference, + build_references, + dedupe_references, +) from .fields import COMPANY_SECTIONS, PERSON_SECTIONS @@ -694,14 +698,9 @@ async def search_jobs( "job_ids": all_job_ids, } if page_references: - deduped_references: list[Reference] = [] - seen_reference_urls: set[str] = set() - for reference in page_references: - if reference["url"] in seen_reference_urls: - continue - seen_reference_urls.add(reference["url"]) - deduped_references.append(reference) - result["references"] = {"search_results": deduped_references[:15]} + result["references"] = { + "search_results": dedupe_references(page_references, cap=15) + } return result async def search_people( @@ -782,17 +781,24 @@ async def 
_extract_root_content( const container = root || document.body; const text = container ? (container.innerText || '').trim() : ''; - const references = Array.from(container.querySelectorAll('a[href]')).map(anchor => ({ - href: anchor.href || anchor.getAttribute('href') || '', - text: normalize(anchor.innerText || anchor.textContent), - aria_label: normalize(anchor.getAttribute('aria-label')), - title: normalize(anchor.getAttribute('title')), - heading: findHeading(anchor, container), - in_article: Boolean(anchor.closest('article')), - in_list: Boolean(anchor.closest('li')), - in_nav: Boolean(anchor.closest('nav')), - in_footer: Boolean(anchor.closest('footer')), - })); + const references = Array.from(container.querySelectorAll('a[href]')).map(anchor => { + const rawHref = (anchor.getAttribute('href') || '').trim(); + const href = rawHref.startsWith('#') + ? rawHref + : (anchor.href || rawHref || ''); + + return { + href, + text: normalize(anchor.innerText || anchor.textContent), + aria_label: normalize(anchor.getAttribute('aria-label')), + title: normalize(anchor.getAttribute('title')), + heading: findHeading(anchor, container), + in_article: Boolean(anchor.closest('article')), + in_list: Boolean(anchor.closest('li')), + in_nav: Boolean(anchor.closest('nav')), + in_footer: Boolean(anchor.closest('footer')), + }; + }); return { source, text, references }; }""", diff --git a/linkedin_mcp_server/scraping/link_metadata.py b/linkedin_mcp_server/scraping/link_metadata.py index 63d13c21..7c5e8887 100644 --- a/linkedin_mcp_server/scraping/link_metadata.py +++ b/linkedin_mcp_server/scraping/link_metadata.py @@ -3,7 +3,7 @@ from __future__ import annotations import re -from typing import Literal, TypedDict +from typing import Literal, NotRequired, Required, TypedDict from urllib.parse import parse_qs, unquote, urlparse, urlunparse ReferenceKind = Literal[ @@ -18,13 +18,13 @@ ] -class Reference(TypedDict, total=False): +class Reference(TypedDict): """Compact reference payload 
returned to MCP clients.""" - kind: ReferenceKind - url: str - text: str - context: str + kind: Required[ReferenceKind] + url: Required[str] + text: NotRequired[str] + context: NotRequired[str] class RawReference(TypedDict, total=False): @@ -108,25 +108,16 @@ def build_references( section_name: str, ) -> list[Reference]: """Filter and normalize raw DOM anchors into compact references.""" - deduped: dict[str, Reference] = {} - ordered_urls: list[str] = [] cap = _REFERENCE_CAPS.get(section_name, 12) + normalized_references: list[Reference] = [] for raw in raw_references: normalized = normalize_reference(raw, section_name) if normalized is None: continue + normalized_references.append(normalized) - url = normalized["url"] - existing = deduped.get(url) - if existing is None: - deduped[url] = normalized - ordered_urls.append(url) - continue - - deduped[url] = _choose_better_reference(existing, normalized) - - return [deduped[url] for url in ordered_urls[:cap]] + return dedupe_references(normalized_references, cap=cap) def normalize_reference( @@ -173,7 +164,8 @@ def normalize_url(href: str) -> str | None: if parsed.scheme.lower() in {"blob", "javascript", "mailto", "tel"}: return None - if "linkedin.com" in parsed.netloc.lower() and parsed.path == "/redir/redirect/": + host = parsed.netloc.lower() + if _is_linkedin_host(host) and parsed.path == "/redir/redirect/": target = unquote((parse_qs(parsed.query).get("url") or [""])[0]).strip() if not target: return None @@ -191,7 +183,7 @@ def classify_link(href: str) -> tuple[ReferenceKind, str] | None: host = parsed.netloc.lower() path = parsed.path or "/" - if "linkedin.com" not in host: + if not _is_linkedin_host(host): return "external", urlunparse( (parsed.scheme, parsed.netloc, parsed.path or "/", "", parsed.query, "") ) @@ -331,6 +323,27 @@ def _choose_better_reference(existing: Reference, new: Reference) -> Reference: return new if new_score > existing_score else existing +def dedupe_references( + references: 
list[Reference], + cap: int | None = None, +) -> list[Reference]: + """Dedupe references by URL while keeping the cleaner duplicate in order.""" + deduped: dict[str, Reference] = {} + ordered_urls: list[str] = [] + + for reference in references: + url = reference["url"] + existing = deduped.get(url) + if existing is None: + deduped[url] = reference + ordered_urls.append(url) + continue + deduped[url] = _choose_better_reference(existing, reference) + + ordered = [deduped[url] for url in ordered_urls] + return ordered[:cap] if cap is not None else ordered + + def _reference_score(reference: Reference) -> tuple[int, int, int]: text = reference.get("text") context = reference.get("context") @@ -342,18 +355,35 @@ def _reference_score(reference: Reference) -> tuple[int, int, int]: def _is_linkedin_chrome(path: str) -> bool: - return any( - fragment in path - for fragment in ( - "/help/", - "/legal", - "/about/", - "/accessibility", - "/mypreferences/", - "/preferences/", - "/search/results/", - "/overlay/background-photo/", - "/overlay/browsemap-recommendations/", - "/preload/custom-invite/", - ) - ) + path = path.split("?", 1)[0].split("#", 1)[0] + if not path.startswith("/"): + path = f"/{path}" + + segments = [segment for segment in path.split("/") if segment] + if not segments: + return False + + first = segments[0] + second = segments[1] if len(segments) > 1 else "" + + if first in { + "help", + "legal", + "about", + "accessibility", + "mypreferences", + "preferences", + }: + return True + if first == "search" and second == "results": + return True + if first == "overlay" and second in { + "background-photo", + "browsemap-recommendations", + }: + return True + return first == "preload" and second == "custom-invite" + + +def _is_linkedin_host(host: str) -> bool: + return host == "linkedin.com" or host.endswith(".linkedin.com") diff --git a/tests/test_link_metadata.py b/tests/test_link_metadata.py index 317c0c77..535b775a 100644 --- a/tests/test_link_metadata.py +++ 
b/tests/test_link_metadata.py @@ -1,6 +1,10 @@ """Tests for compact LinkedIn reference extraction helpers.""" -from linkedin_mcp_server.scraping.link_metadata import RawReference, build_references +from linkedin_mcp_server.scraping.link_metadata import ( + RawReference, + build_references, + dedupe_references, +) class TestBuildReferences: @@ -192,3 +196,69 @@ def test_uses_search_result_contexts(self): "context": "search result", }, ] + + def test_does_not_treat_lookalike_domains_as_linkedin(self): + references = build_references( + [ + { + "href": "https://www.notlinkedin.com/company/fake/about/", + "text": "Fake Company", + } + ], + "about", + ) + + assert references == [ + { + "kind": "external", + "url": "https://www.notlinkedin.com/company/fake/about/", + "text": "Fake Company", + "context": "top card", + } + ] + + def test_keeps_company_about_routes(self): + references = build_references( + [ + { + "href": "https://www.linkedin.com/company/legalzoom/about/", + "text": "LegalZoom", + } + ], + "about", + ) + + assert references == [ + { + "kind": "company", + "url": "/company/legalzoom/", + "text": "LegalZoom", + "context": "top card", + } + ] + + def test_cross_page_dedupe_keeps_better_reference(self): + references = dedupe_references( + [ + { + "kind": "job", + "url": "/jobs/view/123/", + "text": "Job", + }, + { + "kind": "job", + "url": "/jobs/view/123/", + "text": "Senior Software Engineer", + "context": "job result", + }, + ] + ) + + assert references == [ + { + "kind": "job", + "url": "/jobs/view/123/", + "text": "Senior Software Engineer", + "context": "job result", + } + ] From a5956642429e67c3e6b5f2e24553c65f7e36f69d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 8 Mar 2026 17:06:55 +0100 Subject: [PATCH 470/565] fix(scraping): refine review follow-ups --- linkedin_mcp_server/scraping/extractor.py | 82 +++++++++++-------- linkedin_mcp_server/scraping/link_metadata.py | 9 +- tests/test_link_metadata.py | 21 +++++ tests/test_scraping.py | 24 
+++++- 4 files changed, 101 insertions(+), 35 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 5c576adf..a5aaea9b 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -87,11 +87,13 @@ def _normalize_csv(value: str, mapping: dict[str, str]) -> str: re.compile(r"^Explore premium profiles$", re.MULTILINE), # InMail upsell in contact info overlay re.compile(r"^Get up to .+ replies when you message with InMail$", re.MULTILINE), - # Footer nav links in profile/posts pages - re.compile(r"^Careers$", re.MULTILINE), - re.compile(r"^Privacy & Terms$", re.MULTILINE), - re.compile(r"^Questions\?$", re.MULTILINE), - re.compile(r"^Select language$", re.MULTILINE), + # Footer nav clusters in profile/posts pages + re.compile( + r"^(?:Careers|Privacy & Terms|Questions\?|Select language)\n+" + r"(?:Privacy & Terms|Questions\?|Select language|Advertising|Ad Choices|" + r"[A-Za-z]+ \([A-Za-z]+\))", + re.MULTILINE, + ), ] _NOISE_LINES: list[re.Pattern[str]] = [ @@ -673,6 +675,8 @@ async def search_jobs( if not new_ids: page_texts.append(extracted.text) + if extracted.references: + page_references.extend(extracted.references) logger.debug("No new job IDs on page %d, stopping", page_num + 1) break @@ -743,35 +747,21 @@ async def _extract_root_content( result = await self._page.evaluate( """({ selectors }) => { const normalize = value => (value || '').replace(/\\s+/g, ' ').trim(); + const containerSelector = 'section, article, li, div'; + const headingSelector = 'h1, h2, h3'; + const directHeadingSelector = ':scope > h1, :scope > h2, :scope > h3'; - const findHeading = (element, root) => { - let current = element; - while (current && current !== root) { - if (current.matches && current.matches('section, article, li, div')) { - const ownHeading = current.querySelector(':scope > h1, :scope > h2, :scope > h3'); - if (ownHeading) { - const text = 
normalize(ownHeading.innerText || ownHeading.textContent); - if (text) return text; - } - } + const getHeadingText = element => { + if (!element) return ''; - let sibling = current.previousElementSibling; - while (sibling) { - const heading = - sibling.matches && sibling.matches('h1, h2, h3') - ? sibling - : sibling.querySelector - ? sibling.querySelector('h1, h2, h3') - : null; - if (heading) { - const text = normalize(heading.innerText || heading.textContent); - if (text) return text; - } - sibling = sibling.previousElementSibling; - } - current = current.parentElement; - } - return ''; + const heading = + element.matches && element.matches(headingSelector) + ? element + : element.querySelector + ? element.querySelector(directHeadingSelector) + : null; + + return normalize(heading?.innerText || heading?.textContent); }; const root = selectors @@ -780,6 +770,32 @@ async def _extract_root_content( const source = root ? 'root' : 'body'; const container = root || document.body; const text = container ? 
(container.innerText || '').trim() : ''; + const headingMap = new WeakMap(); + + const candidateContainers = [container, ...container.querySelectorAll(containerSelector)]; + candidateContainers.forEach(node => { + const ownHeading = getHeadingText(node); + const previousHeading = getHeadingText(node.previousElementSibling); + const heading = ownHeading || previousHeading; + if (heading) { + headingMap.set(node, heading); + } + }); + + const findHeading = element => { + let current = element.closest(containerSelector) || container; + for (let depth = 0; current && depth < 4; depth += 1) { + const heading = headingMap.get(current); + if (heading) { + return heading; + } + if (current === container) { + break; + } + current = current.parentElement?.closest(containerSelector) || null; + } + return ''; + }; const references = Array.from(container.querySelectorAll('a[href]')).map(anchor => { const rawHref = (anchor.getAttribute('href') || '').trim(); @@ -792,7 +808,7 @@ async def _extract_root_content( text: normalize(anchor.innerText || anchor.textContent), aria_label: normalize(anchor.getAttribute('aria-label')), title: normalize(anchor.getAttribute('title')), - heading: findHeading(anchor, container), + heading: findHeading(anchor), in_article: Boolean(anchor.closest('article')), in_list: Boolean(anchor.closest('li')), in_nav: Boolean(anchor.closest('nav')), diff --git a/linkedin_mcp_server/scraping/link_metadata.py b/linkedin_mcp_server/scraping/link_metadata.py index 7c5e8887..1bba7867 100644 --- a/linkedin_mcp_server/scraping/link_metadata.py +++ b/linkedin_mcp_server/scraping/link_metadata.py @@ -237,7 +237,7 @@ def choose_reference_text( if not candidates: return None - candidates.sort(key=lambda item: (len(item[1]), item[0])) + candidates.sort(key=lambda item: (_label_sort_key(item[1]), item[0], len(item[1]))) return candidates[0][1] @@ -269,6 +269,8 @@ def clean_label(value: str, kind: ReferenceKind) -> str | None: return None if value.lower() in 
_GENERIC_LABELS: return None + if len(value) < 2: + return None if len(value) > 80: return None if not re.search(r"[A-Za-z0-9]", value): @@ -354,6 +356,11 @@ def _reference_score(reference: Reference) -> tuple[int, int, int]: ) +def _label_sort_key(label: str) -> tuple[int, int]: + """Prefer concise labels, but avoid low-signal 1-2 character strings.""" + return (1 if len(label) < 3 else 0, len(label)) + + def _is_linkedin_chrome(path: str) -> bool: path = path.split("?", 1)[0].split("#", 1)[0] if not path.startswith("/"): diff --git a/tests/test_link_metadata.py b/tests/test_link_metadata.py index 535b775a..ab2a37fc 100644 --- a/tests/test_link_metadata.py +++ b/tests/test_link_metadata.py @@ -128,6 +128,27 @@ def test_prefers_shorter_clean_label_over_merged_visible_text(self): } ] + def test_deprioritizes_single_character_labels(self): + references = build_references( + [ + { + "href": "https://www.linkedin.com/in/williamhgates/", + "text": "1", + "aria_label": "Bill Gates", + } + ], + "main_profile", + ) + + assert references == [ + { + "kind": "person", + "url": "/in/williamhgates/", + "text": "Bill Gates", + "context": "top card", + } + ] + def test_drops_social_proof_company_labels(self): references = build_references( [ diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 2d3aa9d2..8f8ea552 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -767,7 +767,15 @@ async def test_early_stop_no_new_ids(self, mock_page): async def mock_extract(url, *args, **kwargs): nonlocal extract_call_count extract_call_count += 1 - return extracted("text") + if extract_call_count == 1: + return extracted( + "text", + [{"kind": "job", "url": "/jobs/view/100/", "text": "Job 100"}], + ) + return extracted( + "text", + [{"kind": "job", "url": "/jobs/view/200/", "text": "Job 200"}], + ) with ( patch.object(extractor, "_extract_search_page", side_effect=mock_extract), @@ -792,6 +800,12 @@ async def mock_extract(url, *args, **kwargs): assert 
result["job_ids"] == ["100", "200"] assert extract_call_count == 2 + assert result["references"] == { + "search_results": [ + {"kind": "job", "url": "/jobs/view/100/", "text": "Job 100"}, + {"kind": "job", "url": "/jobs/view/200/", "text": "Job 200"}, + ] + } async def test_stops_at_total_pages(self, mock_page): """Should stop when total_pages from pagination state is reached.""" @@ -1119,6 +1133,14 @@ def test_real_footer_with_languages(self): ) assert strip_linkedin_noise(text) == "Company info" + def test_preserves_real_careers_content(self): + text = "Careers\nWe're hiring globally.\nOpen roles in engineering and design." + assert strip_linkedin_noise(text) == text + + def test_preserves_real_questions_content(self): + text = "Questions?\nReach out to our recruiting team for details." + assert strip_linkedin_noise(text) == text + def test_strips_media_controls_lines(self): text = ( "Feed post number 1\n" From db9158375093969c22ef9af7a23a84f7b13331ff Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 8 Mar 2026 18:25:25 +0100 Subject: [PATCH 471/565] fix(scraping): address scheme review --- linkedin_mcp_server/scraping/extractor.py | 42 +++++++++++-------- linkedin_mcp_server/scraping/link_metadata.py | 16 +++++-- tests/test_link_metadata.py | 28 +++++++++++++ tests/test_scraping.py | 18 ++++++++ 4 files changed, 82 insertions(+), 22 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index a5aaea9b..02bcc7c0 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -797,24 +797,30 @@ async def _extract_root_content( return ''; }; - const references = Array.from(container.querySelectorAll('a[href]')).map(anchor => { - const rawHref = (anchor.getAttribute('href') || '').trim(); - const href = rawHref.startsWith('#') - ? 
rawHref - : (anchor.href || rawHref || ''); - - return { - href, - text: normalize(anchor.innerText || anchor.textContent), - aria_label: normalize(anchor.getAttribute('aria-label')), - title: normalize(anchor.getAttribute('title')), - heading: findHeading(anchor), - in_article: Boolean(anchor.closest('article')), - in_list: Boolean(anchor.closest('li')), - in_nav: Boolean(anchor.closest('nav')), - in_footer: Boolean(anchor.closest('footer')), - }; - }); + const references = Array.from(container.querySelectorAll('a[href]')) + .map(anchor => { + const rawHref = (anchor.getAttribute('href') || '').trim(); + if (!rawHref || rawHref === '#') { + return null; + } + + const href = rawHref.startsWith('#') + ? rawHref + : (anchor.href || rawHref); + + return { + href, + text: normalize(anchor.innerText || anchor.textContent), + aria_label: normalize(anchor.getAttribute('aria-label')), + title: normalize(anchor.getAttribute('title')), + heading: findHeading(anchor), + in_article: Boolean(anchor.closest('article')), + in_list: Boolean(anchor.closest('li')), + in_nav: Boolean(anchor.closest('nav')), + in_footer: Boolean(anchor.closest('footer')), + }; + }) + .filter(Boolean); return { source, text, references }; }""", diff --git a/linkedin_mcp_server/scraping/link_metadata.py b/linkedin_mcp_server/scraping/link_metadata.py index 1bba7867..dd845c0c 100644 --- a/linkedin_mcp_server/scraping/link_metadata.py +++ b/linkedin_mcp_server/scraping/link_metadata.py @@ -161,7 +161,10 @@ def normalize_url(href: str) -> str | None: return None parsed = urlparse(href) - if parsed.scheme.lower() in {"blob", "javascript", "mailto", "tel"}: + scheme = parsed.scheme.lower() + if scheme in {"blob", "javascript", "mailto", "tel"}: + return None + if scheme and scheme not in {"http", "https"}: return None host = parsed.netloc.lower() @@ -237,7 +240,7 @@ def choose_reference_text( if not candidates: return None - candidates.sort(key=lambda item: (_label_sort_key(item[1]), item[0], len(item[1]))) 
+ candidates.sort(key=lambda item: (_label_sort_key(item[1]), item[0])) return candidates[0][1] @@ -346,13 +349,13 @@ def dedupe_references( return ordered[:cap] if cap is not None else ordered -def _reference_score(reference: Reference) -> tuple[int, int, int]: +def _reference_score(reference: Reference) -> tuple[int, int, int | float]: text = reference.get("text") context = reference.get("context") return ( 1 if text else 0, 1 if context else 0, - -(len(text) if text else 999), + _missing_text_penalty(text), ) @@ -361,6 +364,11 @@ def _label_sort_key(label: str) -> tuple[int, int]: return (1 if len(label) < 3 else 0, len(label)) +def _missing_text_penalty(text: str | None) -> int | float: + """Score missing text as strictly worse than any text-bearing reference.""" + return -len(text) if text else float("-inf") + + def _is_linkedin_chrome(path: str) -> bool: path = path.split("?", 1)[0].split("#", 1)[0] if not path.startswith("/"): diff --git a/tests/test_link_metadata.py b/tests/test_link_metadata.py index ab2a37fc..0e19d2ca 100644 --- a/tests/test_link_metadata.py +++ b/tests/test_link_metadata.py @@ -82,6 +82,34 @@ def test_unwraps_redirect_and_drops_junk(self): } ] + def test_drops_non_http_external_schemes(self): + references = build_references( + [ + { + "href": "data:text/html,

hello

", + "text": "Inline payload", + }, + { + "href": "ftp://example.com/report.csv", + "text": "FTP report", + }, + { + "href": "https://example.com/report.csv", + "text": "HTTPS report", + }, + ], + "posts", + ) + + assert references == [ + { + "kind": "external", + "url": "https://example.com/report.csv", + "text": "HTTPS report", + "context": "post attachment", + } + ] + def test_prefers_cleaner_duplicate_label(self): references = build_references( [ diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 8f8ea552..e1929f7b 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -163,6 +163,24 @@ async def test_extract_page_returns_text(self, mock_page): assert result.references == [] mock_page.goto.assert_awaited_once() + async def test_root_content_filters_empty_href_before_resolution(self, mock_page): + mock_page.evaluate = AsyncMock( + return_value={ + "source": "root", + "text": "Sample profile text", + "references": [], + } + ) + extractor = LinkedInExtractor(mock_page) + + await extractor._extract_root_content(["main"]) + + await_args = mock_page.evaluate.await_args + assert await_args is not None + script = await_args.args[0] + assert "if (!rawHref || rawHref === '#')" in script + assert ".filter(Boolean);" in script + async def test_extract_page_returns_empty_on_failure(self, mock_page): mock_page.goto = AsyncMock(side_effect=Exception("Network error")) extractor = LinkedInExtractor(mock_page) From 5e026d9723dce158b4ef808cf217e1bc645df8ed Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 8 Mar 2026 20:08:05 +0100 Subject: [PATCH 472/565] fix(scraping): polish review cleanup --- linkedin_mcp_server/scraping/extractor.py | 10 ++++++++-- linkedin_mcp_server/scraping/link_metadata.py | 5 ++--- tests/test_link_metadata.py | 4 ++-- tests/test_scraping.py | 6 ++++-- 4 files changed, 16 insertions(+), 9 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 
02bcc7c0..e4e1bb94 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -750,6 +750,7 @@ async def _extract_root_content( const containerSelector = 'section, article, li, div'; const headingSelector = 'h1, h2, h3'; const directHeadingSelector = ':scope > h1, :scope > h2, :scope > h3'; + const MAX_HEADING_CONTAINERS = 300; const getHeadingText = element => { if (!element) return ''; @@ -772,7 +773,13 @@ async def _extract_root_content( const text = container ? (container.innerText || '').trim() : ''; const headingMap = new WeakMap(); - const candidateContainers = [container, ...container.querySelectorAll(containerSelector)]; + const candidateContainers = [ + container, + ...Array.from(container.querySelectorAll(containerSelector)).slice( + 0, + MAX_HEADING_CONTAINERS, + ), + ]; candidateContainers.forEach(node => { const ownHeading = getHeadingText(node); const previousHeading = getHeadingText(node.previousElementSibling); @@ -815,7 +822,6 @@ async def _extract_root_content( title: normalize(anchor.getAttribute('title')), heading: findHeading(anchor), in_article: Boolean(anchor.closest('article')), - in_list: Boolean(anchor.closest('li')), in_nav: Boolean(anchor.closest('nav')), in_footer: Boolean(anchor.closest('footer')), }; diff --git a/linkedin_mcp_server/scraping/link_metadata.py b/linkedin_mcp_server/scraping/link_metadata.py index dd845c0c..7ed59bc5 100644 --- a/linkedin_mcp_server/scraping/link_metadata.py +++ b/linkedin_mcp_server/scraping/link_metadata.py @@ -36,7 +36,6 @@ class RawReference(TypedDict, total=False): title: str heading: str in_article: bool - in_list: bool in_nav: bool in_footer: bool @@ -212,10 +211,10 @@ def classify_link(href: str) -> tuple[ReferenceKind, str] | None: return "newsletter", f"/newsletters/{match.group(1)}/" if match := _PULSE_PATH_RE.match(path): - return "article", f"/pulse/{match.group(1)}" + return "article", f"/pulse/{match.group(1)}/" if match := 
_FEED_PATH_RE.match(path): - return "feed_post", f"/feed/update/{match.group(1)}" + return "feed_post", f"/feed/update/{match.group(1)}/" return None diff --git a/tests/test_link_metadata.py b/tests/test_link_metadata.py index 0e19d2ca..9b153f94 100644 --- a/tests/test_link_metadata.py +++ b/tests/test_link_metadata.py @@ -44,7 +44,7 @@ def test_canonicalizes_and_types_linkedin_urls(self): }, { "kind": "article", - "url": "/pulse/phone-call-saves-lives-bill-gates-yspvc", + "url": "/pulse/phone-call-saves-lives-bill-gates-yspvc/", "text": "A phone call that saves lives", "context": "top card", }, @@ -150,7 +150,7 @@ def test_prefers_shorter_clean_label_over_merged_visible_text(self): assert references == [ { "kind": "article", - "url": "/pulse/test-post", + "url": "/pulse/test-post/", "text": "A phone call that saves lives", "context": "post attachment", } diff --git a/tests/test_scraping.py b/tests/test_scraping.py index e1929f7b..3fc6b7ad 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -178,7 +178,9 @@ async def test_root_content_filters_empty_href_before_resolution(self, mock_page await_args = mock_page.evaluate.await_args assert await_args is not None script = await_args.args[0] + assert "MAX_HEADING_CONTAINERS = 300" in script assert "if (!rawHref || rawHref === '#')" in script + assert "in_list" not in script assert ".filter(Boolean);" in script async def test_extract_page_returns_empty_on_failure(self, mock_page): @@ -475,7 +477,7 @@ async def test_references_are_grouped_by_section(self, mock_page): [ { "kind": "article", - "url": "/pulse/test-post", + "url": "/pulse/test-post/", "text": "Test post", } ], @@ -500,7 +502,7 @@ async def test_references_are_grouped_by_section(self, mock_page): {"kind": "person", "url": "/in/testuser/", "text": "Test User"} ], "posts": [ - {"kind": "article", "url": "/pulse/test-post", "text": "Test post"} + {"kind": "article", "url": "/pulse/test-post/", "text": "Test post"} ], } From 
00a2a372379ac1f050ce2d954aca438c704d8919 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 8 Mar 2026 21:48:30 +0100 Subject: [PATCH 473/565] fix(scraping): refine context heuristics --- linkedin_mcp_server/scraping/extractor.py | 14 ++++++++++++- linkedin_mcp_server/scraping/link_metadata.py | 4 +++- tests/test_link_metadata.py | 21 +++++++++++++++++++ tests/test_scraping.py | 2 ++ 4 files changed, 39 insertions(+), 2 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index e4e1bb94..de03c9c0 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -765,6 +765,18 @@ async def _extract_root_content( return normalize(heading?.innerText || heading?.textContent); }; + const getPreviousHeading = node => { + let sibling = node?.previousElementSibling || null; + for (let index = 0; sibling && index < 3; index += 1) { + const heading = getHeadingText(sibling); + if (heading) { + return heading; + } + sibling = sibling.previousElementSibling; + } + return ''; + }; + const root = selectors .map(selector => document.querySelector(selector)) .find(Boolean); @@ -782,7 +794,7 @@ async def _extract_root_content( ]; candidateContainers.forEach(node => { const ownHeading = getHeadingText(node); - const previousHeading = getHeadingText(node.previousElementSibling); + const previousHeading = getPreviousHeading(node); const heading = ownHeading || previousHeading; if (heading) { headingMap.set(node, heading); diff --git a/linkedin_mcp_server/scraping/link_metadata.py b/linkedin_mcp_server/scraping/link_metadata.py index 7ed59bc5..6f675c25 100644 --- a/linkedin_mcp_server/scraping/link_metadata.py +++ b/linkedin_mcp_server/scraping/link_metadata.py @@ -298,9 +298,11 @@ def derive_context( if section_name == "posts": if kind == "person": return "post author" + if kind == "feed_post": + return "company post" if raw.get("in_article"): return "post attachment" - return 
"company post" if kind == "feed_post" else "post attachment" + return "post attachment" if section_name in {"main_profile", "about"}: if heading in _CONTEXT_LABELS: diff --git a/tests/test_link_metadata.py b/tests/test_link_metadata.py index 9b153f94..767dee6c 100644 --- a/tests/test_link_metadata.py +++ b/tests/test_link_metadata.py @@ -177,6 +177,27 @@ def test_deprioritizes_single_character_labels(self): } ] + def test_prefers_company_post_context_for_feed_posts(self): + references = build_references( + [ + { + "href": "https://www.linkedin.com/feed/update/urn:li:activity:123/", + "text": "Original company post", + "in_article": True, + } + ], + "posts", + ) + + assert references == [ + { + "kind": "feed_post", + "url": "/feed/update/urn:li:activity:123/", + "text": "Original company post", + "context": "company post", + } + ] + def test_drops_social_proof_company_labels(self): references = build_references( [ diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 3fc6b7ad..ef83bdcf 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -179,6 +179,8 @@ async def test_root_content_filters_empty_href_before_resolution(self, mock_page assert await_args is not None script = await_args.args[0] assert "MAX_HEADING_CONTAINERS = 300" in script + assert "const getPreviousHeading = node =>" in script + assert "index < 3" in script assert "if (!rawHref || rawHref === '#')" in script assert "in_list" not in script assert ".filter(Boolean);" in script From 54fa836eda26c52a2a40690ef88e9f0d994f4b5e Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 8 Mar 2026 21:57:24 +0100 Subject: [PATCH 474/565] fix(scraping): address final review --- linkedin_mcp_server/scraping/extractor.py | 2 ++ linkedin_mcp_server/scraping/link_metadata.py | 2 -- tests/test_link_metadata.py | 2 +- tests/test_scraping.py | 10 +++++++++- 4 files changed, 12 insertions(+), 4 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py 
b/linkedin_mcp_server/scraping/extractor.py index de03c9c0..3c1abc78 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -669,6 +669,8 @@ async def search_jobs( self._page.url, ) page_texts.append(extracted.text) + if extracted.references: + page_references.extend(extracted.references) break page_ids = await self._extract_job_ids() new_ids = [jid for jid in page_ids if jid not in seen_ids] diff --git a/linkedin_mcp_server/scraping/link_metadata.py b/linkedin_mcp_server/scraping/link_metadata.py index 6f675c25..173944c0 100644 --- a/linkedin_mcp_server/scraping/link_metadata.py +++ b/linkedin_mcp_server/scraping/link_metadata.py @@ -300,8 +300,6 @@ def derive_context( return "post author" if kind == "feed_post": return "company post" - if raw.get("in_article"): - return "post attachment" return "post attachment" if section_name in {"main_profile", "about"}: diff --git a/tests/test_link_metadata.py b/tests/test_link_metadata.py index 767dee6c..d805f7a2 100644 --- a/tests/test_link_metadata.py +++ b/tests/test_link_metadata.py @@ -156,7 +156,7 @@ def test_prefers_shorter_clean_label_over_merged_visible_text(self): } ] - def test_deprioritizes_single_character_labels(self): + def test_rejects_single_character_labels(self): references = build_references( [ { diff --git a/tests/test_scraping.py b/tests/test_scraping.py index ef83bdcf..7999067e 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -1052,7 +1052,10 @@ async def test_url_redirect_skips_id_extraction(self, mock_page): extractor, "_extract_search_page", new_callable=AsyncMock, - return_value=extracted("Login page content"), + return_value=extracted( + "Login page content", + [{"kind": "person", "url": "/in/testuser/", "text": "Test User"}], + ), ), patch.object( extractor, @@ -1076,6 +1079,11 @@ async def test_url_redirect_skips_id_extraction(self, mock_page): mock_ids.assert_not_awaited() assert result["job_ids"] == [] assert 
result["sections"]["search_results"] == "Login page content" + assert result["references"] == { + "search_results": [ + {"kind": "person", "url": "/in/testuser/", "text": "Test User"} + ] + } async def test_rate_limited_skips_ids_and_text(self, mock_page): """Rate-limited pages should yield no IDs or text.""" From 1f60afae596c9a7c29029ee9db9ff8f02289cec4 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 8 Mar 2026 22:36:33 +0100 Subject: [PATCH 475/565] fix(auth): detect login barriers --- linkedin_mcp_server/core/__init__.py | 8 ++- linkedin_mcp_server/core/auth.py | 68 ++++++++++++++++++++--- linkedin_mcp_server/scraping/extractor.py | 42 ++++++++++++-- tests/test_core_auth.py | 34 ++++++++++++ tests/test_scraping.py | 34 ++++++++++++ 5 files changed, 172 insertions(+), 14 deletions(-) create mode 100644 tests/test_core_auth.py diff --git a/linkedin_mcp_server/core/__init__.py b/linkedin_mcp_server/core/__init__.py index a0a9a326..caf36726 100644 --- a/linkedin_mcp_server/core/__init__.py +++ b/linkedin_mcp_server/core/__init__.py @@ -1,6 +1,11 @@ """Core browser management, authentication, and scraping utilities.""" -from .auth import is_logged_in, wait_for_manual_login, warm_up_browser +from .auth import ( + detect_auth_barrier, + is_logged_in, + wait_for_manual_login, + warm_up_browser, +) from .browser import BrowserManager from .exceptions import ( AuthenticationError, @@ -16,6 +21,7 @@ __all__ = [ "AuthenticationError", "BrowserManager", + "detect_auth_barrier", "ElementNotFoundError", "LinkedInScraperException", "NetworkError", diff --git a/linkedin_mcp_server/core/auth.py b/linkedin_mcp_server/core/auth.py index a4fb20b1..21bbe567 100644 --- a/linkedin_mcp_server/core/auth.py +++ b/linkedin_mcp_server/core/auth.py @@ -2,6 +2,7 @@ import asyncio import logging +import re from patchright.async_api import Page, TimeoutError as PlaywrightTimeoutError @@ -9,6 +10,25 @@ logger = logging.getLogger(__name__) +_AUTH_BLOCKER_URL_PATTERNS = ( + "/login", + 
"/authwall", + "/checkpoint", + "/challenge", + "/uas/login", + "/uas/consumer-email-challenge", +) +_LOGIN_TITLE_PATTERNS = ( + "linkedin login", + "sign in | linkedin", +) +_AUTH_BARRIER_TEXT_MARKERS = ( + ("welcome back", "sign in using another account"), + ("welcome back", "join now"), + ("choose an account",), + ("continue as",), +) + async def warm_up_browser(page: Page) -> None: """Visit normal sites to appear more human-like before LinkedIn access.""" @@ -49,15 +69,7 @@ async def is_logged_in(page: Page) -> bool: current_url = page.url # Step 1: Fail-fast on auth blockers - auth_blockers = [ - "/login", - "/authwall", - "/checkpoint", - "/challenge", - "/uas/login", - "/uas/consumer-email-challenge", - ] - if any(pattern in current_url for pattern in auth_blockers): + if any(pattern in current_url for pattern in _AUTH_BLOCKER_URL_PATTERNS): return False # Step 2: Selector check (PRIMARY) @@ -92,6 +104,44 @@ async def is_logged_in(page: Page) -> bool: raise +async def detect_auth_barrier(page: Page) -> str | None: + """Detect LinkedIn auth/account-picker barriers on the current page.""" + try: + current_url = page.url + if any(pattern in current_url for pattern in _AUTH_BLOCKER_URL_PATTERNS): + return f"auth blocker URL: {current_url}" + + try: + title = (await page.title()).strip().lower() + except Exception: + title = "" + if any(pattern in title for pattern in _LOGIN_TITLE_PATTERNS): + return f"login title: {title}" + + try: + body_text = await page.evaluate("() => document.body?.innerText || ''") + except Exception: + body_text = "" + if not isinstance(body_text, str): + body_text = "" + + normalized = re.sub(r"\s+", " ", body_text).strip().lower() + for marker_group in _AUTH_BARRIER_TEXT_MARKERS: + if all(marker in normalized for marker in marker_group): + return f"auth barrier text: {' + '.join(marker_group)}" + + return None + except PlaywrightTimeoutError: + logger.warning( + "Timeout checking auth barrier on %s โ€” continuing without barrier 
detection", + page.url, + ) + return None + except Exception: + logger.error("Unexpected error checking auth barrier", exc_info=True) + return None + + async def wait_for_manual_login(page: Page, timeout: int = 300000) -> None: """Wait for user to manually complete login. diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 3c1abc78..b5da8187 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -9,7 +9,11 @@ from patchright.async_api import Page, TimeoutError as PlaywrightTimeoutError -from linkedin_mcp_server.core.exceptions import LinkedInScraperException +from linkedin_mcp_server.core import detect_auth_barrier +from linkedin_mcp_server.core.exceptions import ( + AuthenticationError, + LinkedInScraperException, +) from linkedin_mcp_server.core.utils import ( detect_rate_limit, handle_modal_close, @@ -137,6 +141,36 @@ class LinkedInExtractor: def __init__(self, page: Page): self._page = page + async def _raise_if_auth_barrier( + self, + url: str, + *, + navigation_error: Exception | None = None, + ) -> None: + """Raise an auth error when LinkedIn shows login/account-picker UI.""" + barrier = await detect_auth_barrier(self._page) + if not barrier: + return + + logger.warning("Authentication barrier detected on %s: %s", url, barrier) + message = ( + "LinkedIn requires interactive re-authentication. " + "Run with --login and complete the account selection/sign-in flow." 
+ ) + if navigation_error is not None: + raise AuthenticationError(message) from navigation_error + raise AuthenticationError(message) + + async def _navigate_to_page(self, url: str) -> None: + """Navigate to a LinkedIn page and fail fast on auth barriers.""" + try: + await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + except Exception as exc: + await self._raise_if_auth_barrier(url, navigation_error=exc) + raise + + await self._raise_if_auth_barrier(url) + async def extract_page( self, url: str, @@ -174,7 +208,7 @@ async def _extract_page_once( section_name: str | None = None, ) -> ExtractedSection: """Single attempt to navigate, scroll, and extract innerText.""" - await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + await self._navigate_to_page(url) await detect_rate_limit(self._page) # Wait for main content to render @@ -262,7 +296,7 @@ async def _extract_overlay_once( section_name: str | None = None, ) -> ExtractedSection: """Single attempt to extract content from an overlay/modal page.""" - await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + await self._navigate_to_page(url) await detect_rate_limit(self._page) # Wait for the dialog/modal to render (LinkedIn uses native ) @@ -475,7 +509,7 @@ async def _extract_search_page_once( section_name: str = "", ) -> ExtractedSection: """Single attempt to navigate, scroll sidebar, and extract innerText.""" - await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + await self._navigate_to_page(url) await detect_rate_limit(self._page) main_found = True diff --git a/tests/test_core_auth.py b/tests/test_core_auth.py new file mode 100644 index 00000000..93fbb7eb --- /dev/null +++ b/tests/test_core_auth.py @@ -0,0 +1,34 @@ +"""Tests for auth barrier detection helpers.""" + +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from linkedin_mcp_server.core.auth import detect_auth_barrier + + +@pytest.mark.asyncio +async def 
test_detect_auth_barrier_for_account_picker(): + page = MagicMock() + page.url = "https://www.linkedin.com/login" + page.title = AsyncMock(return_value="LinkedIn Login, Sign in | LinkedIn") + page.evaluate = AsyncMock( + return_value="Welcome Back\nSign in using another account\nJoin now" + ) + + result = await detect_auth_barrier(page) + + assert result is not None + assert "auth blocker URL" in result + + +@pytest.mark.asyncio +async def test_detect_auth_barrier_returns_none_for_authenticated_page(): + page = MagicMock() + page.url = "https://www.linkedin.com/feed/" + page.title = AsyncMock(return_value="LinkedIn Feed") + page.evaluate = AsyncMock(return_value="Home\nMy Network\nJobs\nMessaging") + + result = await detect_auth_barrier(page) + + assert result is None diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 7999067e..033d6b38 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -4,6 +4,7 @@ import pytest +from linkedin_mcp_server.core.exceptions import AuthenticationError from linkedin_mcp_server.scraping.extractor import ( ExtractedSection, LinkedInExtractor, @@ -113,6 +114,7 @@ def mock_page(): """Create a mock Patchright page.""" page = MagicMock() page.goto = AsyncMock() + page.title = AsyncMock(return_value="LinkedIn") page.wait_for_selector = AsyncMock() page.evaluate = AsyncMock( return_value={"source": "root", "text": "Sample page text", "references": []} @@ -193,6 +195,20 @@ async def test_extract_page_returns_empty_on_failure(self, mock_page): assert result.text == "" assert result.references == [] + async def test_extract_page_raises_auth_error_for_account_picker(self, mock_page): + mock_page.goto = AsyncMock(side_effect=Exception("net::ERR_TOO_MANY_REDIRECTS")) + extractor = LinkedInExtractor(mock_page) + + with ( + patch( + "linkedin_mcp_server.scraping.extractor.detect_auth_barrier", + new_callable=AsyncMock, + return_value="auth barrier text: welcome back + sign in using another account", + ), + 
pytest.raises(AuthenticationError, match="--login"), + ): + await extractor.extract_page("https://www.linkedin.com/in/testuser/") + async def test_rate_limit_detected(self, mock_page): from linkedin_mcp_server.core.exceptions import RateLimitError @@ -291,6 +307,24 @@ async def root_content_side_effect(*args, **kwargs): assert result.text == "Education\nHarvard University\n1973 โ€“ 1975" + async def test_extract_search_page_raises_auth_error_for_login_barrier( + self, mock_page + ): + extractor = LinkedInExtractor(mock_page) + with ( + patch.object( + extractor, + "_navigate_to_page", + new_callable=AsyncMock, + side_effect=AuthenticationError("Run with --login"), + ), + pytest.raises(AuthenticationError, match="--login"), + ): + await extractor._extract_search_page_once( + "https://www.linkedin.com/jobs/search/?keywords=test", + section_name="search_results", + ) + class TestScrapePersonUrls: """Test that scrape_person visits the correct URLs per section set.""" From c61c4290d02288fbce1f1e117c12a9ce1c40a981 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 8 Mar 2026 23:01:34 +0100 Subject: [PATCH 476/565] fix(scraping): tighten rate-limit handling --- linkedin_mcp_server/scraping/extractor.py | 46 ++++---- linkedin_mcp_server/scraping/link_metadata.py | 2 +- tests/test_scraping.py | 103 ++++++++++++++++++ 3 files changed, 131 insertions(+), 20 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index b5da8187..5cb4fb93 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -120,13 +120,7 @@ def strip_linkedin_noise(text: str) -> str: Finds the earliest occurrence of any known noise marker and truncates there. 
""" - earliest = len(text) - for pattern in _NOISE_MARKERS: - match = pattern.search(text) - if match and match.start() < earliest: - earliest = match.start() - - cleaned = text[:earliest].strip() + cleaned = _truncate_linkedin_noise(text) filtered_lines = [ line for line in cleaned.splitlines() @@ -135,6 +129,17 @@ def strip_linkedin_noise(text: str) -> str: return "\n".join(filtered_lines).strip() +def _truncate_linkedin_noise(text: str) -> str: + """Trim known LinkedIn chrome blocks before any per-line noise filtering.""" + earliest = len(text) + for pattern in _NOISE_MARKERS: + match = pattern.search(text) + if match and match.start() < earliest: + earliest = match.start() + + return text[:earliest].strip() + + class LinkedInExtractor: """Extracts LinkedIn page content via navigate-scroll-innerText pattern.""" @@ -247,12 +252,13 @@ async def _extract_page_once( if not raw: return ExtractedSection(text="", references=[]) - cleaned = strip_linkedin_noise(raw) - if not cleaned and raw.strip(): + truncated = _truncate_linkedin_noise(raw) + if not truncated and raw.strip(): logger.warning( "Page %s returned only LinkedIn chrome (likely rate-limited)", url ) return ExtractedSection(text=_RATE_LIMITED_MSG, references=[]) + cleaned = strip_linkedin_noise(raw) return ExtractedSection( text=cleaned, references=build_references(raw_result["references"], section_name or ""), @@ -318,13 +324,14 @@ async def _extract_overlay_once( if not raw: return ExtractedSection(text="", references=[]) - cleaned = strip_linkedin_noise(raw) - if not cleaned and raw.strip(): + truncated = _truncate_linkedin_noise(raw) + if not truncated and raw.strip(): logger.warning( "Overlay %s returned only LinkedIn chrome (likely rate-limited)", url, ) return ExtractedSection(text=_RATE_LIMITED_MSG, references=[]) + cleaned = strip_linkedin_noise(raw) return ExtractedSection( text=cleaned, references=build_references(raw_result["references"], section_name or ""), @@ -359,7 +366,7 @@ async def 
scrape_person(self, username: str, requested: set[str]) -> dict[str, A else: extracted = await self.extract_page(url, section_name=section_name) - if extracted.text: + if extracted.text and extracted.text != _RATE_LIMITED_MSG: sections[section_name] = extracted.text if extracted.references: references[section_name] = extracted.references @@ -407,7 +414,7 @@ async def scrape_company( else: extracted = await self.extract_page(url, section_name=section_name) - if extracted.text: + if extracted.text and extracted.text != _RATE_LIMITED_MSG: sections[section_name] = extracted.text if extracted.references: references[section_name] = extracted.references @@ -435,9 +442,9 @@ async def scrape_job(self, job_id: str) -> dict[str, Any]: sections: dict[str, str] = {} references: dict[str, list[Reference]] = {} - if extracted.text: + if extracted.text and extracted.text != _RATE_LIMITED_MSG: sections["job_posting"] = extracted.text - if extracted.references: + if extracted.text != _RATE_LIMITED_MSG and extracted.references: references["job_posting"] = extracted.references result: dict[str, Any] = { @@ -535,13 +542,14 @@ async def _extract_search_page_once( if not raw: return ExtractedSection(text="", references=[]) - cleaned = strip_linkedin_noise(raw) - if not cleaned and raw.strip(): + truncated = _truncate_linkedin_noise(raw) + if not truncated and raw.strip(): logger.warning( "Search page %s returned only LinkedIn chrome (likely rate-limited)", url, ) return ExtractedSection(text=_RATE_LIMITED_MSG, references=[]) + cleaned = strip_linkedin_noise(raw) return ExtractedSection( text=cleaned, references=build_references(raw_result["references"], section_name), @@ -762,9 +770,9 @@ async def search_people( sections: dict[str, str] = {} references: dict[str, list[Reference]] = {} - if extracted.text: + if extracted.text and extracted.text != _RATE_LIMITED_MSG: sections["search_results"] = extracted.text - if extracted.references: + if extracted.text != _RATE_LIMITED_MSG and 
extracted.references: references["search_results"] = extracted.references result: dict[str, Any] = { diff --git a/linkedin_mcp_server/scraping/link_metadata.py b/linkedin_mcp_server/scraping/link_metadata.py index 173944c0..e40144e3 100644 --- a/linkedin_mcp_server/scraping/link_metadata.py +++ b/linkedin_mcp_server/scraping/link_metadata.py @@ -359,7 +359,7 @@ def _reference_score(reference: Reference) -> tuple[int, int, int | float]: def _label_sort_key(label: str) -> tuple[int, int]: - """Prefer concise labels, but avoid low-signal 1-2 character strings.""" + """Prefer concise labels, but deprioritize short 2-character strings.""" return (1 if len(label) < 3 else 0, len(label)) diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 033d6b38..e7983442 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -9,6 +9,7 @@ ExtractedSection, LinkedInExtractor, _RATE_LIMITED_MSG, + _truncate_linkedin_noise, strip_linkedin_noise, ) from linkedin_mcp_server.scraping.link_metadata import Reference @@ -116,6 +117,7 @@ def mock_page(): page.goto = AsyncMock() page.title = AsyncMock(return_value="LinkedIn") page.wait_for_selector = AsyncMock() + page.wait_for_function = AsyncMock() page.evaluate = AsyncMock( return_value={"source": "root", "text": "Sample page text", "references": []} ) @@ -307,6 +309,40 @@ async def root_content_side_effect(*args, **kwargs): assert result.text == "Education\nHarvard University\n1973 โ€“ 1975" + async def test_media_only_controls_are_not_misclassified_as_rate_limited( + self, mock_page + ): + mock_page.evaluate = AsyncMock( + return_value={ + "source": "root", + "text": "Play\nLoaded: 100.00%\nRemaining time 0:07\nShow captions", + "references": [], + } + ) + extractor = LinkedInExtractor(mock_page) + with ( + patch( + "linkedin_mcp_server.scraping.extractor.scroll_to_bottom", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.detect_rate_limit", + new_callable=AsyncMock, + ), + patch( + 
"linkedin_mcp_server.scraping.extractor.handle_modal_close", + new_callable=AsyncMock, + return_value=False, + ), + ): + result = await extractor._extract_page_once( + "https://www.linkedin.com/in/testuser/recent-activity/all/", + section_name="posts", + ) + + assert result.text == "" + assert result.references == [] + async def test_extract_search_page_raises_auth_error_for_login_barrier( self, mock_page ): @@ -577,6 +613,34 @@ async def extract_with_failure(url, *args, **kwargs): assert "education" in result["sections"] assert "experience" not in result["sections"] + async def test_rate_limited_sections_are_omitted(self, mock_page): + extractor = LinkedInExtractor(mock_page) + with ( + patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + side_effect=[ + extracted(_RATE_LIMITED_MSG), + extracted("Post text"), + ], + ), + patch.object( + extractor, + "_extract_overlay", + new_callable=AsyncMock, + return_value=extracted(""), + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.scrape_person("testuser", {"posts"}) + + assert "main_profile" not in result["sections"] + assert result["sections"]["posts"] == "Post text" + class TestScrapeCompany: async def test_company_baseline_always_included(self, mock_page): @@ -648,6 +712,28 @@ async def test_all_sections_visit_correct_urls(self, mock_page): assert any("/jobs/" in u for u in urls) assert set(result["sections"]) == {"about", "posts", "jobs"} + async def test_rate_limited_company_sections_are_omitted(self, mock_page): + extractor = LinkedInExtractor(mock_page) + with ( + patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + side_effect=[ + extracted(_RATE_LIMITED_MSG), + extracted("Posts text"), + ], + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.scrape_company("testcorp", {"posts"}) + + assert "about" not in 
result["sections"] + assert result["sections"]["posts"] == "Posts text" + class TestScrapeJob: async def test_scrape_job(self, mock_page): @@ -665,6 +751,18 @@ async def test_scrape_job(self, mock_page): assert "pages_visited" not in result assert "sections_requested" not in result + async def test_scrape_job_omits_rate_limited_sentinel(self, mock_page): + extractor = LinkedInExtractor(mock_page) + with patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value=extracted(_RATE_LIMITED_MSG), + ): + result = await extractor.scrape_job("12345") + + assert result["sections"] == {} + class TestSearchJobs: """Tests for search_jobs with job ID extraction and pagination.""" @@ -1181,6 +1279,11 @@ def test_no_noise_returns_unchanged(self): def test_empty_string(self): assert strip_linkedin_noise("") == "" + def test_truncate_noise_preserves_media_controls_for_rate_limit_detection(self): + text = "Play\nLoaded: 100.00%\nRemaining time 0:07\nShow captions" + assert _truncate_linkedin_noise(text) == text + assert strip_linkedin_noise(text) == "" + def test_about_in_profile_content_not_stripped(self): """'About' followed by actual content (not 'Accessibility') should be preserved.""" text = "About\nChair of the Gates Foundation.\n\nFeatured\nPost" From 0b39227ee7ccc937a52310d423b50b46e6cf88ef Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 9 Mar 2026 01:34:08 +0100 Subject: [PATCH 477/565] fix(scraping): polish reference quality --- linkedin_mcp_server/scraping/link_metadata.py | 17 ++-- linkedin_mcp_server/tools/company.py | 8 +- tests/test_link_metadata.py | 87 +++++++++++++++++++ tests/test_tools.py | 17 +++- 4 files changed, 119 insertions(+), 10 deletions(-) diff --git a/linkedin_mcp_server/scraping/link_metadata.py b/linkedin_mcp_server/scraping/link_metadata.py index e40144e3..bdab4bbd 100644 --- a/linkedin_mcp_server/scraping/link_metadata.py +++ b/linkedin_mcp_server/scraping/link_metadata.py @@ -187,7 +187,7 @@ def 
classify_link(href: str) -> tuple[ReferenceKind, str] | None: if not _is_linkedin_host(host): return "external", urlunparse( - (parsed.scheme, parsed.netloc, parsed.path or "/", "", parsed.query, "") + (parsed.scheme, parsed.netloc, parsed.path or "/", "", "", "") ) if _is_linkedin_chrome(path): @@ -249,7 +249,12 @@ def clean_label(value: str, kind: ReferenceKind) -> str | None: if not value: return None - value = re.sub(r"^(?:View:|View|Open article:)\s*", "", value, flags=re.IGNORECASE) + value = re.sub( + r"^(?:View:\s*|View\b\s+|Open article:\s*)", + "", + value, + flags=re.IGNORECASE, + ) value = re.sub(r"[โ€™']s\s+graphic link$", "", value, flags=re.IGNORECASE) value = re.sub(r"\s+graphic link$", "", value, flags=re.IGNORECASE) value = value.strip(" :-") @@ -354,7 +359,7 @@ def _reference_score(reference: Reference) -> tuple[int, int, int | float]: return ( 1 if text else 0, 1 if context else 0, - _missing_text_penalty(text), + _text_score(text), ) @@ -363,9 +368,9 @@ def _label_sort_key(label: str) -> tuple[int, int]: return (1 if len(label) < 3 else 0, len(label)) -def _missing_text_penalty(text: str | None) -> int | float: - """Score missing text as strictly worse than any text-bearing reference.""" - return -len(text) if text else float("-inf") +def _text_score(text: str | None) -> int | float: + """Prefer richer labels while scoring missing text as strictly worst.""" + return len(text) if text else float("-inf") def _is_linkedin_chrome(path: str) -> bool: diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 85713d90..c38ebe67 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -15,6 +15,8 @@ from linkedin_mcp_server.dependencies import get_extractor from linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.scraping import LinkedInExtractor, parse_company_sections +from linkedin_mcp_server.scraping.extractor import _RATE_LIMITED_MSG +from 
linkedin_mcp_server.scraping.link_metadata import Reference logger = logging.getLogger(__name__) @@ -109,10 +111,10 @@ async def get_company_posts( extracted = await extractor.extract_page(url, section_name="posts") sections: dict[str, str] = {} - references: dict[str, Any] = {} - if extracted.text: + references: dict[str, list[Reference]] = {} + if extracted.text and extracted.text != _RATE_LIMITED_MSG: sections["posts"] = extracted.text - if extracted.references: + if extracted.text != _RATE_LIMITED_MSG and extracted.references: references["posts"] = extracted.references await ctx.report_progress(progress=100, total=100, message="Complete") diff --git a/tests/test_link_metadata.py b/tests/test_link_metadata.py index d805f7a2..628926c2 100644 --- a/tests/test_link_metadata.py +++ b/tests/test_link_metadata.py @@ -110,6 +110,30 @@ def test_drops_non_http_external_schemes(self): } ] + def test_dedupes_external_tracking_variants(self): + references = build_references( + [ + { + "href": "https://example.com/report?utm_source=linkedin", + "text": "Report", + }, + { + "href": "https://example.com/report?utm_source=share", + "text": "Detailed annual report", + }, + ], + "posts", + ) + + assert references == [ + { + "kind": "external", + "url": "https://example.com/report", + "text": "Detailed annual report", + "context": "post attachment", + } + ] + def test_prefers_cleaner_duplicate_label(self): references = build_references( [ @@ -177,6 +201,26 @@ def test_rejects_single_character_labels(self): } ] + def test_preserves_words_starting_with_view(self): + references = build_references( + [ + { + "href": "https://www.linkedin.com/company/viewpoint-economics/", + "text": "Viewpoint Economics", + } + ], + "about", + ) + + assert references == [ + { + "kind": "company", + "url": "/company/viewpoint-economics/", + "text": "Viewpoint Economics", + "context": "top card", + } + ] + def test_prefers_company_post_context_for_feed_posts(self): references = build_references( [ @@ 
-222,6 +266,25 @@ def test_drops_social_proof_company_labels(self): } ] + def test_drops_nav_and_footer_anchors(self): + references = build_references( + [ + { + "href": "https://www.linkedin.com/in/williamhgates/", + "text": "Bill Gates", + "in_nav": True, + }, + { + "href": "https://www.linkedin.com/company/gates-foundation/", + "text": "Gates Foundation", + "in_footer": True, + }, + ], + "main_profile", + ) + + assert references == [] + def test_caps_results_per_section(self): raw: list[RawReference] = [ { @@ -237,6 +300,30 @@ def test_caps_results_per_section(self): assert references[0]["url"] == "/company/test-0/" assert references[-1]["url"] == "/company/test-11/" + def test_prefers_richer_duplicate_text(self): + references = build_references( + [ + { + "href": "https://www.linkedin.com/jobs/view/12345/", + "text": "Job", + }, + { + "href": "https://www.linkedin.com/jobs/view/12345/", + "text": "Senior Software Engineer", + }, + ], + "search_results", + ) + + assert references == [ + { + "kind": "job", + "url": "/jobs/view/12345/", + "text": "Senior Software Engineer", + "context": "job result", + } + ] + def test_uses_search_result_contexts(self): references = build_references( [ diff --git a/tests/test_tools.py b/tests/test_tools.py index 94ac2a77..e8ea93d9 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -4,7 +4,7 @@ import pytest from fastmcp import FastMCP -from linkedin_mcp_server.scraping.extractor import ExtractedSection +from linkedin_mcp_server.scraping.extractor import ExtractedSection, _RATE_LIMITED_MSG async def get_tool_fn( @@ -223,6 +223,21 @@ async def test_get_company_posts(self, mock_context): assert "pages_visited" not in result assert "sections_requested" not in result + async def test_get_company_posts_omits_rate_limited_sentinel(self, mock_context): + mock_extractor = MagicMock() + mock_extractor.extract_page = AsyncMock( + return_value=ExtractedSection(text=_RATE_LIMITED_MSG, references=[]) + ) + + from 
linkedin_mcp_server.tools.company import register_company_tools + + mcp = FastMCP("test") + register_company_tools(mcp) + + tool_fn = await get_tool_fn(mcp, "get_company_posts") + result = await tool_fn("testcorp", mock_context, extractor=mock_extractor) + assert result["sections"] == {} + class TestJobTools: async def test_get_job_details(self, mock_context): From f3e83c3d0533360031ce4e063e1cc685039a465a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 9 Mar 2026 01:44:50 +0100 Subject: [PATCH 478/565] fix(scraping): drop orphaned references --- linkedin_mcp_server/scraping/extractor.py | 8 ++--- linkedin_mcp_server/tools/company.py | 4 +-- tests/test_scraping.py | 40 +++++++++++++++++++++++ tests/test_tools.py | 25 ++++++++++++++ 4 files changed, 71 insertions(+), 6 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 5cb4fb93..3fd79dfb 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -444,8 +444,8 @@ async def scrape_job(self, job_id: str) -> dict[str, Any]: references: dict[str, list[Reference]] = {} if extracted.text and extracted.text != _RATE_LIMITED_MSG: sections["job_posting"] = extracted.text - if extracted.text != _RATE_LIMITED_MSG and extracted.references: - references["job_posting"] = extracted.references + if extracted.references: + references["job_posting"] = extracted.references result: dict[str, Any] = { "url": url, @@ -772,8 +772,8 @@ async def search_people( references: dict[str, list[Reference]] = {} if extracted.text and extracted.text != _RATE_LIMITED_MSG: sections["search_results"] = extracted.text - if extracted.text != _RATE_LIMITED_MSG and extracted.references: - references["search_results"] = extracted.references + if extracted.references: + references["search_results"] = extracted.references result: dict[str, Any] = { "url": url, diff --git a/linkedin_mcp_server/tools/company.py 
b/linkedin_mcp_server/tools/company.py index c38ebe67..9e938e60 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -114,8 +114,8 @@ async def get_company_posts( references: dict[str, list[Reference]] = {} if extracted.text and extracted.text != _RATE_LIMITED_MSG: sections["posts"] = extracted.text - if extracted.text != _RATE_LIMITED_MSG and extracted.references: - references["posts"] = extracted.references + if extracted.references: + references["posts"] = extracted.references await ctx.report_progress(progress=100, total=100, message="Complete") diff --git a/tests/test_scraping.py b/tests/test_scraping.py index e7983442..0bc055fa 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -763,6 +763,24 @@ async def test_scrape_job_omits_rate_limited_sentinel(self, mock_page): assert result["sections"] == {} + async def test_scrape_job_omits_orphaned_references_when_text_empty( + self, mock_page + ): + extractor = LinkedInExtractor(mock_page) + with patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value=extracted( + "", + [{"kind": "job", "url": "/jobs/view/12345/", "text": "Engineer"}], + ), + ): + result = await extractor.scrape_job("12345") + + assert result["sections"] == {} + assert "references" not in result + class TestSearchJobs: """Tests for search_jobs with job ID extraction and pagination.""" @@ -1250,6 +1268,28 @@ async def test_rate_limited_skips_ids_and_text(self, mock_page): assert result["sections"] == {} mock_ids.assert_not_awaited() + async def test_search_people_omits_orphaned_references(self, mock_page): + extractor = LinkedInExtractor(mock_page) + with patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + return_value=extracted( + "", + [ + { + "kind": "person", + "url": "/in/testuser/", + "text": "Test User", + } + ], + ), + ): + result = await extractor.search_people("python") + + assert result["sections"] == {} + assert "references" not 
in result + class TestStripLinkedInNoise: def test_strips_footer(self): diff --git a/tests/test_tools.py b/tests/test_tools.py index e8ea93d9..da7db8fa 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -238,6 +238,31 @@ async def test_get_company_posts_omits_rate_limited_sentinel(self, mock_context) result = await tool_fn("testcorp", mock_context, extractor=mock_extractor) assert result["sections"] == {} + async def test_get_company_posts_omits_orphaned_references(self, mock_context): + mock_extractor = MagicMock() + mock_extractor.extract_page = AsyncMock( + return_value=ExtractedSection( + text="", + references=[ + { + "kind": "company", + "url": "/company/testcorp/", + "text": "TestCorp", + } + ], + ) + ) + + from linkedin_mcp_server.tools.company import register_company_tools + + mcp = FastMCP("test") + register_company_tools(mcp) + + tool_fn = await get_tool_fn(mcp, "get_company_posts") + result = await tool_fn("testcorp", mock_context, extractor=mock_extractor) + assert result["sections"] == {} + assert "references" not in result + class TestJobTools: async def test_get_job_details(self, mock_context): From 917301712f5efb68c0aa7e4115d11393e815e64d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 9 Mar 2026 01:53:45 +0100 Subject: [PATCH 479/565] fix(scraping): bound redirect unwrapping --- linkedin_mcp_server/scraping/link_metadata.py | 8 +++++-- tests/test_link_metadata.py | 22 +++++++++++++++++++ 2 files changed, 28 insertions(+), 2 deletions(-) diff --git a/linkedin_mcp_server/scraping/link_metadata.py b/linkedin_mcp_server/scraping/link_metadata.py index bdab4bbd..caf63eef 100644 --- a/linkedin_mcp_server/scraping/link_metadata.py +++ b/linkedin_mcp_server/scraping/link_metadata.py @@ -100,6 +100,7 @@ class RawReference(TypedDict, total=False): _NEWSLETTER_PATH_RE = re.compile(r"^/newsletters/([^/?#]+)") _PULSE_PATH_RE = re.compile(r"^/pulse/([^/?#]+)") _FEED_PATH_RE = re.compile(r"^/feed/update/([^/?#]+)") +_MAX_REDIRECT_UNWRAP_DEPTH 
= 5 def build_references( @@ -153,8 +154,11 @@ def normalize_reference( return reference -def normalize_url(href: str) -> str | None: +def normalize_url(href: str, _depth: int = 0) -> str | None: """Normalize a raw href and unwrap LinkedIn redirect URLs.""" + if _depth > _MAX_REDIRECT_UNWRAP_DEPTH: + return None + href = href.strip() if not href or href.startswith("#"): return None @@ -171,7 +175,7 @@ def normalize_url(href: str) -> str | None: target = unquote((parse_qs(parsed.query).get("url") or [""])[0]).strip() if not target: return None - return normalize_url(target) + return normalize_url(target, _depth + 1) if not parsed.scheme: return None diff --git a/tests/test_link_metadata.py b/tests/test_link_metadata.py index 628926c2..8d723d10 100644 --- a/tests/test_link_metadata.py +++ b/tests/test_link_metadata.py @@ -1,9 +1,12 @@ """Tests for compact LinkedIn reference extraction helpers.""" +from urllib.parse import quote + from linkedin_mcp_server.scraping.link_metadata import ( RawReference, build_references, dedupe_references, + normalize_url, ) @@ -159,6 +162,25 @@ def test_prefers_cleaner_duplicate_label(self): } ] + def test_normalize_url_unwraps_nested_redirects_within_cap(self): + target = "https://example.com/report" + nested = "https://www.linkedin.com/redir/redirect/?url=" + quote( + "https://www.linkedin.com/redir/redirect/?url=" + quote(target, safe=""), + safe="", + ) + + assert normalize_url(nested) == target + + def test_normalize_url_drops_redirect_chain_beyond_cap(self): + target = "https://example.com/report" + href = target + for _ in range(7): + href = "https://www.linkedin.com/redir/redirect/?url=" + quote( + href, safe="" + ) + + assert normalize_url(href) is None + def test_prefers_shorter_clean_label_over_merged_visible_text(self): references = build_references( [ From 45bee6270e64b8221d8561942bfc5d1a82174eae Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 9 Mar 2026 02:05:01 +0100 Subject: [PATCH 480/565] fix(auth): narrow 
barrier text match --- linkedin_mcp_server/core/auth.py | 2 +- tests/test_core_auth.py | 28 ++++++++++++++++++++++++++++ 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/linkedin_mcp_server/core/auth.py b/linkedin_mcp_server/core/auth.py index 21bbe567..e08a1871 100644 --- a/linkedin_mcp_server/core/auth.py +++ b/linkedin_mcp_server/core/auth.py @@ -26,7 +26,7 @@ ("welcome back", "sign in using another account"), ("welcome back", "join now"), ("choose an account",), - ("continue as",), + ("continue as", "sign in using another account"), ) diff --git a/tests/test_core_auth.py b/tests/test_core_auth.py index 93fbb7eb..156362d8 100644 --- a/tests/test_core_auth.py +++ b/tests/test_core_auth.py @@ -22,6 +22,20 @@ async def test_detect_auth_barrier_for_account_picker(): assert "auth blocker URL" in result +@pytest.mark.asyncio +async def test_detect_auth_barrier_for_continue_as_account_picker(): + page = MagicMock() + page.url = "https://www.linkedin.com/checkpoint/lg/login-submit" + page.title = AsyncMock(return_value="LinkedIn Sign In") + page.evaluate = AsyncMock( + return_value="Continue as Daniel Sticker\nSign in using another account" + ) + + result = await detect_auth_barrier(page) + + assert result is not None + + @pytest.mark.asyncio async def test_detect_auth_barrier_returns_none_for_authenticated_page(): page = MagicMock() @@ -32,3 +46,17 @@ async def test_detect_auth_barrier_returns_none_for_authenticated_page(): result = await detect_auth_barrier(page) assert result is None + + +@pytest.mark.asyncio +async def test_detect_auth_barrier_ignores_continue_as_in_page_content(): + page = MagicMock() + page.url = "https://www.linkedin.com/jobs/view/123456/" + page.title = AsyncMock(return_value="Software Engineer at Acme - LinkedIn") + page.evaluate = AsyncMock( + return_value="We need someone to continue as a senior engineer on our team." 
+ ) + + result = await detect_auth_barrier(page) + + assert result is None From bd128af0b3abf7efda4459d84327bdd90dd173b0 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 9 Mar 2026 02:07:07 +0100 Subject: [PATCH 481/565] fix(auth): tighten blocker matching --- linkedin_mcp_server/core/auth.py | 18 ++++++++++++++++-- linkedin_mcp_server/scraping/extractor.py | 13 +++++++++---- tests/test_core_auth.py | 12 ++++++++++++ 3 files changed, 37 insertions(+), 6 deletions(-) diff --git a/linkedin_mcp_server/core/auth.py b/linkedin_mcp_server/core/auth.py index e08a1871..35f1daa6 100644 --- a/linkedin_mcp_server/core/auth.py +++ b/linkedin_mcp_server/core/auth.py @@ -3,6 +3,7 @@ import asyncio import logging import re +from urllib.parse import urlparse from patchright.async_api import Page, TimeoutError as PlaywrightTimeoutError @@ -69,7 +70,7 @@ async def is_logged_in(page: Page) -> bool: current_url = page.url # Step 1: Fail-fast on auth blockers - if any(pattern in current_url for pattern in _AUTH_BLOCKER_URL_PATTERNS): + if _is_auth_blocker_url(current_url): return False # Step 2: Selector check (PRIMARY) @@ -108,7 +109,7 @@ async def detect_auth_barrier(page: Page) -> str | None: """Detect LinkedIn auth/account-picker barriers on the current page.""" try: current_url = page.url - if any(pattern in current_url for pattern in _AUTH_BLOCKER_URL_PATTERNS): + if _is_auth_blocker_url(current_url): return f"auth blocker URL: {current_url}" try: @@ -142,6 +143,19 @@ async def detect_auth_barrier(page: Page) -> str | None: return None +def _is_auth_blocker_url(url: str) -> bool: + """Return True only for real auth routes, not arbitrary slug substrings.""" + path = urlparse(url).path or "/" + + if path in _AUTH_BLOCKER_URL_PATTERNS: + return True + + return any( + path == f"{pattern}/" or path.startswith(f"{pattern}/") + for pattern in _AUTH_BLOCKER_URL_PATTERNS + ) + + async def wait_for_manual_login(page: Page, timeout: int = 300000) -> None: """Wait for user to 
manually complete login. diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 3fd79dfb..a401b0be 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -121,9 +121,14 @@ def strip_linkedin_noise(text: str) -> str: Finds the earliest occurrence of any known noise marker and truncates there. """ cleaned = _truncate_linkedin_noise(text) + return _filter_linkedin_noise_lines(cleaned) + + +def _filter_linkedin_noise_lines(text: str) -> str: + """Remove known media/control noise lines from already-truncated content.""" filtered_lines = [ line - for line in cleaned.splitlines() + for line in text.splitlines() if not any(pattern.match(line.strip()) for pattern in _NOISE_LINES) ] return "\n".join(filtered_lines).strip() @@ -258,7 +263,7 @@ async def _extract_page_once( "Page %s returned only LinkedIn chrome (likely rate-limited)", url ) return ExtractedSection(text=_RATE_LIMITED_MSG, references=[]) - cleaned = strip_linkedin_noise(raw) + cleaned = _filter_linkedin_noise_lines(truncated) return ExtractedSection( text=cleaned, references=build_references(raw_result["references"], section_name or ""), @@ -331,7 +336,7 @@ async def _extract_overlay_once( url, ) return ExtractedSection(text=_RATE_LIMITED_MSG, references=[]) - cleaned = strip_linkedin_noise(raw) + cleaned = _filter_linkedin_noise_lines(truncated) return ExtractedSection( text=cleaned, references=build_references(raw_result["references"], section_name or ""), @@ -549,7 +554,7 @@ async def _extract_search_page_once( url, ) return ExtractedSection(text=_RATE_LIMITED_MSG, references=[]) - cleaned = strip_linkedin_noise(raw) + cleaned = _filter_linkedin_noise_lines(truncated) return ExtractedSection( text=cleaned, references=build_references(raw_result["references"], section_name), diff --git a/tests/test_core_auth.py b/tests/test_core_auth.py index 156362d8..ef688960 100644 --- a/tests/test_core_auth.py +++ 
b/tests/test_core_auth.py @@ -60,3 +60,15 @@ async def test_detect_auth_barrier_ignores_continue_as_in_page_content(): result = await detect_auth_barrier(page) assert result is None + + +@pytest.mark.asyncio +async def test_detect_auth_barrier_ignores_auth_substrings_in_slugs(): + page = MagicMock() + page.url = "https://www.linkedin.com/company/challenge-labs/" + page.title = AsyncMock(return_value="Challenge Labs | LinkedIn") + page.evaluate = AsyncMock(return_value="Challenge Labs builds developer tools.") + + result = await detect_auth_barrier(page) + + assert result is None From 7e3446cdc361380007acfff43e4f0841b2b7c433 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 9 Mar 2026 08:45:26 +0100 Subject: [PATCH 482/565] perf(scraping): cap dom reference scan --- linkedin_mcp_server/scraping/extractor.py | 2 ++ linkedin_mcp_server/scraping/link_metadata.py | 4 ++- tests/test_link_metadata.py | 33 +++++++++++++++++++ tests/test_scraping.py | 2 ++ 4 files changed, 40 insertions(+), 1 deletion(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index a401b0be..5d64b8a2 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -800,6 +800,7 @@ async def _extract_root_content( const headingSelector = 'h1, h2, h3'; const directHeadingSelector = ':scope > h1, :scope > h2, :scope > h3'; const MAX_HEADING_CONTAINERS = 300; + const MAX_REFERENCE_ANCHORS = 500; const getHeadingText = element => { if (!element) return ''; @@ -866,6 +867,7 @@ async def _extract_root_content( }; const references = Array.from(container.querySelectorAll('a[href]')) + .slice(0, MAX_REFERENCE_ANCHORS) .map(anchor => { const rawHref = (anchor.getAttribute('href') || '').trim(); if (!rawHref || rawHref === '#') { diff --git a/linkedin_mcp_server/scraping/link_metadata.py b/linkedin_mcp_server/scraping/link_metadata.py index caf63eef..479e243d 100644 --- 
a/linkedin_mcp_server/scraping/link_metadata.py +++ b/linkedin_mcp_server/scraping/link_metadata.py @@ -198,7 +198,9 @@ def classify_link(href: str) -> tuple[ReferenceKind, str] | None: return None if match := _PERSON_PATH_RE.match(path): - if "/overlay/" in path or "/details/" in path or "/recent-activity/" in path: + person_suffix = path[match.end() :].lstrip("/") + first_suffix_segment = person_suffix.split("/", 1)[0] if person_suffix else "" + if first_suffix_segment in {"overlay", "details", "recent-activity"}: return None return "person", f"/in/{match.group(1)}/" diff --git a/tests/test_link_metadata.py b/tests/test_link_metadata.py index 8d723d10..fc9b500e 100644 --- a/tests/test_link_metadata.py +++ b/tests/test_link_metadata.py @@ -53,6 +53,39 @@ def test_canonicalizes_and_types_linkedin_urls(self): }, ] + def test_preserves_person_slug_named_details(self): + references = build_references( + [ + { + "href": "https://www.linkedin.com/in/details/", + "text": "Details Person", + } + ], + "main_profile", + ) + + assert references == [ + { + "kind": "person", + "url": "/in/details/", + "text": "Details Person", + "context": "top card", + } + ] + + def test_drops_person_details_subpage(self): + references = build_references( + [ + { + "href": "https://www.linkedin.com/in/williamhgates/details/experience/", + "text": "Bill Gates", + } + ], + "main_profile", + ) + + assert references == [] + def test_unwraps_redirect_and_drops_junk(self): references = build_references( [ diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 0bc055fa..34bdaec5 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -183,9 +183,11 @@ async def test_root_content_filters_empty_href_before_resolution(self, mock_page assert await_args is not None script = await_args.args[0] assert "MAX_HEADING_CONTAINERS = 300" in script + assert "MAX_REFERENCE_ANCHORS = 500" in script assert "const getPreviousHeading = node =>" in script assert "index < 3" in script assert "if 
(!rawHref || rawHref === '#')" in script + assert ".slice(0, MAX_REFERENCE_ANCHORS)" in script assert "in_list" not in script assert ".filter(Boolean);" in script From 94cdfc5ca5c490a78c7331ae4b03131c0f2c8ce4 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 9 Mar 2026 09:10:30 +0100 Subject: [PATCH 483/565] fix(auth): narrow account chooser match --- linkedin_mcp_server/core/auth.py | 2 +- tests/test_core_auth.py | 28 ++++++++++++++++++++++++++++ 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/linkedin_mcp_server/core/auth.py b/linkedin_mcp_server/core/auth.py index 35f1daa6..098ea7d0 100644 --- a/linkedin_mcp_server/core/auth.py +++ b/linkedin_mcp_server/core/auth.py @@ -26,7 +26,7 @@ _AUTH_BARRIER_TEXT_MARKERS = ( ("welcome back", "sign in using another account"), ("welcome back", "join now"), - ("choose an account",), + ("choose an account", "sign in using another account"), ("continue as", "sign in using another account"), ) diff --git a/tests/test_core_auth.py b/tests/test_core_auth.py index ef688960..57f3441c 100644 --- a/tests/test_core_auth.py +++ b/tests/test_core_auth.py @@ -36,6 +36,20 @@ async def test_detect_auth_barrier_for_continue_as_account_picker(): assert result is not None +@pytest.mark.asyncio +async def test_detect_auth_barrier_for_choose_account_picker(): + page = MagicMock() + page.url = "https://www.linkedin.com/checkpoint/lg/login-submit" + page.title = AsyncMock(return_value="LinkedIn Sign In") + page.evaluate = AsyncMock( + return_value="Choose an account\nSign in using another account" + ) + + result = await detect_auth_barrier(page) + + assert result is not None + + @pytest.mark.asyncio async def test_detect_auth_barrier_returns_none_for_authenticated_page(): page = MagicMock() @@ -62,6 +76,20 @@ async def test_detect_auth_barrier_ignores_continue_as_in_page_content(): assert result is None +@pytest.mark.asyncio +async def test_detect_auth_barrier_ignores_choose_account_in_page_content(): + page = MagicMock() + 
page.url = "https://www.linkedin.com/jobs/view/123456/" + page.title = AsyncMock(return_value="Software Engineer at Acme - LinkedIn") + page.evaluate = AsyncMock( + return_value="You will choose an account strategy for the next quarter." + ) + + result = await detect_auth_barrier(page) + + assert result is None + + @pytest.mark.asyncio async def test_detect_auth_barrier_ignores_auth_substrings_in_slugs(): page = MagicMock() From 0dce622f8aba31a710537e563a94a3d620e6403e Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 9 Mar 2026 09:41:39 +0100 Subject: [PATCH 484/565] chore(scraping): make section caps explicit --- linkedin_mcp_server/scraping/link_metadata.py | 9 +++++- tests/test_link_metadata.py | 28 +++++++++++++++++++ 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/linkedin_mcp_server/scraping/link_metadata.py b/linkedin_mcp_server/scraping/link_metadata.py index 479e243d..fdc41e00 100644 --- a/linkedin_mcp_server/scraping/link_metadata.py +++ b/linkedin_mcp_server/scraping/link_metadata.py @@ -80,10 +80,17 @@ class RawReference(TypedDict, total=False): "job_posting": "job result", } +_DEFAULT_REFERENCE_CAP = 12 _REFERENCE_CAPS = { "main_profile": 12, "about": 12, + "experience": 12, + "education": 12, + "interests": 12, + "honors": 12, + "languages": 12, "posts": 12, + "jobs": 8, "search_results": 15, "job_posting": 8, "contact_info": 8, @@ -108,7 +115,7 @@ def build_references( section_name: str, ) -> list[Reference]: """Filter and normalize raw DOM anchors into compact references.""" - cap = _REFERENCE_CAPS.get(section_name, 12) + cap = _REFERENCE_CAPS.get(section_name, _DEFAULT_REFERENCE_CAP) normalized_references: list[Reference] = [] for raw in raw_references: diff --git a/tests/test_link_metadata.py b/tests/test_link_metadata.py index fc9b500e..9e4593f1 100644 --- a/tests/test_link_metadata.py +++ b/tests/test_link_metadata.py @@ -355,6 +355,34 @@ def test_caps_results_per_section(self): assert references[0]["url"] == 
"/company/test-0/" assert references[-1]["url"] == "/company/test-11/" + def test_caps_jobs_section_more_tightly(self): + raw: list[RawReference] = [ + { + "href": f"https://www.linkedin.com/jobs/view/{idx}/", + "text": f"Job {idx}", + } + for idx in range(20) + ] + + references = build_references(raw, "jobs") + + assert len(references) == 8 + assert references[0]["url"] == "/jobs/view/0/" + assert references[-1]["url"] == "/jobs/view/7/" + + def test_uses_default_cap_for_unknown_section(self): + raw: list[RawReference] = [ + { + "href": f"https://www.linkedin.com/company/test-{idx}/", + "text": f"Company {idx}", + } + for idx in range(20) + ] + + references = build_references(raw, "unknown_section") + + assert len(references) == 12 + def test_prefers_richer_duplicate_text(self): references = build_references( [ From 8dcf45b3287dd642ff0bc7d5e57551b630be58f2 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 9 Mar 2026 10:16:36 +0100 Subject: [PATCH 485/565] perf(scraping): trim auth barrier checks --- linkedin_mcp_server/core/__init__.py | 2 ++ linkedin_mcp_server/core/auth.py | 20 ++++++++++++++ linkedin_mcp_server/scraping/extractor.py | 26 +++++++++++------- tests/test_core_auth.py | 18 ++++++++++++- tests/test_scraping.py | 33 ++++++++++++++++------- 5 files changed, 80 insertions(+), 19 deletions(-) diff --git a/linkedin_mcp_server/core/__init__.py b/linkedin_mcp_server/core/__init__.py index caf36726..326d3e32 100644 --- a/linkedin_mcp_server/core/__init__.py +++ b/linkedin_mcp_server/core/__init__.py @@ -2,6 +2,7 @@ from .auth import ( detect_auth_barrier, + detect_auth_barrier_quick, is_logged_in, wait_for_manual_login, warm_up_browser, @@ -22,6 +23,7 @@ "AuthenticationError", "BrowserManager", "detect_auth_barrier", + "detect_auth_barrier_quick", "ElementNotFoundError", "LinkedInScraperException", "NetworkError", diff --git a/linkedin_mcp_server/core/auth.py b/linkedin_mcp_server/core/auth.py index 098ea7d0..25e69bbc 100644 --- 
a/linkedin_mcp_server/core/auth.py +++ b/linkedin_mcp_server/core/auth.py @@ -106,6 +106,15 @@ async def is_logged_in(page: Page) -> bool: async def detect_auth_barrier(page: Page) -> str | None: + """Detect LinkedIn auth/account-picker barriers on the current page.""" + return await _detect_auth_barrier(page, include_body_text=True) + + +async def _detect_auth_barrier( + page: Page, + *, + include_body_text: bool, +) -> str | None: """Detect LinkedIn auth/account-picker barriers on the current page.""" try: current_url = page.url @@ -119,6 +128,9 @@ async def detect_auth_barrier(page: Page) -> str | None: if any(pattern in title for pattern in _LOGIN_TITLE_PATTERNS): return f"login title: {title}" + if not include_body_text: + return None + try: body_text = await page.evaluate("() => document.body?.innerText || ''") except Exception: @@ -143,6 +155,14 @@ async def detect_auth_barrier(page: Page) -> str | None: return None +async def detect_auth_barrier_quick(page: Page) -> str | None: + """Cheap auth-barrier check for normal navigations. + + Uses URL and title only, avoiding a full body-text fetch on healthy pages. 
+ """ + return await _detect_auth_barrier(page, include_body_text=False) + + def _is_auth_blocker_url(url: str) -> bool: """Return True only for real auth routes, not arbitrary slug substrings.""" path = urlparse(url).path or "/" diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 5d64b8a2..012e0c70 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -9,7 +9,7 @@ from patchright.async_api import Page, TimeoutError as PlaywrightTimeoutError -from linkedin_mcp_server.core import detect_auth_barrier +from linkedin_mcp_server.core import detect_auth_barrier, detect_auth_barrier_quick from linkedin_mcp_server.core.exceptions import ( AuthenticationError, LinkedInScraperException, @@ -179,12 +179,20 @@ async def _navigate_to_page(self, url: str) -> None: await self._raise_if_auth_barrier(url, navigation_error=exc) raise - await self._raise_if_auth_barrier(url) + barrier = await detect_auth_barrier_quick(self._page) + if not barrier: + return + + logger.warning("Authentication barrier detected on %s: %s", url, barrier) + raise AuthenticationError( + "LinkedIn requires interactive re-authentication. " + "Run with --login and complete the account selection/sign-in flow." + ) async def extract_page( self, url: str, - section_name: str | None = None, + section_name: str, ) -> ExtractedSection: """Navigate to a URL, scroll to load lazy content, and extract innerText. 
@@ -215,7 +223,7 @@ async def extract_page( async def _extract_page_once( self, url: str, - section_name: str | None = None, + section_name: str, ) -> ExtractedSection: """Single attempt to navigate, scroll, and extract innerText.""" await self._navigate_to_page(url) @@ -266,13 +274,13 @@ async def _extract_page_once( cleaned = _filter_linkedin_noise_lines(truncated) return ExtractedSection( text=cleaned, - references=build_references(raw_result["references"], section_name or ""), + references=build_references(raw_result["references"], section_name), ) async def _extract_overlay( self, url: str, - section_name: str | None = None, + section_name: str, ) -> ExtractedSection: """Extract content from an overlay/modal page (e.g. contact info). @@ -304,7 +312,7 @@ async def _extract_overlay( async def _extract_overlay_once( self, url: str, - section_name: str | None = None, + section_name: str, ) -> ExtractedSection: """Single attempt to extract content from an overlay/modal page.""" await self._navigate_to_page(url) @@ -339,7 +347,7 @@ async def _extract_overlay_once( cleaned = _filter_linkedin_noise_lines(truncated) return ExtractedSection( text=cleaned, - references=build_references(raw_result["references"], section_name or ""), + references=build_references(raw_result["references"], section_name), ) async def scrape_person(self, username: str, requested: set[str]) -> dict[str, Any]: @@ -518,7 +526,7 @@ async def _extract_search_page( async def _extract_search_page_once( self, url: str, - section_name: str = "", + section_name: str, ) -> ExtractedSection: """Single attempt to navigate, scroll sidebar, and extract innerText.""" await self._navigate_to_page(url) diff --git a/tests/test_core_auth.py b/tests/test_core_auth.py index 57f3441c..8bb4e03c 100644 --- a/tests/test_core_auth.py +++ b/tests/test_core_auth.py @@ -4,7 +4,10 @@ import pytest -from linkedin_mcp_server.core.auth import detect_auth_barrier +from linkedin_mcp_server.core.auth import ( + 
detect_auth_barrier, + detect_auth_barrier_quick, +) @pytest.mark.asyncio @@ -62,6 +65,19 @@ async def test_detect_auth_barrier_returns_none_for_authenticated_page(): assert result is None +@pytest.mark.asyncio +async def test_detect_auth_barrier_quick_skips_body_text_on_authenticated_page(): + page = MagicMock() + page.url = "https://www.linkedin.com/feed/" + page.title = AsyncMock(return_value="LinkedIn Feed") + page.evaluate = AsyncMock(return_value="Home\nMy Network\nJobs\nMessaging") + + result = await detect_auth_barrier_quick(page) + + assert result is None + page.evaluate.assert_not_awaited() + + @pytest.mark.asyncio async def test_detect_auth_barrier_ignores_continue_as_in_page_content(): page = MagicMock() diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 34bdaec5..505d16b9 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -160,7 +160,8 @@ async def test_extract_page_returns_text(self, mock_page): ), ): result = await extractor.extract_page( - "https://www.linkedin.com/in/testuser/" + "https://www.linkedin.com/in/testuser/", + section_name="main_profile", ) assert result.text == "Sample profile text" @@ -195,7 +196,10 @@ async def test_extract_page_returns_empty_on_failure(self, mock_page): mock_page.goto = AsyncMock(side_effect=Exception("Network error")) extractor = LinkedInExtractor(mock_page) - result = await extractor.extract_page("https://www.linkedin.com/in/bad/") + result = await extractor.extract_page( + "https://www.linkedin.com/in/bad/", + section_name="main_profile", + ) assert result.text == "" assert result.references == [] @@ -211,7 +215,10 @@ async def test_extract_page_raises_auth_error_for_account_picker(self, mock_page ), pytest.raises(AuthenticationError, match="--login"), ): - await extractor.extract_page("https://www.linkedin.com/in/testuser/") + await extractor.extract_page( + "https://www.linkedin.com/in/testuser/", + section_name="main_profile", + ) async def test_rate_limit_detected(self, 
mock_page): from linkedin_mcp_server.core.exceptions import RateLimitError @@ -225,7 +232,10 @@ async def test_rate_limit_detected(self, mock_page): ), pytest.raises(RateLimitError), ): - await extractor.extract_page("https://www.linkedin.com/in/testuser/") + await extractor.extract_page( + "https://www.linkedin.com/in/testuser/", + section_name="main_profile", + ) async def test_returns_rate_limited_msg_after_retry(self, mock_page): """When both attempts return only noise, surface rate limit message.""" @@ -258,7 +268,8 @@ async def test_returns_rate_limited_msg_after_retry(self, mock_page): ), ): result = await extractor.extract_page( - "https://www.linkedin.com/in/testuser/details/experience/" + "https://www.linkedin.com/in/testuser/details/experience/", + section_name="experience", ) assert result.text == _RATE_LIMITED_MSG @@ -306,7 +317,8 @@ async def root_content_side_effect(*args, **kwargs): ), ): result = await extractor.extract_page( - "https://www.linkedin.com/in/testuser/details/education/" + "https://www.linkedin.com/in/testuser/details/education/", + section_name="education", ) assert result.text == "Education\nHarvard University\n1973 โ€“ 1975" @@ -1396,7 +1408,8 @@ async def test_activity_page_waits_for_content_and_uses_slow_scroll( ), ): result = await extractor._extract_page_once( - "https://www.linkedin.com/in/billgates/recent-activity/all/" + "https://www.linkedin.com/in/billgates/recent-activity/all/", + section_name="posts", ) mock_page.wait_for_function.assert_awaited_once() @@ -1429,7 +1442,8 @@ async def test_non_activity_page_skips_wait_and_uses_fast_scroll(self, mock_page ), ): await extractor._extract_page_once( - "https://www.linkedin.com/in/billgates/details/experience/" + "https://www.linkedin.com/in/billgates/details/experience/", + section_name="experience", ) mock_page.wait_for_function.assert_not_awaited() @@ -1466,7 +1480,8 @@ async def test_activity_page_timeout_proceeds_gracefully(self, mock_page): ), ): result = await 
extractor._extract_page_once( - "https://www.linkedin.com/in/billgates/recent-activity/all/" + "https://www.linkedin.com/in/billgates/recent-activity/all/", + section_name="posts", ) # Should return whatever text is available, not crash From 45a91530133e40f404f159134d0e67526c8af84c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 9 Mar 2026 10:35:25 +0100 Subject: [PATCH 486/565] fix(scraping): label job posting context --- linkedin_mcp_server/scraping/link_metadata.py | 2 +- tests/test_link_metadata.py | 20 +++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/linkedin_mcp_server/scraping/link_metadata.py b/linkedin_mcp_server/scraping/link_metadata.py index fdc41e00..d6029fec 100644 --- a/linkedin_mcp_server/scraping/link_metadata.py +++ b/linkedin_mcp_server/scraping/link_metadata.py @@ -77,7 +77,7 @@ class RawReference(TypedDict, total=False): "honors": "honors", "languages": "languages", "contact_info": "contact info", - "job_posting": "job result", + "job_posting": "job posting", } _DEFAULT_REFERENCE_CAP = 12 diff --git a/tests/test_link_metadata.py b/tests/test_link_metadata.py index 9e4593f1..84d84e2c 100644 --- a/tests/test_link_metadata.py +++ b/tests/test_link_metadata.py @@ -437,6 +437,26 @@ def test_uses_search_result_contexts(self): }, ] + def test_uses_job_posting_context_for_job_pages(self): + references = build_references( + [ + { + "href": "https://www.linkedin.com/company/acme/", + "text": "Acme", + } + ], + "job_posting", + ) + + assert references == [ + { + "kind": "company", + "url": "/company/acme/", + "text": "Acme", + "context": "job posting", + } + ] + def test_does_not_treat_lookalike_domains_as_linkedin(self): references = build_references( [ From d62280de6df5a108a5978766ff63d7b68680c131 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 9 Mar 2026 10:52:56 +0100 Subject: [PATCH 487/565] chore: Bump version to 4.3.0 --- .mcp.json | 11 +++++++++++ AGENTS.md | 10 ++++++++++ 
linkedin_mcp_server/core/utils.py | 6 +++--- pyproject.toml | 2 +- uv.lock | 2 +- 5 files changed, 26 insertions(+), 5 deletions(-) create mode 100644 .mcp.json diff --git a/.mcp.json b/.mcp.json new file mode 100644 index 00000000..a6172ac0 --- /dev/null +++ b/.mcp.json @@ -0,0 +1,11 @@ +{ + "mcpServers": { + "greptile": { + "type": "http", + "url": "https://api.greptile.com/mcp", + "headers": { + "Authorization": "Bearer ${GREPTILE_API_KEY}" + } + } + } +} diff --git a/AGENTS.md b/AGENTS.md index a0bed729..9424e710 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -197,6 +197,16 @@ gh api repos/{owner}/{repo}/pulls/{pr}/comments # inline comments gh api repos/{owner}/{repo}/issues/{pr}/comments # follow-up reviews ``` +## Greptile MCP + +The project includes a `.mcp.json` that configures the Greptile MCP server for Claude Code. Contributors need to set `GREPTILE_API_KEY` in their environment (get one at [app.greptile.com](https://app.greptile.com)). + +For Codex CLI, run: + +```bash +codex mcp add greptile --url https://api.greptile.com/mcp --bearer-token-env-var GREPTILE_API_KEY +``` + ## btca When you need up-to-date information about technologies used in this project, use btca to query source repositories directly. diff --git a/linkedin_mcp_server/core/utils.py b/linkedin_mcp_server/core/utils.py index 09ad8544..7f3a3ebe 100644 --- a/linkedin_mcp_server/core/utils.py +++ b/linkedin_mcp_server/core/utils.py @@ -32,7 +32,7 @@ async def detect_rate_limit(page: Page) -> None: raise RateLimitError( "LinkedIn security checkpoint detected. " "You may need to verify your identity or wait before continuing.", - suggested_wait_time=3600, + suggested_wait_time=30, ) # Check for CAPTCHA @@ -43,7 +43,7 @@ async def detect_rate_limit(page: Page) -> None: if captcha > 0: raise RateLimitError( "CAPTCHA challenge detected. 
Manual intervention required.", - suggested_wait_time=3600, + suggested_wait_time=30, ) except RateLimitError: raise @@ -75,7 +75,7 @@ async def detect_rate_limit(page: Page) -> None: ): raise RateLimitError( "Rate limit message detected on page.", - suggested_wait_time=1800, + suggested_wait_time=30, ) except RateLimitError: raise diff --git a/pyproject.toml b/pyproject.toml index fba315ea..b2227b95 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "4.2.0" +version = "4.3.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index af1f7185..965bf715 100644 --- a/uv.lock +++ b/uv.lock @@ -951,7 +951,7 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "4.2.0" +version = "4.3.0" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From f7c7fbe141981b4d2123bec6f0ef7b0e49108d16 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 9 Mar 2026 09:54:06 +0000 Subject: [PATCH 488/565] chore: update manifest.json and docker-compose.yml to v4.3.0 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 7b85aceb..8169c720 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:4.2.0 + image: stickerdaniel/linkedin-mcp-server:4.3.0 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index 78e54322..b0b49295 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "4.2.0", + "version": "4.3.0", "description": "Connect Claude to LinkedIn for profiles, companies, job details, and people search", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n- **People Search**: Search for people by keywords and location\n- **Company Posts**: Get recent posts from a company's LinkedIn feed\n- **Person Posts**: Get recent activity and posts from a person's profile\n\n## First-Time Setup\n\n### 1. 
Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.2.0", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n- **People Search**: Search for people by keywords and location\n- **Company Posts**: Get recent posts from a company's LinkedIn feed\n- **Person Posts**: Get recent activity and posts from a person's profile\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.3.0", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:4.2.0" + "stickerdaniel/linkedin-mcp-server:4.3.0" ] } }, From 26e97eb9771bb208f32499d88d1f35e378915b77 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 9 Mar 2026 09:55:01 +0000 Subject: [PATCH 489/565] chore(deps): update all major dependencies --- .github/workflows/release.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 9fdf133b..aa9d359c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -140,16 +140,16 @@ jobs: fi - name: Set up Docker Buildx - uses: 
docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3 + uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4 - name: Log in to Docker Hub - uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3 + uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - name: Build and push Docker images - uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6 + uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7 with: context: . push: true From e16c38becc68af7a824d92f6510bfd8bda68ed7f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 9 Mar 2026 09:56:32 +0000 Subject: [PATCH 490/565] chore(deps): update ci dependencies --- .github/workflows/claude.yml | 2 +- .github/workflows/release.yml | 2 +- Dockerfile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index 694fb468..09995875 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -32,7 +32,7 @@ jobs: - name: Run Claude Code id: claude - uses: anthropics/claude-code-action@5f8e5bfe5b03891348854ae401476fc905a6ff6a # v1 + uses: anthropics/claude-code-action@26ec041249acb0a944c0a47b6c0c13f05dbc5b44 # v1 with: claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index aa9d359c..b19c7c11 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -68,7 +68,7 @@ jobs: enable-cache: true - name: Set up Bun - uses: oven-sh/setup-bun@3d267786b128fe76c2f16a390aa2448b815359f3 # v2 + uses: oven-sh/setup-bun@ecf28ddc73e819eb6fa29df6b34ef8921c743461 # v2 - name: Update manifest.json and docker-compose.yml version run: | diff --git a/Dockerfile b/Dockerfile index 
28d0c841..f252f272 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,7 @@ FROM python:3.14-slim-bookworm@sha256:5404df00cf00e6e7273375f415651837b4d192ac6859c44d3b740888ac798c99 # Install uv package manager -COPY --from=ghcr.io/astral-sh/uv:latest@sha256:88234bc9e09c2b2f6d176a3daf411419eb0370d450a08129257410de9cfafd2a /uv /uvx /bin/ +COPY --from=ghcr.io/astral-sh/uv:latest@sha256:10902f58a1606787602f303954cea099626a4adb02acbac4c69920fe9d278f82 /uv /uvx /bin/ # Create non-root user first (matching original pwuser from Playwright image) RUN useradd -m -s /bin/bash pwuser From ca554600b206c2318ce922bc295178f215401da6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Wed, 11 Mar 2026 20:36:06 +0100 Subject: [PATCH 491/565] docs(README): installation link for uv in README Updated the prerequisites section to include a link for installing uv. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 188a0da5..1c715785 100644 --- a/README.md +++ b/README.md @@ -58,7 +58,7 @@ Tool responses keep readable `sections` text and may also include a compact `ref ## ๐Ÿš€ uvx Setup (Recommended - Universal) -**Prerequisites:** Install uv and run `uvx patchright install chromium` to set up the browser. +**Prerequisites:** [Install uv](https://docs.astral.sh/uv/getting-started/installation/) and run `uvx patchright install chromium` to set up the browser. 
### Installation From 0d36cbf3e1f6b0c75c3a363a231bef357105ae14 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 12 Mar 2026 14:40:27 +0100 Subject: [PATCH 492/565] feat(server): serialize tool calls --- AGENTS.md | 3 + README.md | 4 ++ docs/docker-hub.md | 4 ++ .../sequential_tool_middleware.py | 48 ++++++++++++++++ linkedin_mcp_server/server.py | 4 ++ tests/test_server.py | 57 +++++++++++++++++++ 6 files changed, 120 insertions(+) create mode 100644 linkedin_mcp_server/sequential_tool_middleware.py create mode 100644 tests/test_server.py diff --git a/AGENTS.md b/AGENTS.md index 9424e710..b29af373 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -102,6 +102,9 @@ Tools may also include: - `stdio` (default) - Standard I/O for CLI MCP clients - `streamable-http` - HTTP server mode for web-based MCP clients +- Tool calls are serialized within one server process to protect the shared + LinkedIn browser session. Concurrent client requests queue instead of running + in parallel. Use debug logging to inspect scraper lock wait/acquire/release. ## Development Notes diff --git a/README.md b/README.md index 1c715785..312ed181 100644 --- a/README.md +++ b/README.md @@ -130,6 +130,10 @@ uvx linkedin-scraper-mcp --transport streamable-http --host 127.0.0.1 --port 808 Runtime server logs are emitted by FastMCP/Uvicorn. +Tool calls are serialized within a single server process to protect the shared +LinkedIn browser session. Concurrent client requests queue instead of running in +parallel. Use `--log-level DEBUG` to see scraper lock wait/acquire/release logs. + **Test with mcp inspector:** 1. Install and run mcp inspector ```bunx @modelcontextprotocol/inspector``` diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 691b0fc2..a1bc3337 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -43,6 +43,10 @@ uvx linkedin-scraper-mcp --login > **Note:** Docker containers don't have a display server, so you can't use the `--login` command in Docker. 
Create a profile on your host first. > > **Note:** `stdio` is the default transport. Add `--transport streamable-http` only when you specifically want HTTP mode. +> +> **Note:** Tool calls are serialized within one server process to protect the +> shared LinkedIn browser session. Concurrent client requests queue instead of +> running in parallel. Use `LOG_LEVEL=DEBUG` to see scraper lock logs. ## Environment Variables diff --git a/linkedin_mcp_server/sequential_tool_middleware.py b/linkedin_mcp_server/sequential_tool_middleware.py new file mode 100644 index 00000000..78885589 --- /dev/null +++ b/linkedin_mcp_server/sequential_tool_middleware.py @@ -0,0 +1,48 @@ +"""Middleware that serializes MCP tool execution within one server process.""" + +from __future__ import annotations + +import asyncio +import logging +import time + +import mcp.types as mt + +from fastmcp.server.middleware import CallNext, Middleware, MiddlewareContext +from fastmcp.tools.tool import ToolResult + +logger = logging.getLogger(__name__) + + +class SequentialToolExecutionMiddleware(Middleware): + """Ensure only one MCP tool call executes at a time per server process.""" + + def __init__(self) -> None: + self._lock = asyncio.Lock() + + async def on_call_tool( + self, + context: MiddlewareContext[mt.CallToolRequestParams], + call_next: CallNext[mt.CallToolRequestParams, ToolResult], + ) -> ToolResult: + tool_name = context.message.name + wait_started = time.perf_counter() + logger.debug("Waiting for scraper lock for tool '%s'", tool_name) + + async with self._lock: + wait_seconds = time.perf_counter() - wait_started + logger.debug( + "Acquired scraper lock for tool '%s' after %.3fs", + tool_name, + wait_seconds, + ) + hold_started = time.perf_counter() + try: + return await call_next(context) + finally: + hold_seconds = time.perf_counter() - hold_started + logger.debug( + "Released scraper lock for tool '%s' after %.3fs", + tool_name, + hold_seconds, + ) diff --git 
a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index e85f08f7..f111c225 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -15,6 +15,9 @@ from linkedin_mcp_server.authentication import get_authentication_source from linkedin_mcp_server.drivers.browser import close_browser from linkedin_mcp_server.error_handler import raise_tool_error +from linkedin_mcp_server.sequential_tool_middleware import ( + SequentialToolExecutionMiddleware, +) from linkedin_mcp_server.tools.company import register_company_tools from linkedin_mcp_server.tools.job import register_job_tools from linkedin_mcp_server.tools.person import register_person_tools @@ -46,6 +49,7 @@ def create_mcp_server() -> FastMCP: lifespan=auth_lifespan | browser_lifespan, mask_error_details=True, ) + mcp.add_middleware(SequentialToolExecutionMiddleware()) # Register all tools register_person_tools(mcp) diff --git a/tests/test_server.py b/tests/test_server.py new file mode 100644 index 00000000..0cd494d5 --- /dev/null +++ b/tests/test_server.py @@ -0,0 +1,57 @@ +import asyncio + +from fastmcp import FastMCP + +from linkedin_mcp_server.sequential_tool_middleware import ( + SequentialToolExecutionMiddleware, +) +from linkedin_mcp_server.server import create_mcp_server + + +class TestSequentialToolExecutionMiddleware: + async def test_create_mcp_server_registers_sequential_tool_middleware(self): + mcp = create_mcp_server() + + assert any( + isinstance(middleware, SequentialToolExecutionMiddleware) + for middleware in mcp.middleware + ) + + async def test_sequential_tool_middleware_serializes_parallel_tool_calls(self): + mcp = FastMCP("test") + mcp.add_middleware(SequentialToolExecutionMiddleware()) + + active_calls = 0 + max_active_calls = 0 + + @mcp.tool + async def slow_tool(delay: float = 0.05) -> dict[str, float]: + nonlocal active_calls, max_active_calls + active_calls += 1 + max_active_calls = max(max_active_calls, active_calls) + try: + await asyncio.sleep(delay) + 
return {"delay": delay} + finally: + active_calls -= 1 + + result_one, result_two = await asyncio.gather( + mcp.call_tool("slow_tool", {"delay": 0.05}), + mcp.call_tool("slow_tool", {"delay": 0.05}), + ) + + assert max_active_calls == 1 + assert result_one.structured_content == {"delay": 0.05} + assert result_two.structured_content == {"delay": 0.05} + + async def test_sequential_tool_middleware_preserves_tool_results(self): + mcp = FastMCP("test") + mcp.add_middleware(SequentialToolExecutionMiddleware()) + + @mcp.tool + async def simple_tool(value: int) -> dict[str, int]: + return {"value": value} + + result = await mcp.call_tool("simple_tool", {"value": 7}) + + assert result.structured_content == {"value": 7} From 2df6e04ad0a6a29b93f0a67ba9750b6b3a140931 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 12 Mar 2026 15:10:13 +0100 Subject: [PATCH 493/565] feat(server): report queued tool progress --- .../sequential_tool_middleware.py | 24 ++++++++++++++ tests/test_server.py | 32 +++++++++++++++++++ 2 files changed, 56 insertions(+) diff --git a/linkedin_mcp_server/sequential_tool_middleware.py b/linkedin_mcp_server/sequential_tool_middleware.py index 78885589..d9b11c1e 100644 --- a/linkedin_mcp_server/sequential_tool_middleware.py +++ b/linkedin_mcp_server/sequential_tool_middleware.py @@ -20,6 +20,22 @@ class SequentialToolExecutionMiddleware(Middleware): def __init__(self) -> None: self._lock = asyncio.Lock() + async def _report_progress( + self, + context: MiddlewareContext[mt.CallToolRequestParams], + *, + message: str, + ) -> None: + fastmcp_context = context.fastmcp_context + if fastmcp_context is None or fastmcp_context.request_context is None: + return + + await fastmcp_context.report_progress( + progress=0, + total=100, + message=message, + ) + async def on_call_tool( self, context: MiddlewareContext[mt.CallToolRequestParams], @@ -28,6 +44,10 @@ async def on_call_tool( tool_name = context.message.name wait_started = time.perf_counter() 
logger.debug("Waiting for scraper lock for tool '%s'", tool_name) + await self._report_progress( + context, + message="Queued waiting for scraper lock", + ) async with self._lock: wait_seconds = time.perf_counter() - wait_started @@ -36,6 +56,10 @@ async def on_call_tool( tool_name, wait_seconds, ) + await self._report_progress( + context, + message="Scraper lock acquired, starting tool", + ) hold_started = time.perf_counter() try: return await call_next(context) diff --git a/tests/test_server.py b/tests/test_server.py index 0cd494d5..4ed40244 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -1,6 +1,9 @@ import asyncio +from unittest.mock import AsyncMock, MagicMock, call +import mcp.types as mt from fastmcp import FastMCP +from fastmcp.server.middleware import MiddlewareContext from linkedin_mcp_server.sequential_tool_middleware import ( SequentialToolExecutionMiddleware, @@ -55,3 +58,32 @@ async def simple_tool(value: int) -> dict[str, int]: result = await mcp.call_tool("simple_tool", {"value": 7}) assert result.structured_content == {"value": 7} + + async def test_sequential_tool_middleware_reports_queue_progress(self): + middleware = SequentialToolExecutionMiddleware() + fastmcp_context = MagicMock() + fastmcp_context.request_context = object() + fastmcp_context.report_progress = AsyncMock() + call_next = AsyncMock(return_value=MagicMock()) + context = MiddlewareContext( + message=mt.CallToolRequestParams(name="slow_tool", arguments={}), + method="tools/call", + fastmcp_context=fastmcp_context, + ) + + await middleware.on_call_tool(context, call_next) + + fastmcp_context.report_progress.assert_has_awaits( + [ + call( + progress=0, + total=100, + message="Queued waiting for scraper lock", + ), + call( + progress=0, + total=100, + message="Scraper lock acquired, starting tool", + ), + ] + ) From 060f0295026e5ac3ee69f9167623ef4e9e0f6f66 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 12 Mar 2026 22:35:13 +0100 Subject: [PATCH 494/565] 
feat(auth): persist runtime-derived sessions --- AGENTS.md | 17 +- README.md | 27 +- docs/docker-hub.md | 24 +- linkedin_mcp_server/authentication.py | 35 +- linkedin_mcp_server/cli_main.py | 96 +++-- linkedin_mcp_server/core/__init__.py | 2 + linkedin_mcp_server/core/auth.py | 77 +++- linkedin_mcp_server/core/browser.py | 126 +++++-- linkedin_mcp_server/drivers/browser.py | 424 +++++++++++++++++----- linkedin_mcp_server/scraping/extractor.py | 145 +++++++- linkedin_mcp_server/server.py | 6 +- linkedin_mcp_server/session_state.py | 275 ++++++++++++++ linkedin_mcp_server/setup.py | 15 +- scripts/debug_cookie_bridge.py | 345 ++++++++++++++++++ tests/conftest.py | 21 ++ tests/test_authentication.py | 84 +++-- tests/test_browser_driver.py | 396 ++++++++++++++++++-- tests/test_cli_main.py | 164 ++++++++- tests/test_core_auth.py | 80 ++++ tests/test_core_browser.py | 162 +++++++++ tests/test_scraping.py | 66 ++++ tests/test_session_state.py | 85 +++++ 22 files changed, 2439 insertions(+), 233 deletions(-) create mode 100644 linkedin_mcp_server/session_state.py create mode 100644 scripts/debug_cookie_bridge.py create mode 100644 tests/test_core_browser.py create mode 100644 tests/test_session_state.py diff --git a/AGENTS.md b/AGENTS.md index b29af373..6631afe9 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -11,7 +11,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co - Bump version: see [Release Process](#release-process) below - Install browser: `uv run patchright install chromium` - Run server locally: `uv run -m linkedin_mcp_server --no-headless` -- Run via uvx (PyPI): `uvx linkedin-scraper-mcp` +- Run via uvx (PyPI/package verification only): `uvx linkedin-scraper-mcp` - Run in Docker: `docker run -it --rm -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp stickerdaniel/linkedin-mcp-server:latest` **Code Quality:** @@ -25,7 +25,8 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co **Docker Commands:** - Build: 
`docker build -t linkedin-mcp-server .` -- Login: Use uvx locally first: `uvx linkedin-scraper-mcp --login` +- Login for local development: `uv run -m linkedin_mcp_server --login` +- Login for packaged-distribution verification: `uvx linkedin-scraper-mcp --login` ## Architecture Overview @@ -95,8 +96,11 @@ Tools may also include: **Authentication Flow:** -- Uses persistent browser profile at `~/.linkedin-mcp/profile/` -- Run with `--login` to create a profile via browser login +- Source runtime uses persistent browser profile at `~/.linkedin-mcp/profile/` +- `--login` creates a new source login generation and exports `cookies.json` +- Foreign runtimes derive their own persistent profiles under `~/.linkedin-mcp/runtime-profiles//profile/` +- The first foreign-runtime bridge exports `storage-state.json`, performs a checkpoint restart, and only then marks the derived runtime profile reusable +- Derived runtime profiles are reused across restarts and rebuilt only after a new host `--login` **Transport Modes:** @@ -140,9 +144,12 @@ Tools may also include: ## Verifying Bug Reports -Always verify scraping bugs end-to-end against live LinkedIn, not just code analysis. Assume a valid login profile already exists at `~/.linkedin-mcp/profile/`. Start the server with HTTP transport in one terminal (this process is long-running and will block the shell), then in a second terminal call the tool via curl: +Always verify scraping bugs end-to-end against live LinkedIn, not just code analysis. When working in this repository, use the local code path with `uv run`, not `uvx`, so the running process reflects the files in your workspace. Use `uvx` only when intentionally verifying the packaged distribution. Assume a valid login profile already exists at `~/.linkedin-mcp/profile/`. 
Start the server with HTTP transport in one terminal (this process is long-running and will block the shell), then in a second terminal call the tool via curl: ```bash +# Create or refresh the local source session +uv run -m linkedin_mcp_server --login + # Start server uv run -m linkedin_mcp_server --transport streamable-http --log-level DEBUG diff --git a/README.md b/README.md index 312ed181..fa7c4d42 100644 --- a/README.md +++ b/README.md @@ -187,14 +187,34 @@ parallel. Use `--log-level DEBUG` to see scraper lock wait/acquire/release logs. Docker runs headless (no browser window), so you need to create a browser profile locally first and mount it into the container. -**Step 1: Create profile using uvx (one-time setup)** +**Step 1: Create profile on the host (one-time setup)** ```bash +# Installed package usage uvx linkedin-scraper-mcp --login + +# Local development from this repo +uv run -m linkedin_mcp_server --login ``` +If you are debugging or verifying code changes in this repository, prefer `uv run -m linkedin_mcp_server ...` so the running process matches your workspace files. Use `uvx` when intentionally testing the packaged distribution. + This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile is saved to `~/.linkedin-mcp/profile/`. 
+After login, the host writes: + +- source profile: `~/.linkedin-mcp/profile/` +- portable cookies: `~/.linkedin-mcp/cookies.json` +- source session metadata: `~/.linkedin-mcp/source-state.json` + +The first Docker run derives a Linux runtime profile under: + +- `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/profile/` +- `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/storage-state.json` +- `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/runtime-state.json` + +That first Docker run also performs an internal checkpoint restart after `/feed/` succeeds, so the derived Linux runtime session is committed immediately instead of depending on later browser shutdown. Later Docker runs reuse that committed Linux runtime profile directly instead of reconstructing the session from cookies on every startup. Running `--login` again creates a new source login generation, which causes the next Docker run to rebuild its Linux runtime profile once. + **Step 2: Configure Claude Desktop with Docker** ```json @@ -213,7 +233,7 @@ This opens a browser window where you log in manually (5 minute timeout for 2FA, ``` > [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --login` again locally. +> Docker now keeps its own persistent derived runtime profile after the first successful bridge and checkpoint restart. If you run `--login` again on the host, the next Docker startup rebuilds that derived runtime profile from the new source login generation. > [!NOTE] > **Why can't I run `--login` in Docker?** Docker containers don't have a display server. Create a profile on your host using the [uvx setup](#-uvx-setup-recommended---universal) and mount it into Docker. 
@@ -237,7 +257,7 @@ This opens a browser window where you log in manually (5 minute timeout for 2FA, - `--host HOST` - HTTP server host (default: 127.0.0.1) - `--port PORT` - HTTP server port (default: 8000) - `--path PATH` - HTTP server path (default: /mcp) -- `--logout` - Clear stored LinkedIn browser profile +- `--logout` - Clear all stored LinkedIn auth state, including source and derived runtime profiles - `--timeout MS` - Browser timeout for page operations in milliseconds (default: 5000) - `--user-data-dir PATH` - Path to persistent browser profile directory (default: ~/.linkedin-mcp/profile) - `--chrome-path PATH` - Path to Chrome/Chromium executable (rarely needed in Docker) @@ -281,6 +301,7 @@ Runtime server logs are emitted by FastMCP/Uvicorn. - Make sure you have only one active LinkedIn session at a time - LinkedIn may require a login confirmation in the LinkedIn mobile app for `--login` - You might get a captcha challenge if you logged in frequently. Run `uvx linkedin-scraper-mcp --login` which opens a browser where you can solve captchas manually. See the [uvx setup](#-uvx-setup-recommended---universal) for prerequisites. +- If Docker auth becomes stale after you re-login on the host, restart Docker once so it can rebuild its derived runtime profile from the new source login generation. **Timeout issues:** diff --git a/docs/docker-hub.md b/docs/docker-hub.md index a1bc3337..f90c1bac 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -17,12 +17,32 @@ A Model Context Protocol (MCP) server that connects AI assistants to LinkedIn. A Create a browser profile locally, then mount it into Docker. 
-**Step 1: Create profile using uvx (one-time setup)** +**Step 1: Create profile on the host (one-time setup)** ```bash +# Installed package usage uvx linkedin-scraper-mcp --login + +# Local development from this repo +uv run -m linkedin_mcp_server --login ``` +If you are debugging or verifying code changes in this repository, prefer `uv run -m linkedin_mcp_server ...` so the running process matches your workspace files. Use `uvx` when intentionally testing the packaged distribution. + +This creates the source session artifacts on the host: + +- `~/.linkedin-mcp/profile/` +- `~/.linkedin-mcp/cookies.json` +- `~/.linkedin-mcp/source-state.json` + +The first Docker run derives a persistent Linux runtime profile under: + +- `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/profile/` +- `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/storage-state.json` +- `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/runtime-state.json` + +That first Docker run also performs an internal checkpoint restart after `/feed/` succeeds, so the derived Linux runtime session is committed immediately instead of depending on later browser shutdown. Later Docker runs reuse that committed Linux runtime profile directly. Re-running `--login` on the host creates a new source login generation, and the next Docker run rebuilds its derived Linux profile once. + **Step 2: Configure Claude Desktop with Docker** ```json @@ -40,7 +60,7 @@ uvx linkedin-scraper-mcp --login } ``` -> **Note:** Docker containers don't have a display server, so you can't use the `--login` command in Docker. Create a profile on your host first. +> **Note:** Docker containers don't have a display server, so you can't use the `--login` command in Docker. Create a source profile on your host first. > > **Note:** `stdio` is the default transport. Add `--transport streamable-http` only when you specifically want HTTP mode. 
> diff --git a/linkedin_mcp_server/authentication.py b/linkedin_mcp_server/authentication.py index 9027d2dd..91500c2e 100644 --- a/linkedin_mcp_server/authentication.py +++ b/linkedin_mcp_server/authentication.py @@ -8,9 +8,13 @@ import shutil from pathlib import Path -from linkedin_mcp_server.drivers.browser import ( - get_profile_dir, +from linkedin_mcp_server.session_state import ( + clear_auth_state as clear_all_auth_state, + get_source_profile_dir, + portable_cookie_path, profile_exists, + source_state_path, + load_source_state, ) from linkedin_mcp_server.exceptions import CredentialsNotFoundError @@ -27,15 +31,25 @@ def get_authentication_source() -> bool: Raises: CredentialsNotFoundError: If no authentication method available """ - profile_dir = get_profile_dir() - if profile_exists(profile_dir): - logger.info(f"Using persistent profile from {profile_dir}") + profile_dir = get_source_profile_dir() + cookies_path = portable_cookie_path(profile_dir) + source_state = load_source_state(profile_dir) + if profile_exists(profile_dir) and cookies_path.exists() and source_state: + logger.info("Using source profile from %s", profile_dir) return True + if profile_exists(profile_dir) or cookies_path.exists(): + raise CredentialsNotFoundError( + "LinkedIn source session metadata is missing or incomplete.\n\n" + f"Expected source metadata: {source_state_path(profile_dir)}\n" + f"Expected portable cookies: {cookies_path}\n\n" + "Run with --login to create a fresh source session generation." + ) + raise CredentialsNotFoundError( - "No LinkedIn authentication found.\n\n" + "No LinkedIn source session found.\n\n" "Options:\n" - " 1. Run with --login to create a browser profile (recommended)\n" + " 1. Run with --login to create a source browser profile (recommended)\n" " 2. 
Run with --no-headless to login interactively\n\n" "For Docker users:\n" " Create profile on host first: uvx linkedin-scraper-mcp --login\n" @@ -54,7 +68,7 @@ def clear_profile(profile_dir: Path | None = None) -> bool: True if clearing was successful """ if profile_dir is None: - profile_dir = get_profile_dir() + profile_dir = get_source_profile_dir() if profile_dir.exists(): try: @@ -65,3 +79,8 @@ def clear_profile(profile_dir: Path | None = None) -> bool: logger.warning(f"Could not clear profile: {e}") return False return True + + +def clear_auth_state(profile_dir: Path | None = None) -> bool: + """Clear source session artifacts and all derived runtime sessions.""" + return clear_all_auth_state(profile_dir or get_source_profile_dir()) diff --git a/linkedin_mcp_server/cli_main.py b/linkedin_mcp_server/cli_main.py index 899841d9..1692d9df 100644 --- a/linkedin_mcp_server/cli_main.py +++ b/linkedin_mcp_server/cli_main.py @@ -13,10 +13,10 @@ import inquirer -from linkedin_mcp_server.core import AuthenticationError, RateLimitError, is_logged_in +from linkedin_mcp_server.core import AuthenticationError, RateLimitError from linkedin_mcp_server.authentication import ( - clear_profile, + clear_auth_state, get_authentication_source, ) from linkedin_mcp_server.config import get_config @@ -29,6 +29,15 @@ ) from linkedin_mcp_server.exceptions import CredentialsNotFoundError from linkedin_mcp_server.logging_config import configure_logging +from linkedin_mcp_server.session_state import ( + get_runtime_id, + load_runtime_state, + load_source_state, + portable_cookie_path, + runtime_profile_dir, + runtime_storage_state_path, + source_state_path, +) from linkedin_mcp_server.server import create_mcp_server from linkedin_mcp_server.setup import run_interactive_setup, run_profile_creation @@ -68,14 +77,18 @@ def clear_profile_and_exit() -> None: version = get_version() logger.info(f"LinkedIn MCP Server v{version} - Profile Clear mode") - profile_dir = get_profile_dir() + auth_root = 
get_profile_dir().parent - if not profile_exists(profile_dir): - print("โ„น๏ธ No browser profile found") + if not ( + profile_exists(get_profile_dir()) + or portable_cookie_path(get_profile_dir()).exists() + or source_state_path(get_profile_dir()).exists() + ): + print("โ„น๏ธ No authentication state found") print("Nothing to clear.") sys.exit(0) - print(f"๐Ÿ”‘ Clear LinkedIn browser profile from {profile_dir}?") + print(f"๐Ÿ”‘ Clear LinkedIn authentication state from {auth_root}?") try: confirmation = ( @@ -88,10 +101,10 @@ def clear_profile_and_exit() -> None: print("\nโŒ Operation cancelled") sys.exit(0) - if clear_profile(profile_dir): - print("โœ… LinkedIn browser profile cleared successfully!") + if clear_auth_state(get_profile_dir()): + print("โœ… LinkedIn authentication state cleared successfully!") else: - print("โŒ Failed to clear profile") + print("โŒ Failed to clear authentication state") sys.exit(1) sys.exit(0) @@ -127,20 +140,54 @@ def profile_info_and_exit() -> None: version = get_version() logger.info(f"LinkedIn MCP Server v{version} - Session Info mode") - # Check if profile directory exists first profile_dir = get_profile_dir() - if not profile_exists(profile_dir): - print(f"โŒ No browser profile found at {profile_dir}") - print(" Run with --login to create a profile") + cookies_path = portable_cookie_path(profile_dir) + source_state = load_source_state(profile_dir) + current_runtime = get_runtime_id() + + if not source_state or not profile_exists(profile_dir) or not cookies_path.exists(): + print(f"โŒ No valid source session found at {profile_dir}") + print(" Run with --login to create a source session") sys.exit(1) - # Check if session is valid by testing login status + print(f"Current runtime: {current_runtime}") + print(f"Source runtime: {source_state.source_runtime_id}") + print(f"Login generation: {source_state.login_generation}") + + runtime_state = None + runtime_profile = None + runtime_storage_state = None + bridge_required = False 
+ + if current_runtime == source_state.source_runtime_id: + print(f"Profile mode: source ({profile_dir})") + else: + runtime_state = load_runtime_state(current_runtime, profile_dir) + runtime_profile = runtime_profile_dir(current_runtime, profile_dir) + runtime_storage_state = runtime_storage_state_path(current_runtime, profile_dir) + if ( + runtime_state + and runtime_state.source_login_generation == source_state.login_generation + and profile_exists(runtime_profile) + and runtime_storage_state.exists() + ): + print( + f"Profile mode: derived (committed, current generation) ({runtime_profile})" + ) + else: + bridge_required = True + state = "stale generation" if runtime_state else "missing" + print(f"Profile mode: derived ({state})") + print( + "Storage snapshot: " + f"{runtime_storage_state if runtime_storage_state and runtime_storage_state.exists() else 'missing'}" + ) + async def check_session() -> bool: try: set_headless(True) # Always check headless browser = await get_or_create_browser() - valid = await is_logged_in(browser.page) - return valid + return browser.is_authenticated except AuthenticationError: return False except Exception as e: @@ -149,6 +196,12 @@ async def check_session() -> bool: finally: await close_browser() + if bridge_required: + print( + "โ„น๏ธ A derived runtime profile will be created and checkpoint-committed on the next server startup." 
+ ) + sys.exit(0) + try: valid = asyncio.run(check_session()) except Exception as e: @@ -156,13 +209,14 @@ async def check_session() -> bool: print(" Check logs and browser configuration.") sys.exit(1) + active_profile = profile_dir if runtime_profile is None else runtime_profile if valid: - print(f"โœ… Session is valid (profile: {profile_dir})") + print(f"โœ… Session is valid (profile: {active_profile})") sys.exit(0) - else: - print(f"โŒ Session expired or invalid (profile: {profile_dir})") - print(" Run with --login to re-authenticate") - sys.exit(1) + + print(f"โŒ Session expired or invalid (profile: {active_profile})") + print(" Run with --login to re-authenticate") + sys.exit(1) def ensure_authentication_ready() -> None: diff --git a/linkedin_mcp_server/core/__init__.py b/linkedin_mcp_server/core/__init__.py index 326d3e32..aba9ff76 100644 --- a/linkedin_mcp_server/core/__init__.py +++ b/linkedin_mcp_server/core/__init__.py @@ -4,6 +4,7 @@ detect_auth_barrier, detect_auth_barrier_quick, is_logged_in, + resolve_remember_me_prompt, wait_for_manual_login, warm_up_browser, ) @@ -33,6 +34,7 @@ "detect_rate_limit", "handle_modal_close", "is_logged_in", + "resolve_remember_me_prompt", "scroll_to_bottom", "wait_for_manual_login", "warm_up_browser", diff --git a/linkedin_mcp_server/core/auth.py b/linkedin_mcp_server/core/auth.py index 25e69bbc..0326f3c7 100644 --- a/linkedin_mcp_server/core/auth.py +++ b/linkedin_mcp_server/core/auth.py @@ -29,6 +29,11 @@ ("choose an account", "sign in using another account"), ("continue as", "sign in using another account"), ) +_REMEMBER_ME_CONTAINER_SELECTOR = "#rememberme-div" +_REMEMBER_ME_BUTTON_SELECTOR = ( + "#rememberme-div > div.memberList-container > div > div > " + "div.member-profile-container.list-box > div.member-profile-block > button" +) async def warm_up_browser(page: Page) -> None: @@ -93,7 +98,19 @@ async def is_logged_in(page: Page) -> bool: pattern in current_url for pattern in authenticated_only_pages ) - return 
has_nav_elements or is_authenticated_page + if not is_authenticated_page: + return has_nav_elements + + if has_nav_elements: + return True + + # Empty authenticated-only pages are a false positive during cookie + # bridge recovery. Require some real page content before trusting URL. + body_text = await page.evaluate("() => document.body?.innerText || ''") + if not isinstance(body_text, str): + return False + + return bool(body_text.strip()) except PlaywrightTimeoutError: logger.warning( "Timeout checking login status on %s โ€” treating as not logged in", @@ -163,6 +180,60 @@ async def detect_auth_barrier_quick(page: Page) -> str | None: return await _detect_auth_barrier(page, include_body_text=False) +async def resolve_remember_me_prompt(page: Page) -> bool: + """Click through LinkedIn's saved-account chooser when it appears.""" + try: + logger.debug("Checking remember-me prompt on %s", page.url) + try: + await page.wait_for_selector(_REMEMBER_ME_CONTAINER_SELECTOR, timeout=3000) + logger.debug("Remember-me container appeared") + except PlaywrightTimeoutError: + logger.debug("Remember-me container did not appear in time") + return False + + target = page.locator(_REMEMBER_ME_BUTTON_SELECTOR).first + target_count = await page.locator(_REMEMBER_ME_BUTTON_SELECTOR).count() + logger.debug( + "Remember-me target count for %s: %d", + _REMEMBER_ME_BUTTON_SELECTOR, + target_count, + ) + try: + await target.wait_for(state="visible", timeout=3000) + logger.debug("Remember-me button became visible") + except PlaywrightTimeoutError: + logger.debug( + "Remember-me prompt container appeared without a visible login button" + ) + return False + + logger.info("Clicking LinkedIn saved-account chooser to resume session") + try: + await target.scroll_into_view_if_needed(timeout=3000) + except PlaywrightTimeoutError: + logger.debug("Remember-me button did not scroll into view in time") + + try: + await target.click(timeout=5000) + logger.debug("Remember-me button click succeeded") + 
except PlaywrightTimeoutError: + logger.debug("Retrying remember-me prompt click with force=True") + await target.click(timeout=5000, force=True) + logger.debug("Remember-me button force-click succeeded") + try: + await page.wait_for_load_state("domcontentloaded", timeout=10000) + except PlaywrightTimeoutError: + logger.debug("Remember-me prompt click did not finish loading in time") + await asyncio.sleep(1) + return True + except PlaywrightTimeoutError: + logger.debug("Remember-me prompt was present but not clickable in time") + return False + except Exception: + logger.debug("Failed to resolve remember-me prompt", exc_info=True) + return False + + def _is_auth_blocker_url(url: str) -> bool: """Return True only for real auth routes, not arbitrary slug substrings.""" path = urlparse(url).path or "/" @@ -194,6 +265,10 @@ async def wait_for_manual_login(page: Page, timeout: int = 300000) -> None: start_time = asyncio.get_event_loop().time() while True: + if await resolve_remember_me_prompt(page): + logger.info("Resolved saved-account chooser during manual login flow") + continue + if await is_logged_in(page): logger.info("Manual login completed successfully") return diff --git a/linkedin_mcp_server/core/browser.py b/linkedin_mcp_server/core/browser.py index f6778586..fa1845d9 100644 --- a/linkedin_mcp_server/core/browser.py +++ b/linkedin_mcp_server/core/browser.py @@ -2,6 +2,7 @@ import json import logging +import os from pathlib import Path from typing import Any @@ -100,20 +101,28 @@ async def start(self) -> None: async def close(self) -> None: """Close persistent context and cleanup resources.""" - try: - if self._context: - await self._context.close() - self._context = None - self._page = None + context = self._context + playwright = self._playwright + self._context = None + self._page = None + self._playwright = None - if self._playwright: - await self._playwright.stop() - self._playwright = None + if context is None and playwright is None: + return - 
logger.info("Browser closed") + if context is not None: + try: + await context.close() + except Exception as exc: + logger.error("Error closing browser context: %s", exc) - except Exception as e: - logger.error("Error closing browser: %s", e) + if playwright is not None: + try: + await playwright.stop() + except Exception as exc: + logger.error("Error stopping playwright: %s", exc) + + logger.info("Browser closed") @property def page(self) -> Page: @@ -184,14 +193,82 @@ async def export_cookies(self, cookie_path: str | Path | None = None) -> bool: logger.exception("Failed to export cookies") return False - _AUTH_COOKIE_NAMES = frozenset({"li_at", "li_rm"}) + async def export_storage_state( + self, path: str | Path, *, indexed_db: bool = True + ) -> bool: + """Export the current browser storage state for diagnostics and recovery.""" + if not self._context: + logger.warning("Cannot export storage state: no browser context") + return False + + storage_path = Path(path) + storage_path.parent.mkdir(parents=True, exist_ok=True) + try: + await self._context.storage_state( + path=storage_path, + indexed_db=indexed_db, + ) + logger.info( + "Exported runtime storage snapshot to %s (indexed_db=%s)", + storage_path, + indexed_db, + ) + return True + except Exception: + logger.exception("Failed to export storage state to %s", storage_path) + return False + + _BRIDGE_COOKIE_PRESETS = { + "bridge_core": frozenset( + { + "li_at", + "li_rm", + "JSESSIONID", + "bcookie", + "bscookie", + "liap", + "lidc", + "li_gc", + "lang", + "timezone", + "li_mc", + } + ), + "auth_minimal": frozenset( + { + "li_at", + "JSESSIONID", + "bcookie", + "bscookie", + "lidc", + } + ), + } + + @classmethod + def _bridge_cookie_names(cls) -> frozenset[str]: + preset_name = ( + os.getenv( + "LINKEDIN_DEBUG_BRIDGE_COOKIE_SET", + "bridge_core", + ).strip() + or "bridge_core" + ) + preset = cls._BRIDGE_COOKIE_PRESETS.get(preset_name) + if preset is None: + logger.warning( + "Unknown 
LINKEDIN_DEBUG_BRIDGE_COOKIE_SET=%r, falling back to bridge_core", + preset_name, + ) + return cls._BRIDGE_COOKIE_PRESETS["bridge_core"] + return preset async def import_cookies(self, cookie_path: str | Path | None = None) -> bool: - """Import auth cookies (li_at, li_rm) from a portable JSON file. + """Import the portable LinkedIn bridge cookie subset. - Clears all existing browser cookies before importing to avoid - undecryptable cookie conflicts in the persistent store. - Only li_at and li_rm cookies are imported; others are ignored. + Fresh browser-side cookies are preserved. The imported subset is the + smallest known set that can reconstruct a usable authenticated page in + a fresh profile. """ if not self._context: logger.warning("Cannot import cookies: no browser context") @@ -208,22 +285,27 @@ async def import_cookies(self, cookie_path: str | Path | None = None) -> bool: logger.debug("Cookie file is empty") return False + bridge_cookie_names = self._bridge_cookie_names() + cookies = [ self._normalize_cookie_domain(c) for c in all_cookies - if c.get("name") in self._AUTH_COOKIE_NAMES + if "linkedin.com" in c.get("domain", "") + and c.get("name") in bridge_cookie_names ] - if not cookies: - logger.warning("No auth cookies (li_at/li_rm) found in %s", path) + + has_li_at = any(c.get("name") == "li_at" for c in cookies) + if not has_li_at: + logger.warning("No li_at cookie found in %s", path) return False - # Clear undecryptable cookies from the persistent store first. 
- await self._context.clear_cookies() await self._context.add_cookies(cookies) # type: ignore[arg-type] logger.info( - "Imported %d auth cookies from %s: %s", + "Imported %d LinkedIn bridge cookies from %s (preset=%s, li_at=%s): %s", len(cookies), path, + os.getenv("LINKEDIN_DEBUG_BRIDGE_COOKIE_SET", "bridge_core"), + has_li_at, ", ".join(c["name"] for c in cookies), ) return True diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index d7849bd8..01a44b9f 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -7,28 +7,66 @@ """ import logging -import shutil -import tempfile +import os from pathlib import Path +import asyncio from linkedin_mcp_server.core import ( AuthenticationError, BrowserManager, + detect_auth_barrier_quick, detect_rate_limit, is_logged_in, + resolve_remember_me_prompt, ) from linkedin_mcp_server.config import get_config +from linkedin_mcp_server.session_state import ( + SourceState, + clear_runtime_profile, + get_runtime_id, + get_source_profile_dir, + load_runtime_state, + load_source_state, + portable_cookie_path, + profile_exists as session_profile_exists, + runtime_profile_dir, + runtime_storage_state_path, + write_runtime_state, +) logger = logging.getLogger(__name__) # Default persistent profile directory DEFAULT_PROFILE_DIR = Path.home() / ".linkedin-mcp" / "profile" - # Global browser instance (singleton) _browser: BrowserManager | None = None +_browser_cookie_export_path: Path | None = None _headless: bool = True +_NAV_STABILIZE_DELAY_SECONDS = 5.0 + + +async def _stabilize_navigation(label: str) -> None: + """Pause between LinkedIn startup actions to rule out timing issues.""" + if os.environ.get("PYTEST_CURRENT_TEST"): + return + logger.debug( + "Stabilizing navigation for %.1fs after %s", + _NAV_STABILIZE_DELAY_SECONDS, + label, + ) + await asyncio.sleep(_NAV_STABILIZE_DELAY_SECONDS) + + +def _debug_skip_checkpoint_restart() -> bool: + """Return 
whether to keep the fresh bridged browser alive for this run.""" + return os.getenv("LINKEDIN_DEBUG_SKIP_CHECKPOINT_RESTART", "").strip().lower() in { + "1", + "true", + "yes", + "on", + } def _apply_browser_settings(browser: BrowserManager) -> None: @@ -37,6 +75,207 @@ def _apply_browser_settings(browser: BrowserManager) -> None: browser.page.set_default_timeout(config.browser.default_timeout) +async def _log_feed_failure_context( + browser: BrowserManager, + reason: str, + exc: Exception | None = None, +) -> None: + """Log the page state when /feed/ validation fails.""" + page = browser.page + + try: + title = await page.title() + except Exception: + title = "" + + try: + remember_me = (await page.locator("#rememberme-div").count()) > 0 + except Exception: + remember_me = False + + try: + body_text = await page.evaluate("() => document.body?.innerText || ''") + except Exception: + body_text = "" + + if not isinstance(body_text, str): + body_text = "" + + logger.warning( + "Feed auth check failed on %s: %s title=%r remember_me=%s body_marker=%r", + page.url, + reason, + title, + remember_me, + " ".join(body_text.split())[:200], + exc_info=exc, + ) + + +async def _feed_auth_succeeds( + browser: BrowserManager, + *, + allow_remember_me: bool = True, +) -> bool: + """Validate that /feed/ loads without an auth barrier.""" + try: + await browser.page.goto( + "https://www.linkedin.com/feed/", + wait_until="domcontentloaded", + ) + await _stabilize_navigation("feed navigation") + if allow_remember_me: + if await resolve_remember_me_prompt(browser.page): + await _stabilize_navigation("remember-me resolution") + barrier = await detect_auth_barrier_quick(browser.page) + if barrier is not None: + await _log_feed_failure_context(browser, barrier) + return False + return True + except Exception as exc: + if allow_remember_me and await resolve_remember_me_prompt(browser.page): + await _stabilize_navigation("remember-me resolution after feed failure") + barrier = await 
detect_auth_barrier_quick(browser.page) + if barrier is None: + return True + await _log_feed_failure_context(browser, str(exc), exc) + return False + + +def _launch_options() -> tuple[dict[str, str], dict[str, int], object]: + config = get_config() + viewport = { + "width": config.browser.viewport_width, + "height": config.browser.viewport_height, + } + launch_options: dict[str, str] = {} + if config.browser.chrome_path: + launch_options["executable_path"] = config.browser.chrome_path + logger.info("Using custom Chrome path: %s", config.browser.chrome_path) + return launch_options, viewport, config + + +def _make_browser( + profile_dir: Path, + *, + launch_options: dict[str, str], + viewport: dict[str, int], +) -> BrowserManager: + config = get_config() + return BrowserManager( + user_data_dir=profile_dir, + headless=_headless, + slow_mo=config.browser.slow_mo, + user_agent=config.browser.user_agent, + viewport=viewport, + **launch_options, + ) + + +async def _authenticate_existing_profile( + profile_dir: Path, + *, + launch_options: dict[str, str], + viewport: dict[str, int], +) -> BrowserManager: + browser = _make_browser( + profile_dir, launch_options=launch_options, viewport=viewport + ) + await browser.start() + try: + if not await _feed_auth_succeeds(browser): + raise AuthenticationError( + f"Stored runtime profile is invalid: {profile_dir}. Run with --login to refresh the source session." 
+ ) + browser.is_authenticated = True + return browser + except Exception: + await browser.close() + raise + + +async def _bridge_runtime_profile( + profile_dir: Path, + *, + cookie_path: Path, + source_state: SourceState, + runtime_id: str, + launch_options: dict[str, str], + viewport: dict[str, int], +) -> BrowserManager: + clear_runtime_profile(runtime_id, get_source_profile_dir()) + profile_dir.parent.mkdir(parents=True, exist_ok=True) + storage_state_path = runtime_storage_state_path( + runtime_id, get_source_profile_dir() + ) + browser = _make_browser( + profile_dir, launch_options=launch_options, viewport=viewport + ) + await browser.start() + try: + await browser.page.goto( + "https://www.linkedin.com/feed/", wait_until="domcontentloaded" + ) + await _stabilize_navigation("pre-import feed navigation") + if not await browser.import_cookies(cookie_path): + raise AuthenticationError( + "Portable authentication could not be imported. Run with --login to create a fresh source session." + ) + await _stabilize_navigation("bridge cookie import") + if not await _feed_auth_succeeds(browser): + raise AuthenticationError( + "No authentication found. Run with --login to create a profile." + ) + await _stabilize_navigation("post-import feed validation") + if _debug_skip_checkpoint_restart(): + logger.warning( + "Skipping checkpoint restart for derived runtime profile %s " + "(LINKEDIN_DEBUG_SKIP_CHECKPOINT_RESTART enabled)", + profile_dir, + ) + browser.is_authenticated = True + return browser + if not await browser.export_storage_state(storage_state_path, indexed_db=True): + raise AuthenticationError( + "Derived runtime session could not be checkpointed. Run with --login to create a fresh source session." 
+ ) + await _stabilize_navigation("runtime storage-state export") + logger.info("Checkpoint-restarting derived runtime profile %s", profile_dir) + await browser.close() + reopened = _make_browser( + profile_dir, + launch_options=launch_options, + viewport=viewport, + ) + await reopened.start() + await _stabilize_navigation("derived profile reopen") + try: + if not await _feed_auth_succeeds(reopened): + logger.warning( + "Stored derived runtime profile failed post-commit validation" + ) + raise AuthenticationError( + "Derived runtime validation failed; no automatic re-bridge will be attempted. Run with --login to create a fresh source session." + ) + await _stabilize_navigation("post-reopen feed validation") + write_runtime_state( + runtime_id, + source_state, + storage_state_path, + get_source_profile_dir(), + ) + logger.info("Derived runtime profile committed for %s", runtime_id) + reopened.is_authenticated = True + return reopened + except Exception: + await reopened.close() + raise + except Exception: + await browser.close() + clear_runtime_profile(runtime_id, get_source_profile_dir()) + raise + + async def get_or_create_browser( headless: bool | None = None, ) -> BrowserManager: @@ -55,7 +294,7 @@ async def get_or_create_browser( Raises: AuthenticationError: If no valid authentication found """ - global _browser, _headless + global _browser, _browser_cookie_export_path, _headless if headless is not None: _headless = headless @@ -63,118 +302,116 @@ async def get_or_create_browser( if _browser is not None: return _browser - config = get_config() - user_data_dir = Path(config.browser.user_data_dir).expanduser() - viewport = { - "width": config.browser.viewport_width, - "height": config.browser.viewport_height, - } + launch_options, viewport, config = _launch_options() + source_profile_dir = get_profile_dir() + cookie_path = portable_cookie_path(source_profile_dir) + source_state = load_source_state(source_profile_dir) + if ( + not source_state + or not 
profile_exists(source_profile_dir) + or not cookie_path.exists() + ): + raise AuthenticationError( + "No source authentication found. Run with --login to create a profile." + ) - # Build launch options for custom browser path - launch_options: dict[str, str] = {} - if config.browser.chrome_path: - launch_options["executable_path"] = config.browser.chrome_path - logger.info("Using custom Chrome path: %s", config.browser.chrome_path) + current_runtime_id = get_runtime_id() - logger.info( - "Creating new browser (headless=%s, slow_mo=%sms, viewport=%sx%s, profile=%s)", - _headless, - config.browser.slow_mo, - viewport["width"], - viewport["height"], - user_data_dir, - ) - browser = BrowserManager( - user_data_dir=user_data_dir, - headless=_headless, - slow_mo=config.browser.slow_mo, - user_agent=config.browser.user_agent, - viewport=viewport, - **launch_options, - ) - await browser.start() - - # Navigate to LinkedIn to check authentication - await browser.page.goto("https://www.linkedin.com/feed/") - if await is_logged_in(browser.page): + if current_runtime_id == source_state.source_runtime_id: + logger.info( + "Using source profile for runtime %s (profile=%s)", + current_runtime_id, + source_profile_dir, + ) + browser = await _authenticate_existing_profile( + source_profile_dir, + launch_options=launch_options, + viewport=viewport, + ) _apply_browser_settings(browser) - _browser = browser # Assign only after auth succeeds + _browser = browser + _browser_cookie_export_path = cookie_path return _browser - # Native auth failed โ€” try the cross-platform cookie bridge. - # On macOSโ†’Linux, Chromium can't decrypt macOS-encrypted cookies in the - # persistent profile. We copy the profile to a temp dir (so the original - # isn't corrupted by Linux Chromium writing back), remove the undecryptable - # Cookies DB, and inject auth cookies from the portable JSON file. 
- cookie_path = user_data_dir.parent / "cookies.json" - if cookie_path.exists(): - logger.info("Native auth failed, attempting cross-platform cookie bridge...") - await browser.close() - - # Copy profile to temp dir โ€” protects the macOS original - temp_dir = Path(tempfile.mkdtemp(prefix="linkedin-mcp-")) - temp_profile = temp_dir / "profile" - shutil.copytree(user_data_dir, temp_profile) - - # Remove encrypted Cookies DB (can't be decrypted cross-platform) - (temp_profile / "Default" / "Cookies").unlink(missing_ok=True) - (temp_profile / "Default" / "Cookies-journal").unlink(missing_ok=True) + runtime_state = load_runtime_state(current_runtime_id, source_profile_dir) + derived_profile_dir = runtime_profile_dir(current_runtime_id, source_profile_dir) + storage_state_path = runtime_storage_state_path( + current_runtime_id, source_profile_dir + ) + generation_matches = ( + runtime_state is not None + and runtime_state.source_login_generation == source_state.login_generation + ) - browser = BrowserManager( - user_data_dir=temp_profile, - headless=_headless, - slow_mo=config.browser.slow_mo, - user_agent=config.browser.user_agent, + if ( + generation_matches + and profile_exists(derived_profile_dir) + and storage_state_path.exists() + ): + logger.info( + "Using derived runtime profile for %s (profile=%s)", + current_runtime_id, + derived_profile_dir, + ) + browser = await _authenticate_existing_profile( + derived_profile_dir, + launch_options=launch_options, viewport=viewport, - **launch_options, ) - await browser.start() - - # First nav establishes session cookies (bcookie, JSESSIONID, etc.) 
- await browser.page.goto("https://www.linkedin.com/feed/") - # Import auth cookies (li_at, li_rm) from the portable file - if await browser.import_cookies(cookie_path): - await browser.page.goto("https://www.linkedin.com/feed/") - if await is_logged_in(browser.page): - logger.info("Authentication recovered via portable cookies") - _apply_browser_settings(browser) - _browser = browser - return _browser - - # Auth failed โ€” clean up and fail fast - await browser.close() - raise AuthenticationError( - "No authentication found. Run with --login to create a profile." + _apply_browser_settings(browser) + _browser = browser + _browser_cookie_export_path = None + return _browser + + logger.info( + "Deriving runtime profile for %s from source generation %s", + current_runtime_id, + source_state.login_generation, ) + browser = await _bridge_runtime_profile( + derived_profile_dir, + cookie_path=cookie_path, + source_state=source_state, + runtime_id=current_runtime_id, + launch_options=launch_options, + viewport=viewport, + ) + _apply_browser_settings(browser) + _browser = browser + _browser_cookie_export_path = None + return _browser async def close_browser() -> None: """Close the browser and cleanup resources.""" - global _browser + global _browser, _browser_cookie_export_path - if _browser is not None: - logger.info("Closing browser...") - # Export cookies before closing to keep portable file fresh + browser = _browser + cookie_export_path = _browser_cookie_export_path + _browser = None + _browser_cookie_export_path = None + + if browser is None: + return + + logger.info("Closing browser...") + if cookie_export_path is not None: try: - await _browser.export_cookies() + await browser.export_cookies(cookie_export_path) except Exception: logger.debug("Cookie export on close skipped", exc_info=True) - await _browser.close() - _browser = None - logger.info("Browser closed") + await browser.close() + logger.info("Browser closed") def get_profile_dir() -> Path: """Get the 
resolved profile directory from config.""" - config = get_config() - return Path(config.browser.user_data_dir).expanduser() + return get_source_profile_dir() def profile_exists(profile_dir: Path | None = None) -> bool: """Check if a persistent browser profile exists and is non-empty.""" - if profile_dir is None: - profile_dir = get_profile_dir() - return profile_dir.is_dir() and any(profile_dir.iterdir()) + return session_profile_exists(profile_dir or get_profile_dir()) def set_headless(headless: bool) -> None: @@ -191,6 +428,8 @@ async def validate_session() -> bool: True if session is valid and user is logged in """ browser = await get_or_create_browser() + if browser.is_authenticated: + return True return await is_logged_in(browser.page) @@ -220,6 +459,7 @@ async def check_rate_limit() -> None: def reset_browser_for_testing() -> None: """Reset global browser state for test isolation.""" - global _browser, _headless + global _browser, _browser_cookie_export_path, _headless _browser = None + _browser_cookie_export_path = None _headless = True diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 012e0c70..232508e0 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -3,13 +3,18 @@ import asyncio from dataclasses import dataclass import logging +import os import re -from typing import Any +from typing import Any, Literal from urllib.parse import quote_plus from patchright.async_api import Page, TimeoutError as PlaywrightTimeoutError -from linkedin_mcp_server.core import detect_auth_barrier, detect_auth_barrier_quick +from linkedin_mcp_server.core import ( + detect_auth_barrier, + detect_auth_barrier_quick, + resolve_remember_me_prompt, +) from linkedin_mcp_server.core.exceptions import ( AuthenticationError, LinkedInScraperException, @@ -30,8 +35,11 @@ logger = logging.getLogger(__name__) +WaitUntil = Literal["commit", "domcontentloaded", "load", "networkidle"] + 
# Delay between page navigations to avoid rate limiting _NAV_DELAY = 2.0 +_NAV_STABILIZE_DELAY = 5.0 # Backoff before retrying a rate-limited page _RATE_LIMIT_RETRY_DELAY = 5.0 @@ -74,6 +82,18 @@ _SORT_BY_MAP = {"date": "DD", "relevance": "R"} +async def _stabilize_navigation(label: str) -> None: + """Pause between LinkedIn navigations to rule out timing issues.""" + if os.environ.get("PYTEST_CURRENT_TEST"): + return + logger.debug( + "Stabilizing navigation for %.1fs after %s", + _NAV_STABILIZE_DELAY, + label, + ) + await asyncio.sleep(_NAV_STABILIZE_DELAY) + + def _normalize_csv(value: str, mapping: dict[str, str]) -> str: """Normalize a comma-separated filter value using the provided mapping.""" parts = [v.strip() for v in value.split(",")] @@ -151,6 +171,59 @@ class LinkedInExtractor: def __init__(self, page: Page): self._page = page + @staticmethod + def _normalize_body_marker(value: Any) -> str: + """Compress body text into a short, single-line diagnostic marker.""" + if not isinstance(value, str): + return "" + return re.sub(r"\s+", " ", value).strip()[:200] + + async def _log_navigation_failure( + self, + target_url: str, + wait_until: str, + navigation_error: Exception, + hops: list[str], + ) -> None: + """Emit structured diagnostics for a failed target navigation.""" + try: + title = await self._page.title() + except Exception: + title = "" + + try: + auth_barrier = await detect_auth_barrier(self._page) + except Exception: + auth_barrier = None + + try: + remember_me_visible = ( + await self._page.locator("#rememberme-div").count() + ) > 0 + except Exception: + remember_me_visible = False + + try: + body_marker = self._normalize_body_marker( + await self._page.evaluate("() => document.body?.innerText || ''") + ) + except Exception: + body_marker = "" + + logger.warning( + "Navigation to %s failed (wait_until=%s, error=%s). 
" + "current_url=%s title=%r auth_barrier=%s remember_me=%s hops=%s body_marker=%r", + target_url, + wait_until, + navigation_error, + self._page.url, + title, + auth_barrier, + remember_me_visible, + hops, + body_marker, + ) + async def _raise_if_auth_barrier( self, url: str, @@ -171,23 +244,65 @@ async def _raise_if_auth_barrier( raise AuthenticationError(message) from navigation_error raise AuthenticationError(message) - async def _navigate_to_page(self, url: str) -> None: + async def _goto_with_auth_checks( + self, + url: str, + *, + wait_until: WaitUntil = "domcontentloaded", + allow_remember_me: bool = True, + ) -> None: """Navigate to a LinkedIn page and fail fast on auth barriers.""" + hops: list[str] = [] + + def record_navigation(frame: Any) -> None: + if frame != self._page.main_frame: + return + frame_url = getattr(frame, "url", "") + if frame_url and (not hops or hops[-1] != frame_url): + hops.append(frame_url) + + self._page.on("framenavigated", record_navigation) try: - await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) - except Exception as exc: - await self._raise_if_auth_barrier(url, navigation_error=exc) - raise + try: + await self._page.goto(url, wait_until=wait_until, timeout=30000) + await _stabilize_navigation(f"goto {url}") + except Exception as exc: + if allow_remember_me and await resolve_remember_me_prompt(self._page): + await _stabilize_navigation(f"remember-me resolution for {url}") + await self._goto_with_auth_checks( + url, + wait_until=wait_until, + allow_remember_me=False, + ) + return + await self._log_navigation_failure(url, wait_until, exc, hops) + await self._raise_if_auth_barrier(url, navigation_error=exc) + raise - barrier = await detect_auth_barrier_quick(self._page) - if not barrier: - return + barrier = await detect_auth_barrier_quick(self._page) + if not barrier: + return - logger.warning("Authentication barrier detected on %s: %s", url, barrier) - raise AuthenticationError( - "LinkedIn requires 
interactive re-authentication. " - "Run with --login and complete the account selection/sign-in flow." - ) + if allow_remember_me and await resolve_remember_me_prompt(self._page): + await _stabilize_navigation(f"remember-me retry for {url}") + await self._goto_with_auth_checks( + url, + wait_until=wait_until, + allow_remember_me=False, + ) + return + + logger.warning("Authentication barrier detected on %s: %s", url, barrier) + raise AuthenticationError( + "LinkedIn requires interactive re-authentication. " + "Run with --login and complete the account selection/sign-in flow." + ) + finally: + self._page.remove_listener("framenavigated", record_navigation) + + async def _navigate_to_page(self, url: str) -> None: + """Navigate to a LinkedIn page and fail fast on auth barriers.""" + await self._goto_with_auth_checks(url) async def extract_page( self, diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index f111c225..11025d2a 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -27,7 +27,11 @@ @lifespan async def browser_lifespan(app: FastMCP) -> AsyncIterator[dict[str, Any]]: - """Manage browser lifecycle โ€” cleanup on shutdown.""" + """Manage browser lifecycle โ€” cleanup on shutdown. + + Derived runtime durability must not depend on this hook. Docker runtime + sessions are checkpoint-committed when they are created. 
+ """ logger.info("LinkedIn MCP Server starting...") yield {} logger.info("LinkedIn MCP Server shutting down...") diff --git a/linkedin_mcp_server/session_state.py b/linkedin_mcp_server/session_state.py new file mode 100644 index 00000000..8c46b49f --- /dev/null +++ b/linkedin_mcp_server/session_state.py @@ -0,0 +1,275 @@ +"""Runtime-aware authentication state for cross-platform profile reuse.""" + +from __future__ import annotations + +from dataclasses import asdict, dataclass +from datetime import UTC, datetime +import json +import logging +import platform +from pathlib import Path +import shutil +from typing import Any +from uuid import uuid4 + +from linkedin_mcp_server.config import get_config + +logger = logging.getLogger(__name__) + +_SOURCE_STATE_FILE = "source-state.json" +_RUNTIME_STATE_FILE = "runtime-state.json" +_RUNTIME_PROFILES_DIR = "runtime-profiles" + + +@dataclass +class SourceState: + version: int + source_runtime_id: str + login_generation: str + created_at: str + profile_path: str + cookies_path: str + + +@dataclass +class RuntimeState: + version: int + runtime_id: str + source_runtime_id: str + source_login_generation: str + created_at: str + committed_at: str + profile_path: str + storage_state_path: str + commit_method: str + + +def get_source_profile_dir() -> Path: + """Return the configured source profile directory.""" + return Path(get_config().browser.user_data_dir).expanduser() + + +def auth_root_dir(source_profile_dir: Path | None = None) -> Path: + """Return the root directory containing auth artifacts.""" + profile_dir = source_profile_dir or get_source_profile_dir() + return profile_dir.expanduser().resolve().parent + + +def portable_cookie_path(source_profile_dir: Path | None = None) -> Path: + """Return the portable cookie export path.""" + return auth_root_dir(source_profile_dir) / "cookies.json" + + +def source_state_path(source_profile_dir: Path | None = None) -> Path: + """Return the source session metadata path.""" + return 
auth_root_dir(source_profile_dir) / _SOURCE_STATE_FILE + + +def runtime_profiles_root(source_profile_dir: Path | None = None) -> Path: + """Return the root directory for derived runtime profiles.""" + return auth_root_dir(source_profile_dir) / _RUNTIME_PROFILES_DIR + + +def runtime_dir(runtime_id: str, source_profile_dir: Path | None = None) -> Path: + """Return the directory for one runtime's derived session.""" + return runtime_profiles_root(source_profile_dir) / runtime_id + + +def runtime_profile_dir( + runtime_id: str, source_profile_dir: Path | None = None +) -> Path: + """Return the profile directory for one runtime's derived session.""" + return runtime_dir(runtime_id, source_profile_dir) / "profile" + + +def runtime_state_path(runtime_id: str, source_profile_dir: Path | None = None) -> Path: + """Return the metadata path for one runtime's derived session.""" + return runtime_dir(runtime_id, source_profile_dir) / _RUNTIME_STATE_FILE + + +def runtime_storage_state_path( + runtime_id: str, source_profile_dir: Path | None = None +) -> Path: + """Return the storage-state snapshot path for one runtime's derived session.""" + return runtime_dir(runtime_id, source_profile_dir) / "storage-state.json" + + +def profile_exists(profile_dir: Path | None = None) -> bool: + """Check if a browser profile directory exists and is non-empty.""" + profile_dir = (profile_dir or get_source_profile_dir()).expanduser() + return profile_dir.is_dir() and any(profile_dir.iterdir()) + + +def get_runtime_id() -> str: + """Return a deterministic identity for the current browser runtime.""" + os_name = _normalize_os(platform.system()) + arch = _normalize_arch(platform.machine()) + runtime_kind = "container" if _is_container_runtime() else "host" + return f"{os_name}-{arch}-{runtime_kind}" + + +def _normalize_os(system: str) -> str: + mapping = { + "Darwin": "macos", + "Linux": "linux", + "Windows": "windows", + } + return mapping.get(system, system.lower() or "unknown") + + +def 
_normalize_arch(machine: str) -> str: + value = machine.lower() + if value in {"x86_64", "amd64"}: + return "amd64" + if value in {"arm64", "aarch64"}: + return "arm64" + return value or "unknown" + + +def _is_container_runtime() -> bool: + if Path("/.dockerenv").exists(): + return True + + cgroup = Path("/proc/1/cgroup") + if cgroup.exists(): + try: + text = cgroup.read_text() + except OSError: + text = "" + markers = ("docker", "containerd", "kubepods", "podman") + if any(marker in text for marker in markers): + return True + + return False + + +def load_source_state(source_profile_dir: Path | None = None) -> SourceState | None: + """Load the source session metadata if present.""" + data = _load_json(source_state_path(source_profile_dir)) + if not data: + return None + try: + return SourceState(**data) + except TypeError: + logger.warning("Ignoring invalid source-state.json") + return None + + +def write_source_state(source_profile_dir: Path | None = None) -> SourceState: + """Write a fresh source session generation after successful login.""" + profile_dir = ( + (source_profile_dir or get_source_profile_dir()).expanduser().resolve() + ) + state = SourceState( + version=1, + source_runtime_id=get_runtime_id(), + login_generation=str(uuid4()), + created_at=_utcnow(), + profile_path=str(profile_dir), + cookies_path=str(portable_cookie_path(profile_dir)), + ) + _write_json(source_state_path(profile_dir), asdict(state)) + return state + + +def load_runtime_state( + runtime_id: str, source_profile_dir: Path | None = None +) -> RuntimeState | None: + """Load one derived runtime's metadata if present.""" + data = _load_json(runtime_state_path(runtime_id, source_profile_dir)) + if not data: + return None + try: + return RuntimeState(**data) + except TypeError: + logger.warning("Ignoring invalid runtime-state.json for %s", runtime_id) + return None + + +def write_runtime_state( + runtime_id: str, + source_state: SourceState, + storage_state_path: Path, + 
source_profile_dir: Path | None = None, + *, + commit_method: str = "checkpoint_restart", +) -> RuntimeState: + """Write metadata for a derived runtime session.""" + profile_dir = runtime_profile_dir(runtime_id, source_profile_dir).resolve() + committed_at = _utcnow() + state = RuntimeState( + version=1, + runtime_id=runtime_id, + source_runtime_id=source_state.source_runtime_id, + source_login_generation=source_state.login_generation, + created_at=committed_at, + committed_at=committed_at, + profile_path=str(profile_dir), + storage_state_path=str(storage_state_path.resolve()), + commit_method=commit_method, + ) + _write_json(runtime_state_path(runtime_id, source_profile_dir), asdict(state)) + return state + + +def clear_runtime_profile( + runtime_id: str, source_profile_dir: Path | None = None +) -> bool: + """Remove one derived runtime profile and its metadata.""" + target = runtime_dir(runtime_id, source_profile_dir) + if not target.exists(): + return True + try: + shutil.rmtree(target) + return True + except OSError as exc: + logger.warning("Could not clear runtime profile %s: %s", target, exc) + return False + + +def clear_auth_state(source_profile_dir: Path | None = None) -> bool: + """Remove source auth artifacts and all derived runtime profiles.""" + profile_dir = (source_profile_dir or get_source_profile_dir()).expanduser() + targets = [ + profile_dir, + portable_cookie_path(profile_dir), + source_state_path(profile_dir), + runtime_profiles_root(profile_dir), + ] + + success = True + for target in targets: + if not target.exists(): + continue + try: + if target.is_dir(): + shutil.rmtree(target) + else: + target.unlink() + except OSError as exc: + logger.warning("Could not clear auth artifact %s: %s", target, exc) + success = False + return success + + +def _load_json(path: Path) -> dict[str, Any] | None: + if not path.exists(): + return None + try: + data = json.loads(path.read_text()) + except (OSError, json.JSONDecodeError): + logger.warning("Ignoring 
unreadable auth state file: %s", path) + return None + if not isinstance(data, dict): + logger.warning("Ignoring malformed auth state file: %s", path) + return None + return data + + +def _write_json(path: Path, payload: dict[str, Any]) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(json.dumps(payload, indent=2, sort_keys=True) + "\n") + + +def _utcnow() -> str: + return datetime.now(UTC).replace(microsecond=0).isoformat().replace("+00:00", "Z") diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index 1d19ea76..407b4fed 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -10,9 +10,11 @@ from linkedin_mcp_server.core import ( BrowserManager, + resolve_remember_me_prompt, wait_for_manual_login, warm_up_browser, ) +from linkedin_mcp_server.session_state import write_source_state from linkedin_mcp_server.drivers.browser import get_profile_dir @@ -52,6 +54,13 @@ async def interactive_login( # Navigate to LinkedIn login await browser.page.goto("https://www.linkedin.com/login") + # Let LinkedIn finish rendering the saved-account chooser, then retry the + # same exact click target a few times before falling back to the normal + # manual-login wait loop. + for _ in range(3): + await asyncio.sleep(2) + if await resolve_remember_me_prompt(browser.page): + break # Wait for manual login completion # 5 minute timeout (300000ms) allows time for 2FA, captcha, security challenges @@ -68,10 +77,14 @@ async def interactive_login( print(" Waiting longer for cookie propagation...") await asyncio.sleep(5) - # Export cookies for cross-platform portability (macOS -> Docker) + # Export source-session cookies for the one-time foreign-runtime bridge. + # Docker now checkpoint-commits its own derived runtime profile after the + # first successful /feed/ recovery instead of relying on browser teardown. 
if await browser.export_cookies(): print(" Cookies exported for Docker portability") + source_state = write_source_state(user_data_dir) + print(f" Source session generation: {source_state.login_generation}") print(f"Profile saved to {user_data_dir}") return True diff --git a/scripts/debug_cookie_bridge.py b/scripts/debug_cookie_bridge.py new file mode 100644 index 00000000..0d4b7d59 --- /dev/null +++ b/scripts/debug_cookie_bridge.py @@ -0,0 +1,345 @@ +"""Manual cookie-bridge debugger for cross-platform LinkedIn sessions. + +This script is intentionally not part of the automated test suite. Use it +sparingly to inspect how a host-authenticated session behaves when replayed +into a fresh browser profile, including Docker/Linux runs. +""" + +from __future__ import annotations + +import argparse +import asyncio +import json +import re +import shutil +import tempfile +from pathlib import Path +from typing import Any, cast + +from patchright._impl._api_structures import SetCookieParam + +from linkedin_mcp_server.core.auth import detect_auth_barrier, is_logged_in +from linkedin_mcp_server.core.browser import BrowserManager + + +DEFAULT_TARGET_URL = "https://www.linkedin.com/in/williamhgates/" +_SETTLE_DELAY_SECONDS = 10.0 + +COOKIE_PRESETS: dict[str, set[str] | None] = { + "li_at_only": {"li_at"}, + "auth_minimal": {"li_at", "JSESSIONID", "bcookie", "bscookie", "lidc"}, + "auth_only": {"li_at", "li_rm"}, + "bridge_core": { + "li_at", + "li_rm", + "JSESSIONID", + "bcookie", + "bscookie", + "liap", + "lidc", + "li_gc", + "lang", + "timezone", + "li_mc", + }, + "full": None, +} + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + "--cookie-path", + type=Path, + default=Path.home() / ".linkedin-mcp" / "cookies.json", + help="Path to portable LinkedIn cookie JSON", + ) + parser.add_argument( + "--candidate", + choices=sorted(COOKIE_PRESETS), + default="bridge_core", + help="Cookie subset to replay", + ) + 
parser.add_argument( + "--target-url", + default=DEFAULT_TARGET_URL, + help="Authenticated page to probe after bridge replay", + ) + parser.add_argument( + "--pre-nav", + action=argparse.BooleanOptionalAction, + default=True, + help="Navigate to /feed before importing cookies", + ) + parser.add_argument( + "--clear-existing", + action=argparse.BooleanOptionalAction, + default=False, + help="Clear fresh browser cookies before import", + ) + parser.add_argument( + "--body-lines", + type=int, + default=20, + help="Number of non-empty body lines to include in the report", + ) + parser.add_argument( + "--output", + type=Path, + help="Optional path to write JSON report", + ) + parser.add_argument( + "--artifact-dir", + type=Path, + help="Optional directory for screenshots and other debug artifacts", + ) + parser.add_argument( + "--checkpoint-restart", + action=argparse.BooleanOptionalAction, + default=True, + help="Close and reopen the same profile after a successful bridge replay", + ) + return parser.parse_args() + + +def load_portable_cookies( + cookie_path: Path, + candidate: str, +) -> list[dict[str, Any]]: + all_cookies = json.loads(cookie_path.read_text()) + normalized = [ + BrowserManager._normalize_cookie_domain(cookie) + for cookie in all_cookies + if "linkedin.com" in cookie.get("domain", "") + ] + keep_names = COOKIE_PRESETS[candidate] + if keep_names is None: + return normalized + return [cookie for cookie in normalized if cookie.get("name") in keep_names] + + +async def capture_page_state(page, *, body_lines: int) -> dict[str, Any]: + try: + title = await page.title() + except Exception as exc: # pragma: no cover - best effort diagnostics + title = f"" + + try: + body_text = await page.locator("body").inner_text(timeout=3000) + except Exception as exc: # pragma: no cover - best effort diagnostics + body_text = f"" + + body_lines_trimmed = [] + if isinstance(body_text, str) and not body_text.startswith(" str: + return re.sub(r"[^a-z0-9]+", "-", 
step.lower()).strip("-") + + +def _resolve_artifact_dir(args: argparse.Namespace) -> Path | None: + if args.artifact_dir: + return args.artifact_dir.expanduser().resolve() + if args.output: + return args.output.expanduser().resolve().with_suffix("").parent / ( + args.output.stem + "_artifacts" + ) + return None + + +async def capture_screenshot(page, step: str, artifact_dir: Path | None) -> str | None: + if artifact_dir is None: + return None + + artifact_dir.mkdir(parents=True, exist_ok=True) + path = artifact_dir / f"{_slugify_step(step)}.png" + try: + await page.screenshot(path=str(path), full_page=True) + return str(path) + except Exception as exc: # pragma: no cover - best effort diagnostics + return f"" + + +async def safe_goto(page, url: str) -> dict[str, Any]: + try: + await page.goto(url, wait_until="domcontentloaded", timeout=15000) + return {"ok": True} + except Exception as exc: # pragma: no cover - best effort diagnostics + return {"ok": False, "error": f"{type(exc).__name__}: {exc}"} + + +async def settle_page(page) -> None: + """Give LinkedIn time to finish redirects and hydrate content.""" + await asyncio.sleep(_SETTLE_DELAY_SECONDS) + try: + await page.wait_for_load_state("networkidle", timeout=5000) + except Exception: # pragma: no cover - best effort diagnostics + pass + await asyncio.sleep(1) + + +async def _capture_step( + report: dict[str, Any], + page, + *, + step: str, + body_lines: int, + artifact_dir: Path | None, +) -> None: + await settle_page(page) + report[f"{step}_screenshot"] = await capture_screenshot(page, step, artifact_dir) + report[step] = await capture_page_state(page, body_lines=body_lines) + + +async def run_debug(args: argparse.Namespace) -> dict[str, Any]: + imported_cookies = load_portable_cookies(args.cookie_path, args.candidate) + artifact_dir = _resolve_artifact_dir(args) + + temp_dir = Path(tempfile.mkdtemp(prefix="linkedin-cookie-debug-")) + profile_dir = temp_dir / "profile" + + report: dict[str, Any] = { + 
"cookie_path": str(args.cookie_path), + "candidate": args.candidate, + "import_cookie_names": [cookie["name"] for cookie in imported_cookies], + "pre_nav": args.pre_nav, + "clear_existing": args.clear_existing, + "checkpoint_restart": args.checkpoint_restart, + "target_url": args.target_url, + "temp_profile_dir": str(profile_dir), + } + if artifact_dir is not None: + report["artifact_dir"] = str(artifact_dir) + + browser = BrowserManager(user_data_dir=profile_dir, headless=True) + try: + await browser.start() + await _capture_step( + report, + browser.page, + step="start", + body_lines=args.body_lines, + artifact_dir=artifact_dir, + ) + + if args.pre_nav: + report["pre_nav_result"] = await safe_goto( + browser.page, + "https://www.linkedin.com/feed/", + ) + await _capture_step( + report, + browser.page, + step="after_pre_nav", + body_lines=args.body_lines, + artifact_dir=artifact_dir, + ) + + if args.clear_existing: + await browser.context.clear_cookies() + + await browser.context.add_cookies(cast(list[SetCookieParam], imported_cookies)) + await _capture_step( + report, + browser.page, + step="after_import", + body_lines=args.body_lines, + artifact_dir=artifact_dir, + ) + + report["feed_nav_result"] = await safe_goto( + browser.page, + "https://www.linkedin.com/feed/", + ) + await _capture_step( + report, + browser.page, + step="after_feed_nav", + body_lines=args.body_lines, + artifact_dir=artifact_dir, + ) + + report["target_nav_result"] = await safe_goto(browser.page, args.target_url) + await _capture_step( + report, + browser.page, + step="after_target_nav", + body_lines=args.body_lines, + artifact_dir=artifact_dir, + ) + + if args.checkpoint_restart: + storage_state_path = temp_dir / "storage-state.json" + report["storage_state_exported"] = await browser.export_storage_state( + storage_state_path, indexed_db=True + ) + report["storage_state_path"] = str(storage_state_path) + await browser.close() + + reopened = BrowserManager(user_data_dir=profile_dir, 
headless=True) + try: + await reopened.start() + report["reopened_feed_nav_result"] = await safe_goto( + reopened.page, + "https://www.linkedin.com/feed/", + ) + await _capture_step( + report, + reopened.page, + step="after_reopened_feed_nav", + body_lines=args.body_lines, + artifact_dir=artifact_dir, + ) + + report["reopened_target_nav_result"] = await safe_goto( + reopened.page, + args.target_url, + ) + await _capture_step( + report, + reopened.page, + step="after_reopened_target_nav", + body_lines=args.body_lines, + artifact_dir=artifact_dir, + ) + finally: + await reopened.close() + return report + finally: + await browser.close() + shutil.rmtree(temp_dir, ignore_errors=True) + + +def main() -> None: + args = parse_args() + report = asyncio.run(run_debug(args)) + rendered = json.dumps(report, indent=2, ensure_ascii=True) + if args.output: + args.output.write_text(rendered + "\n") + print(rendered) + + +if __name__ == "__main__": + main() diff --git a/tests/conftest.py b/tests/conftest.py index cddf48e9..a329336c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -25,6 +25,7 @@ def isolate_profile_dir(tmp_path, monkeypatch): "linkedin_mcp_server.authentication", "linkedin_mcp_server.cli_main", "linkedin_mcp_server.setup", + "linkedin_mcp_server.session_state", ]: try: monkeypatch.setattr(f"{module}.DEFAULT_PROFILE_DIR", fake_profile) @@ -43,6 +44,26 @@ def isolate_profile_dir(tmp_path, monkeypatch): except AttributeError: pass + try: + monkeypatch.setattr( + "linkedin_mcp_server.session_state.get_source_profile_dir", + lambda: fake_profile, + ) + except AttributeError: + pass + + for source_module in [ + "linkedin_mcp_server.authentication", + "linkedin_mcp_server.drivers.browser", + ]: + try: + monkeypatch.setattr( + f"{source_module}.get_source_profile_dir", + lambda: fake_profile, + ) + except AttributeError: + pass + return fake_profile diff --git a/tests/test_authentication.py b/tests/test_authentication.py index de1c146e..96fcb6c3 100644 --- 
a/tests/test_authentication.py +++ b/tests/test_authentication.py @@ -1,58 +1,75 @@ +import json + import pytest -from linkedin_mcp_server.authentication import clear_profile, get_authentication_source +from linkedin_mcp_server.authentication import ( + clear_auth_state, + clear_profile, + get_authentication_source, +) from linkedin_mcp_server.drivers.browser import profile_exists from linkedin_mcp_server.exceptions import CredentialsNotFoundError +from linkedin_mcp_server.session_state import ( + portable_cookie_path, + runtime_profile_dir, + runtime_state_path, + source_state_path, +) -# --- profile_exists() tests --- +def _write_source_metadata(profile_dir, *, runtime_id="macos-arm64-host"): + portable_cookie_path(profile_dir).write_text( + json.dumps([{"name": "li_at", "domain": ".linkedin.com"}]) + ) + source_state_path(profile_dir).write_text( + json.dumps( + { + "version": 1, + "source_runtime_id": runtime_id, + "login_generation": "gen-1", + "created_at": "2026-03-12T17:00:00Z", + "profile_path": str(profile_dir), + "cookies_path": str(portable_cookie_path(profile_dir)), + } + ) + ) def test_profile_exists_missing_dir(tmp_path): - """Missing directory returns False.""" assert profile_exists(tmp_path / "nonexistent") is False def test_profile_exists_empty_dir(tmp_path): - """Empty directory returns False.""" empty = tmp_path / "empty" empty.mkdir() assert profile_exists(empty) is False def test_profile_exists_non_empty_dir(profile_dir): - """Non-empty directory returns True.""" assert profile_exists(profile_dir) is True def test_profile_exists_file_path(tmp_path): - """A file (not directory) returns False.""" - f = tmp_path / "not_a_dir" - f.write_text("data") - assert profile_exists(f) is False + file_path = tmp_path / "not_a_dir" + file_path.write_text("data") + assert profile_exists(file_path) is False -# --- get_authentication_source() tests --- +def test_get_authentication_source_requires_metadata(profile_dir): + with 
pytest.raises(CredentialsNotFoundError, match="source session metadata"): + get_authentication_source() -def test_get_auth_source_profile(profile_dir, monkeypatch): - monkeypatch.setattr( - "linkedin_mcp_server.authentication.profile_exists", lambda _dir=None: True - ) +def test_get_authentication_source_accepts_source_session(profile_dir): + _write_source_metadata(profile_dir) assert get_authentication_source() is True -def test_get_auth_source_none_raises(monkeypatch): - monkeypatch.setattr( - "linkedin_mcp_server.authentication.profile_exists", lambda _dir=None: False - ) +def test_get_authentication_source_none_raises(isolate_profile_dir): with pytest.raises(CredentialsNotFoundError): get_authentication_source() -# --- clear_profile() tests --- - - def test_clear_profile_removes_dir(profile_dir): assert profile_dir.exists() result = clear_profile(profile_dir) @@ -60,6 +77,25 @@ def test_clear_profile_removes_dir(profile_dir): assert not profile_dir.exists() -def test_clear_profile_no_dir(isolate_profile_dir): - result = clear_profile(isolate_profile_dir) - assert result is True # No error even if dir doesn't exist +def test_clear_auth_state_removes_source_and_runtime_files(profile_dir): + _write_source_metadata(profile_dir) + runtime_profile = runtime_profile_dir("linux-amd64-container", profile_dir) + runtime_profile.mkdir(parents=True) + runtime_state_path("linux-amd64-container", profile_dir).write_text( + json.dumps( + { + "version": 1, + "runtime_id": "linux-amd64-container", + "source_runtime_id": "macos-arm64-host", + "source_login_generation": "gen-1", + "created_at": "2026-03-12T17:10:00Z", + "profile_path": str(runtime_profile), + } + ) + ) + + assert clear_auth_state(profile_dir) is True + assert not profile_dir.exists() + assert not portable_cookie_path(profile_dir).exists() + assert not source_state_path(profile_dir).exists() + assert not runtime_profile_dir("linux-amd64-container", profile_dir).exists() diff --git a/tests/test_browser_driver.py 
b/tests/test_browser_driver.py index 1b5740be..27ac71bc 100644 --- a/tests/test_browser_driver.py +++ b/tests/test_browser_driver.py @@ -1,5 +1,6 @@ -"""Tests for linkedin_mcp_server.drivers.browser singleton lifecycle.""" +"""Tests for linkedin_mcp_server.drivers.browser runtime-aware auth startup.""" +import json from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -9,11 +10,17 @@ get_or_create_browser, reset_browser_for_testing, ) +from linkedin_mcp_server.session_state import ( + portable_cookie_path, + runtime_profile_dir, + runtime_state_path, + runtime_storage_state_path, + source_state_path, +) @pytest.fixture(autouse=True) def _reset_browser(): - """Ensure clean singleton state for each test.""" reset_browser_for_testing() yield reset_browser_for_testing() @@ -21,7 +28,6 @@ def _reset_browser(): @pytest.fixture(autouse=True) def _mock_config(monkeypatch, tmp_path): - """Provide a test config so get_config() never triggers argparse.""" config = AppConfig() config.browser.user_data_dir = str(tmp_path / "profile") monkeypatch.setattr( @@ -29,91 +35,407 @@ def _mock_config(monkeypatch, tmp_path): ) -def _make_mock_browser(*, logged_in: bool = True) -> MagicMock: - """Create a mock BrowserManager with controllable login state.""" +def _make_mock_browser() -> MagicMock: browser = MagicMock() browser.start = AsyncMock() browser.close = AsyncMock() browser.page = MagicMock() + browser.page.url = "https://www.linkedin.com/feed/" browser.page.goto = AsyncMock() browser.page.set_default_timeout = MagicMock() + browser.page.title = AsyncMock(return_value="LinkedIn") + browser.page.evaluate = AsyncMock(return_value="Feed") + locator = MagicMock() + locator.count = AsyncMock(return_value=0) + browser.page.locator = MagicMock(return_value=locator) browser.import_cookies = AsyncMock(return_value=False) browser.export_cookies = AsyncMock(return_value=False) + browser.export_storage_state = AsyncMock(return_value=True) return browser +def 
_write_source_state(tmp_path, *, runtime_id: str, login_generation: str = "gen-1"): + profile_dir = tmp_path / "profile" + profile_dir.mkdir(parents=True, exist_ok=True) + (profile_dir / "Default").mkdir(parents=True, exist_ok=True) + (profile_dir / "Default" / "Cookies").write_text("placeholder") + portable_cookie_path(profile_dir).write_text( + json.dumps([{"name": "li_at", "domain": ".linkedin.com"}]) + ) + source_state_path(profile_dir).write_text( + json.dumps( + { + "version": 1, + "source_runtime_id": runtime_id, + "login_generation": login_generation, + "created_at": "2026-03-12T17:00:00Z", + "profile_path": str(profile_dir), + "cookies_path": str(portable_cookie_path(profile_dir)), + } + ) + ) + return profile_dir + + +def _write_runtime_state( + tmp_path, + runtime_id: str, + *, + source_runtime_id: str = "macos-arm64-host", + source_login_generation: str = "gen-1", + with_storage_state: bool = True, +): + profile_dir = runtime_profile_dir(runtime_id, tmp_path / "profile") + profile_dir.mkdir(parents=True, exist_ok=True) + (profile_dir / "Default").mkdir(parents=True, exist_ok=True) + (profile_dir / "Default" / "Cookies").write_text("placeholder") + storage_state_path = runtime_storage_state_path(runtime_id, tmp_path / "profile") + if with_storage_state: + storage_state_path.parent.mkdir(parents=True, exist_ok=True) + storage_state_path.write_text("{}") + runtime_state_path(runtime_id, tmp_path / "profile").write_text( + json.dumps( + { + "version": 1, + "runtime_id": runtime_id, + "source_runtime_id": source_runtime_id, + "source_login_generation": source_login_generation, + "created_at": "2026-03-12T17:10:00Z", + "committed_at": "2026-03-12T17:10:05Z", + "profile_path": str(profile_dir), + "storage_state_path": str(storage_state_path), + "commit_method": "checkpoint_restart", + } + ) + ) + return profile_dir + + +@pytest.mark.asyncio +async def test_get_or_create_browser_requires_source_state(): + from linkedin_mcp_server.core import AuthenticationError 
+ + with pytest.raises(AuthenticationError): + await get_or_create_browser() + + +@pytest.mark.asyncio +async def test_same_runtime_uses_source_profile(tmp_path): + _write_source_state(tmp_path, runtime_id="macos-arm64-host") + source_browser = _make_mock_browser() + + with ( + patch( + "linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="macos-arm64-host", + ), + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + return_value=source_browser, + ) as ctor, + patch( + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", + new_callable=AsyncMock, + return_value=None, + ), + ): + result = await get_or_create_browser() + + assert result is source_browser + ctor.assert_called_once() + assert ctor.call_args.kwargs["user_data_dir"] == tmp_path / "profile" + source_browser.import_cookies.assert_not_awaited() + + @pytest.mark.asyncio -async def test_get_or_create_browser_auth_success(monkeypatch): - """Successful auth assigns singleton and returns browser.""" - mock_browser = _make_mock_browser() +async def test_same_runtime_clicks_remember_me_during_feed_validation(tmp_path): + _write_source_state(tmp_path, runtime_id="macos-arm64-host") + source_browser = _make_mock_browser() with ( + patch( + "linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="macos-arm64-host", + ), patch( "linkedin_mcp_server.drivers.browser.BrowserManager", - return_value=mock_browser, + return_value=source_browser, ), patch( - "linkedin_mcp_server.drivers.browser.is_logged_in", + "linkedin_mcp_server.drivers.browser.resolve_remember_me_prompt", new_callable=AsyncMock, return_value=True, + ) as remember_me, + patch( + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", + new_callable=AsyncMock, + return_value=None, ), ): result = await get_or_create_browser() - assert result is mock_browser - mock_browser.start.assert_awaited_once() - mock_browser.page.goto.assert_awaited_once() + assert result is source_browser + assert 
source_browser.page.goto.await_count == 1 + assert remember_me.await_count == 1 @pytest.mark.asyncio -async def test_get_or_create_browser_auth_failure_cleans_up(monkeypatch): - """Failed auth closes browser and does NOT assign singleton.""" +async def test_derived_runtime_reuses_matching_committed_profile(tmp_path): + _write_source_state(tmp_path, runtime_id="macos-arm64-host") + derived_profile = _write_runtime_state(tmp_path, "linux-amd64-container") + derived_browser = _make_mock_browser() + + with ( + patch( + "linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="linux-amd64-container", + ), + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + return_value=derived_browser, + ) as ctor, + patch( + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", + new_callable=AsyncMock, + return_value=None, + ), + ): + result = await get_or_create_browser() + + assert result is derived_browser + assert ctor.call_args.kwargs["user_data_dir"] == derived_profile + derived_browser.import_cookies.assert_not_awaited() + derived_browser.export_storage_state.assert_not_awaited() + + +@pytest.mark.asyncio +async def test_missing_derived_runtime_bridges_and_checkpoint_commits(tmp_path): + _write_source_state( + tmp_path, runtime_id="macos-arm64-host", login_generation="gen-2" + ) + first_browser = _make_mock_browser() + first_browser.import_cookies = AsyncMock(return_value=True) + reopened_browser = _make_mock_browser() + + with ( + patch( + "linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="linux-amd64-container", + ), + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + side_effect=[first_browser, reopened_browser], + ) as ctor, + patch( + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", + new_callable=AsyncMock, + return_value=None, + ), + ): + result = await get_or_create_browser() + + expected_profile = runtime_profile_dir( + "linux-amd64-container", tmp_path / "profile" + ) + 
expected_storage = runtime_storage_state_path( + "linux-amd64-container", tmp_path / "profile" + ) + assert result is reopened_browser + assert ctor.call_count == 2 + assert ctor.call_args_list[0].kwargs["user_data_dir"] == expected_profile + assert ctor.call_args_list[1].kwargs["user_data_dir"] == expected_profile + first_browser.import_cookies.assert_awaited_once_with( + portable_cookie_path(tmp_path / "profile") + ) + first_browser.export_storage_state.assert_awaited_once_with( + expected_storage, + indexed_db=True, + ) + first_browser.close.assert_awaited_once() + runtime_state = json.loads( + runtime_state_path("linux-amd64-container", tmp_path / "profile").read_text() + ) + assert runtime_state["source_login_generation"] == "gen-2" + assert runtime_state["storage_state_path"] == str(expected_storage.resolve()) + + +@pytest.mark.asyncio +async def test_debug_skip_checkpoint_restart_keeps_fresh_bridged_browser( + tmp_path, monkeypatch +): + _write_source_state( + tmp_path, runtime_id="macos-arm64-host", login_generation="gen-2" + ) + first_browser = _make_mock_browser() + first_browser.import_cookies = AsyncMock(return_value=True) + monkeypatch.setenv("LINKEDIN_DEBUG_SKIP_CHECKPOINT_RESTART", "1") + + with ( + patch( + "linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="linux-amd64-container", + ), + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + return_value=first_browser, + ) as ctor, + patch( + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", + new_callable=AsyncMock, + return_value=None, + ), + ): + result = await get_or_create_browser() + + assert result is first_browser + assert ctor.call_count == 1 + first_browser.import_cookies.assert_awaited_once_with( + portable_cookie_path(tmp_path / "profile") + ) + first_browser.export_storage_state.assert_not_awaited() + first_browser.close.assert_not_awaited() + assert not runtime_state_path( + "linux-amd64-container", tmp_path / "profile" + ).exists() + + 
+@pytest.mark.asyncio +async def test_stale_derived_runtime_rebuilds_from_new_generation(tmp_path): + _write_source_state( + tmp_path, runtime_id="macos-arm64-host", login_generation="gen-3" + ) + stale_profile = _write_runtime_state( + tmp_path, + "linux-amd64-container", + source_login_generation="old-gen", + ) + old_marker = stale_profile / "stale.txt" + old_marker.write_text("stale") + first_browser = _make_mock_browser() + first_browser.import_cookies = AsyncMock(return_value=True) + reopened_browser = _make_mock_browser() + + with ( + patch( + "linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="linux-amd64-container", + ), + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + side_effect=[first_browser, reopened_browser], + ), + patch( + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", + new_callable=AsyncMock, + return_value=None, + ), + ): + await get_or_create_browser() + + assert not old_marker.exists() + runtime_state = json.loads( + runtime_state_path("linux-amd64-container", tmp_path / "profile").read_text() + ) + assert runtime_state["source_login_generation"] == "gen-3" + + +@pytest.mark.asyncio +async def test_matching_derived_runtime_failure_does_not_fallback_to_bridge(tmp_path): from linkedin_mcp_server.core import AuthenticationError - mock_browser = _make_mock_browser() + _write_source_state(tmp_path, runtime_id="macos-arm64-host") + _write_runtime_state(tmp_path, "linux-amd64-container") + invalid_browser = _make_mock_browser() with ( + patch( + "linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="linux-amd64-container", + ), patch( "linkedin_mcp_server.drivers.browser.BrowserManager", - return_value=mock_browser, + return_value=invalid_browser, ), patch( - "linkedin_mcp_server.drivers.browser.is_logged_in", + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", new_callable=AsyncMock, - return_value=False, + return_value="login title: linkedin login", ), 
pytest.raises(AuthenticationError), ): await get_or_create_browser() - # Browser must be closed on failure - mock_browser.close.assert_awaited_once() + invalid_browser.import_cookies.assert_not_awaited() - # Singleton must NOT be set โ€” next call should create fresh browser - from linkedin_mcp_server.drivers.browser import _browser - assert _browser is None +@pytest.mark.asyncio +async def test_checkpoint_reopen_failure_clears_runtime_dir(tmp_path): + from linkedin_mcp_server.core import AuthenticationError + + _write_source_state( + tmp_path, runtime_id="macos-arm64-host", login_generation="gen-2" + ) + first_browser = _make_mock_browser() + first_browser.import_cookies = AsyncMock(return_value=True) + reopened_browser = _make_mock_browser() + + barrier_mock = AsyncMock(side_effect=[None, "checkpoint"]) + with ( + patch( + "linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="linux-amd64-container", + ), + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + side_effect=[first_browser, reopened_browser], + ), + patch( + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", + barrier_mock, + ), + pytest.raises(AuthenticationError), + ): + await get_or_create_browser() + + assert not runtime_state_path( + "linux-amd64-container", tmp_path / "profile" + ).exists() + assert not runtime_profile_dir( + "linux-amd64-container", tmp_path / "profile" + ).exists() @pytest.mark.asyncio -async def test_singleton_returns_existing_browser(monkeypatch): - """Second call returns the same browser instance (singleton).""" - mock_browser = _make_mock_browser() +async def test_bridge_validation_failure_before_commit_clears_runtime_dir(tmp_path): + from linkedin_mcp_server.core import AuthenticationError + + _write_source_state( + tmp_path, runtime_id="macos-arm64-host", login_generation="gen-2" + ) + first_browser = _make_mock_browser() + first_browser.import_cookies = AsyncMock(return_value=True) + barrier_mock = AsyncMock(return_value="login 
title: linkedin login") with ( + patch( + "linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="linux-amd64-container", + ), patch( "linkedin_mcp_server.drivers.browser.BrowserManager", - return_value=mock_browser, - ) as ctor, + return_value=first_browser, + ), patch( - "linkedin_mcp_server.drivers.browser.is_logged_in", - new_callable=AsyncMock, - return_value=True, + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", + barrier_mock, ), + pytest.raises(AuthenticationError), ): - first = await get_or_create_browser() - second = await get_or_create_browser() + await get_or_create_browser() - assert first is second - # Constructor should only be called once - ctor.assert_called_once() + assert not runtime_state_path( + "linux-amd64-container", tmp_path / "profile" + ).exists() + assert not runtime_profile_dir( + "linux-amd64-container", tmp_path / "profile" + ).exists() diff --git a/tests/test_cli_main.py b/tests/test_cli_main.py index 5965fc62..9b875085 100644 --- a/tests/test_cli_main.py +++ b/tests/test_cli_main.py @@ -1,8 +1,9 @@ """Tests for CLI startup behavior and transport selection.""" import importlib.metadata +import json from typing import Literal -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock import pytest @@ -166,3 +167,164 @@ def test_main_non_interactive_auth_failure_has_no_stdout( assert exit_info.value.code == 1 captured = capsys.readouterr() assert captured.out == "" + + +def test_profile_info_reports_bridge_required_for_foreign_runtime( + monkeypatch: pytest.MonkeyPatch, + capsys: pytest.CaptureFixture[str], + tmp_path, +) -> None: + profile_dir = tmp_path / "profile" + profile_dir.mkdir(parents=True) + (profile_dir / "Default").mkdir(parents=True) + (profile_dir / "Default" / "Cookies").write_text("placeholder") + (tmp_path / "cookies.json").write_text(json.dumps([{"name": "li_at"}])) + (tmp_path / "source-state.json").write_text( + json.dumps( + { + "version": 1, + 
"source_runtime_id": "macos-arm64-host", + "login_generation": "gen-1", + "created_at": "2026-03-12T17:00:00Z", + "profile_path": str(profile_dir), + "cookies_path": str(tmp_path / "cookies.json"), + } + ) + ) + + monkeypatch.setattr( + "linkedin_mcp_server.cli_main.get_profile_dir", lambda: profile_dir + ) + monkeypatch.setattr( + "linkedin_mcp_server.cli_main.get_runtime_id", lambda: "linux-amd64-container" + ) + monkeypatch.setattr("linkedin_mcp_server.cli_main.get_config", lambda: AppConfig()) + monkeypatch.setattr( + "linkedin_mcp_server.cli_main.configure_logging", lambda **_kwargs: None + ) + monkeypatch.setattr("linkedin_mcp_server.cli_main.get_version", lambda: "4.0.0") + + with pytest.raises(SystemExit) as exit_info: + cli_main.profile_info_and_exit() + + assert exit_info.value.code == 0 + captured = capsys.readouterr() + assert "derived (missing)" in captured.out.lower() + assert "checkpoint-committed" in captured.out.lower() + + +def test_profile_info_reports_committed_derived_runtime( + monkeypatch: pytest.MonkeyPatch, + capsys: pytest.CaptureFixture[str], + tmp_path, +) -> None: + profile_dir = tmp_path / "profile" + profile_dir.mkdir(parents=True) + (profile_dir / "Default").mkdir(parents=True) + (profile_dir / "Default" / "Cookies").write_text("placeholder") + runtime_profile = ( + tmp_path / "runtime-profiles" / "linux-amd64-container" / "profile" + ) + runtime_profile.mkdir(parents=True) + (runtime_profile / "Default").mkdir(parents=True) + (runtime_profile / "Default" / "Cookies").write_text("placeholder") + storage_state = ( + tmp_path / "runtime-profiles" / "linux-amd64-container" / "storage-state.json" + ) + storage_state.write_text("{}") + (tmp_path / "cookies.json").write_text(json.dumps([{"name": "li_at"}])) + (tmp_path / "source-state.json").write_text( + json.dumps( + { + "version": 1, + "source_runtime_id": "macos-arm64-host", + "login_generation": "gen-1", + "created_at": "2026-03-12T17:00:00Z", + "profile_path": str(profile_dir), + 
"cookies_path": str(tmp_path / "cookies.json"), + } + ) + ) + ( + tmp_path / "runtime-profiles" / "linux-amd64-container" / "runtime-state.json" + ).write_text( + json.dumps( + { + "version": 1, + "runtime_id": "linux-amd64-container", + "source_runtime_id": "macos-arm64-host", + "source_login_generation": "gen-1", + "created_at": "2026-03-12T17:10:00Z", + "committed_at": "2026-03-12T17:10:05Z", + "profile_path": str(runtime_profile), + "storage_state_path": str(storage_state), + "commit_method": "checkpoint_restart", + } + ) + ) + + browser = MagicMock() + browser.is_authenticated = True + + monkeypatch.setattr( + "linkedin_mcp_server.cli_main.get_profile_dir", lambda: profile_dir + ) + monkeypatch.setattr( + "linkedin_mcp_server.cli_main.get_runtime_id", lambda: "linux-amd64-container" + ) + monkeypatch.setattr("linkedin_mcp_server.cli_main.get_config", lambda: AppConfig()) + monkeypatch.setattr( + "linkedin_mcp_server.cli_main.configure_logging", lambda **_kwargs: None + ) + monkeypatch.setattr("linkedin_mcp_server.cli_main.get_version", lambda: "4.0.0") + monkeypatch.setattr( + "linkedin_mcp_server.cli_main.get_or_create_browser", + AsyncMock(return_value=browser), + ) + monkeypatch.setattr("linkedin_mcp_server.cli_main.close_browser", AsyncMock()) + + with pytest.raises(SystemExit) as exit_info: + cli_main.profile_info_and_exit() + + assert exit_info.value.code == 0 + captured = capsys.readouterr() + assert "derived (committed, current generation)" in captured.out.lower() + assert str(storage_state) in captured.out + + +def test_clear_profile_and_exit_clears_all_auth_state( + monkeypatch: pytest.MonkeyPatch, + capsys: pytest.CaptureFixture[str], + tmp_path, +) -> None: + config = AppConfig() + config.browser.user_data_dir = str(tmp_path / "profile") + monkeypatch.setattr("linkedin_mcp_server.cli_main.get_config", lambda: config) + monkeypatch.setattr( + "linkedin_mcp_server.cli_main.configure_logging", lambda **_kwargs: None + ) + 
monkeypatch.setattr("linkedin_mcp_server.cli_main.get_version", lambda: "4.0.0") + monkeypatch.setattr( + "linkedin_mcp_server.cli_main.get_profile_dir", lambda: tmp_path / "profile" + ) + monkeypatch.setattr("builtins.input", lambda _prompt="": "y") + + profile_dir = tmp_path / "profile" + profile_dir.mkdir(parents=True) + (tmp_path / "source-state.json").write_text("{}") + + cleared = {} + + def fake_clear(profile): + cleared["profile"] = profile + return True + + monkeypatch.setattr("linkedin_mcp_server.cli_main.clear_auth_state", fake_clear) + + with pytest.raises(SystemExit) as exit_info: + cli_main.clear_profile_and_exit() + + assert exit_info.value.code == 0 + assert cleared["profile"] == profile_dir + captured = capsys.readouterr() + assert "authentication state cleared" in captured.out.lower() diff --git a/tests/test_core_auth.py b/tests/test_core_auth.py index 8bb4e03c..f9115a88 100644 --- a/tests/test_core_auth.py +++ b/tests/test_core_auth.py @@ -7,6 +7,9 @@ from linkedin_mcp_server.core.auth import ( detect_auth_barrier, detect_auth_barrier_quick, + is_logged_in, + resolve_remember_me_prompt, + wait_for_manual_login, ) @@ -78,6 +81,30 @@ async def test_detect_auth_barrier_quick_skips_body_text_on_authenticated_page() page.evaluate.assert_not_awaited() +@pytest.mark.asyncio +async def test_is_logged_in_rejects_empty_authenticated_only_page(): + page = MagicMock() + page.url = "https://www.linkedin.com/feed/" + page.locator.return_value.count = AsyncMock(return_value=0) + page.evaluate = AsyncMock(return_value="") + + result = await is_logged_in(page) + + assert result is False + + +@pytest.mark.asyncio +async def test_is_logged_in_accepts_authenticated_only_page_with_content(): + page = MagicMock() + page.url = "https://www.linkedin.com/feed/" + page.locator.return_value.count = AsyncMock(return_value=0) + page.evaluate = AsyncMock(return_value="Home\nMy Network\nJobs") + + result = await is_logged_in(page) + + assert result is True + + 
@pytest.mark.asyncio async def test_detect_auth_barrier_ignores_continue_as_in_page_content(): page = MagicMock() @@ -116,3 +143,56 @@ async def test_detect_auth_barrier_ignores_auth_substrings_in_slugs(): result = await detect_auth_barrier(page) assert result is None + + +@pytest.mark.asyncio +async def test_resolve_remember_me_prompt_clicks_saved_account(): + page = MagicMock() + target = MagicMock() + target.wait_for = AsyncMock() + target.scroll_into_view_if_needed = AsyncMock() + target.click = AsyncMock() + target.first = target + page.locator.return_value = target + page.wait_for_selector = AsyncMock() + page.wait_for_load_state = AsyncMock() + + result = await resolve_remember_me_prompt(page) + + assert result is True + target.click.assert_awaited_once() + page.wait_for_load_state.assert_awaited_once() + + +@pytest.mark.asyncio +async def test_resolve_remember_me_prompt_returns_false_when_absent(): + page = MagicMock() + page.wait_for_selector = AsyncMock(side_effect=Exception("missing")) + + result = await resolve_remember_me_prompt(page) + + assert result is False + + +@pytest.mark.asyncio +async def test_wait_for_manual_login_clicks_saved_account(monkeypatch): + page = MagicMock() + clicked = {"value": False} + + async def fake_resolve(_page): + if not clicked["value"]: + clicked["value"] = True + return True + return False + + async def fake_is_logged_in(_page): + return clicked["value"] + + monkeypatch.setattr( + "linkedin_mcp_server.core.auth.resolve_remember_me_prompt", fake_resolve + ) + monkeypatch.setattr("linkedin_mcp_server.core.auth.is_logged_in", fake_is_logged_in) + + await wait_for_manual_login(page, timeout=1000) + + assert clicked["value"] is True diff --git a/tests/test_core_browser.py b/tests/test_core_browser.py new file mode 100644 index 00000000..95df10e8 --- /dev/null +++ b/tests/test_core_browser.py @@ -0,0 +1,162 @@ +"""Tests for BrowserManager cookie import/export helpers.""" + +import json +from unittest.mock import AsyncMock, 
MagicMock + +import pytest + +from linkedin_mcp_server.core.browser import BrowserManager + + +def _make_cookie( + name: str, + value: str = "value", + *, + domain: str = ".linkedin.com", +) -> dict[str, str]: + return { + "name": name, + "value": value, + "domain": domain, + "path": "/", + } + + +def _make_browser_manager(tmp_path) -> tuple[BrowserManager, MagicMock]: + browser = BrowserManager(user_data_dir=tmp_path / "profile") + context = MagicMock() + context.clear_cookies = AsyncMock() + context.add_cookies = AsyncMock() + context.storage_state = AsyncMock() + browser._context = context + return browser, context + + +@pytest.mark.asyncio +async def test_import_cookies_imports_bridge_subset_only(tmp_path): + browser, context = _make_browser_manager(tmp_path) + cookie_path = tmp_path / "cookies.json" + cookies = [ + _make_cookie("li_at"), + _make_cookie("JSESSIONID"), + _make_cookie("bcookie"), + _make_cookie("lidc"), + _make_cookie("session", domain=".example.com"), + _make_cookie("timezone"), + ] + cookie_path.write_text(json.dumps(cookies)) + + imported = await browser.import_cookies(cookie_path) + + assert imported is True + context.clear_cookies.assert_not_awaited() + context.add_cookies.assert_awaited_once_with( + [cookies[0], cookies[1], cookies[2], cookies[3], cookies[5]] + ) + + +@pytest.mark.asyncio +async def test_import_cookies_uses_auth_minimal_debug_preset(tmp_path, monkeypatch): + browser, context = _make_browser_manager(tmp_path) + cookie_path = tmp_path / "cookies.json" + cookies = [ + _make_cookie("li_at"), + _make_cookie("JSESSIONID"), + _make_cookie("bcookie"), + _make_cookie("bscookie"), + _make_cookie("lidc"), + _make_cookie("liap"), + _make_cookie("timezone"), + ] + cookie_path.write_text(json.dumps(cookies)) + monkeypatch.setenv("LINKEDIN_DEBUG_BRIDGE_COOKIE_SET", "auth_minimal") + + imported = await browser.import_cookies(cookie_path) + + assert imported is True + context.add_cookies.assert_awaited_once_with(cookies[:5]) + + 
+@pytest.mark.asyncio +async def test_import_cookies_requires_li_at(tmp_path): + browser, context = _make_browser_manager(tmp_path) + cookie_path = tmp_path / "cookies.json" + cookie_path.write_text( + json.dumps( + [ + _make_cookie("JSESSIONID"), + _make_cookie("bcookie"), + ] + ) + ) + + imported = await browser.import_cookies(cookie_path) + + assert imported is False + context.clear_cookies.assert_not_awaited() + context.add_cookies.assert_not_awaited() + + +@pytest.mark.asyncio +async def test_import_cookies_preserves_existing_cookies(tmp_path): + browser, context = _make_browser_manager(tmp_path) + cookie_path = tmp_path / "cookies.json" + cookie_path.write_text( + json.dumps( + [ + _make_cookie("li_at"), + _make_cookie("li_rm"), + _make_cookie("JSESSIONID"), + ] + ) + ) + + imported = await browser.import_cookies(cookie_path) + + assert imported is True + context.clear_cookies.assert_not_awaited() + context.add_cookies.assert_awaited_once() + + +@pytest.mark.asyncio +async def test_export_storage_state_calls_context_storage_state(tmp_path): + browser, context = _make_browser_manager(tmp_path) + storage_state_path = tmp_path / "storage-state.json" + + exported = await browser.export_storage_state(storage_state_path, indexed_db=True) + + assert exported is True + context.storage_state.assert_awaited_once_with( + path=storage_state_path, + indexed_db=True, + ) + + +@pytest.mark.asyncio +async def test_export_storage_state_requires_context(tmp_path): + browser = BrowserManager(user_data_dir=tmp_path / "profile") + + exported = await browser.export_storage_state(tmp_path / "storage-state.json") + + assert exported is False + + +@pytest.mark.asyncio +async def test_close_is_idempotent_and_resets_state(tmp_path): + browser = BrowserManager(user_data_dir=tmp_path / "profile") + browser._page = MagicMock() + context = MagicMock() + context.close = AsyncMock(side_effect=RuntimeError("boom")) + playwright = MagicMock() + playwright.stop = AsyncMock() + browser._context 
= context + browser._playwright = playwright + + await browser.close() + await browser.close() + + context.close.assert_awaited_once() + playwright.stop.assert_awaited_once() + assert browser._context is None + assert browser._page is None + assert browser._playwright is None diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 505d16b9..b464cc82 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -130,6 +130,9 @@ def mock_page(): mock_locator.first = mock_locator mock_locator.inner_text = AsyncMock(return_value="normal page content") page.locator.return_value = mock_locator + page.main_frame = object() + page.on = MagicMock() + page.remove_listener = MagicMock() return page @@ -376,6 +379,69 @@ async def test_extract_search_page_raises_auth_error_for_login_barrier( ) +class TestNavigationDiagnostics: + async def test_goto_with_auth_checks_clicks_remember_me_and_retries( + self, mock_page + ): + extractor = LinkedInExtractor(mock_page) + + async def goto_side_effect(*args, **kwargs): + if mock_page.goto.await_count == 1: + raise Exception("net::ERR_TOO_MANY_REDIRECTS") + return None + + mock_page.goto = AsyncMock(side_effect=goto_side_effect) + + with ( + patch( + "linkedin_mcp_server.scraping.extractor.resolve_remember_me_prompt", + new_callable=AsyncMock, + side_effect=[True], + ) as mock_resolve, + patch( + "linkedin_mcp_server.scraping.extractor.detect_auth_barrier_quick", + new_callable=AsyncMock, + return_value=None, + ), + ): + await extractor._goto_with_auth_checks( + "https://www.linkedin.com/in/testuser/" + ) + + assert mock_page.goto.await_count == 2 + mock_resolve.assert_awaited_once() + + async def test_goto_with_auth_checks_logs_failure_context(self, mock_page): + extractor = LinkedInExtractor(mock_page) + mock_page.goto = AsyncMock(side_effect=Exception("net::ERR_TOO_MANY_REDIRECTS")) + + with ( + patch( + "linkedin_mcp_server.scraping.extractor.resolve_remember_me_prompt", + new_callable=AsyncMock, + return_value=False, 
+ ), + patch( + "linkedin_mcp_server.scraping.extractor.detect_auth_barrier", + new_callable=AsyncMock, + return_value=None, + ), + patch.object( + extractor, + "_log_navigation_failure", + new_callable=AsyncMock, + ) as mock_log_failure, + pytest.raises(Exception, match="ERR_TOO_MANY_REDIRECTS"), + ): + await extractor._goto_with_auth_checks( + "https://www.linkedin.com/in/testuser/" + ) + + mock_log_failure.assert_awaited_once() + mock_page.on.assert_called_once() + mock_page.remove_listener.assert_called_once() + + class TestScrapePersonUrls: """Test that scrape_person visits the correct URLs per section set.""" diff --git a/tests/test_session_state.py b/tests/test_session_state.py new file mode 100644 index 00000000..f30bf4ba --- /dev/null +++ b/tests/test_session_state.py @@ -0,0 +1,85 @@ +from linkedin_mcp_server.session_state import ( + get_runtime_id, + load_runtime_state, + load_source_state, + runtime_profile_dir, + runtime_storage_state_path, + source_state_path, + write_runtime_state, + write_source_state, +) + + +def test_write_source_state_creates_generation(monkeypatch, isolate_profile_dir): + monkeypatch.setattr( + "linkedin_mcp_server.session_state.get_runtime_id", + lambda: "macos-arm64-host", + ) + + state = write_source_state(isolate_profile_dir) + + assert state.source_runtime_id == "macos-arm64-host" + assert state.login_generation + assert source_state_path(isolate_profile_dir).exists() + assert load_source_state(isolate_profile_dir) == state + + +def test_write_runtime_state_tracks_source_generation(monkeypatch, isolate_profile_dir): + monkeypatch.setattr( + "linkedin_mcp_server.session_state.get_runtime_id", + lambda: "macos-arm64-host", + ) + source_state = write_source_state(isolate_profile_dir) + + storage_state_path = runtime_storage_state_path( + "linux-amd64-container", + isolate_profile_dir, + ) + storage_state_path.parent.mkdir(parents=True, exist_ok=True) + storage_state_path.write_text("{}") + + runtime_state = 
write_runtime_state( + "linux-amd64-container", + source_state, + storage_state_path, + isolate_profile_dir, + ) + + assert runtime_state.source_login_generation == source_state.login_generation + assert runtime_state.commit_method == "checkpoint_restart" + assert runtime_state.storage_state_path == str(storage_state_path.resolve()) + assert runtime_state.committed_at + assert runtime_state.profile_path == str( + runtime_profile_dir("linux-amd64-container", isolate_profile_dir).resolve() + ) + assert ( + load_runtime_state("linux-amd64-container", isolate_profile_dir) + == runtime_state + ) + + +def test_runtime_storage_state_path_uses_runtime_dir(isolate_profile_dir): + assert runtime_storage_state_path( + "linux-amd64-container", + isolate_profile_dir, + ) == ( + isolate_profile_dir.parent + / "runtime-profiles" + / "linux-amd64-container" + / "storage-state.json" + ) + + +def test_get_runtime_id_marks_container(monkeypatch): + monkeypatch.setattr( + "linkedin_mcp_server.session_state.platform.system", lambda: "Linux" + ) + monkeypatch.setattr( + "linkedin_mcp_server.session_state.platform.machine", lambda: "x86_64" + ) + monkeypatch.setattr( + "linkedin_mcp_server.session_state.Path.exists", + lambda self: str(self) == "/.dockerenv", + ) + + assert get_runtime_id() == "linux-amd64-container" From 500379ec99f1ec9a78627d98bd39da20fd169322 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Thu, 12 Mar 2026 23:17:13 +0100 Subject: [PATCH 495/565] feat(debug): return issue-ready scrape traces --- AGENTS.md | 3 +- README.md | 2 + linkedin_mcp_server/debug_trace.py | 93 ++++++++++++ linkedin_mcp_server/drivers/browser.py | 64 +++++++- linkedin_mcp_server/error_diagnostics.py | 174 ++++++++++++++++++++++ linkedin_mcp_server/error_handler.py | 91 +++++++---- linkedin_mcp_server/logging_config.py | 9 ++ linkedin_mcp_server/scraping/extractor.py | 117 ++++++++++++++- linkedin_mcp_server/tools/company.py | 5 + tests/test_browser_driver.py | 46 ++++++ tests/test_scraping.py 
| 47 +++++- tests/test_tools.py | 22 +++ 12 files changed, 638 insertions(+), 35 deletions(-) create mode 100644 linkedin_mcp_server/debug_trace.py create mode 100644 linkedin_mcp_server/error_diagnostics.py diff --git a/AGENTS.md b/AGENTS.md index 6631afe9..3fd48c24 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -71,6 +71,7 @@ All scraping tools return: `{url, sections: {name: raw_text}}`. Tools may also include: - `references: {section_name: [{kind, url, text?, context?}, ...]}` โ€” compact typed link targets for graph expansion. LinkedIn URLs are relative paths such as `/in/stickerdaniel/`; external URLs remain absolute. +- `section_errors: {section_name: {error_type, error_message, issue_template_path, issue_template, runtime, ...}}` when one section failed but the overall tool call still completed. These diagnostics include trace/log locations and an issue-ready markdown template. - `unknown_sections: [name, ...]` when unknown section names were passed. - `job_ids: [id, ...]` for `search_jobs`. @@ -144,7 +145,7 @@ Tools may also include: ## Verifying Bug Reports -Always verify scraping bugs end-to-end against live LinkedIn, not just code analysis. When working in this repository, use the local code path with `uv run`, not `uvx`, so the running process reflects the files in your workspace. Use `uvx` only when intentionally verifying the packaged distribution. Assume a valid login profile already exists at `~/.linkedin-mcp/profile/`. Start the server with HTTP transport in one terminal (this process is long-running and will block the shell), then in a second terminal call the tool via curl: +Always verify scraping bugs end-to-end against live LinkedIn, not just code analysis. When working in this repository, use the local code path with `uv run`, not `uvx`, so the running process reflects the files in your workspace. Use `uvx` only when intentionally verifying the packaged distribution. 
For live Docker investigations, always refresh the source session first with a fresh local `uv run -m linkedin_mcp_server --login` before testing each materially different approach. Assume a valid login profile already exists at `~/.linkedin-mcp/profile/`. Start the server with HTTP transport in one terminal (this process is long-running and will block the shell), then in a second terminal call the tool via curl: ```bash # Create or refresh the local source session diff --git a/README.md b/README.md index fa7c4d42..bd6d0540 100644 --- a/README.md +++ b/README.md @@ -50,6 +50,8 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company Tool responses keep readable `sections` text and may also include a compact `references` map keyed by section. Each reference includes a typed target, a relative LinkedIn path (or absolute external URL), and a short label/context when available. +When one section fails but the overall tool call still completes, responses may also include `section_errors`. Each entry contains structured diagnostics for that section, including the error type/message, runtime/session details, trace and log locations when enabled, and an issue-ready markdown template path. + > [!IMPORTANT] > **Breaking change:** LinkedIn recently made some changes to prevent scraping. The newest version uses [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python) with persistent browser profiles instead of Playwright with session files. Old `session.json` files and `LINKEDIN_COOKIE` env vars are no longer supported. Run `--login` again to create a new profile + cookie file that can be mounted in docker. 
02/2026 diff --git a/linkedin_mcp_server/debug_trace.py b/linkedin_mcp_server/debug_trace.py new file mode 100644 index 00000000..62b1e6dd --- /dev/null +++ b/linkedin_mcp_server/debug_trace.py @@ -0,0 +1,93 @@ +"""Best-effort page tracing for manual LinkedIn debugging.""" + +from __future__ import annotations + +import itertools +import json +import os +import re +from pathlib import Path +from typing import Any + +_TRACE_COUNTER = itertools.count(1) + + +def get_trace_dir() -> Path | None: + raw = os.getenv("LINKEDIN_DEBUG_TRACE_DIR", "").strip() + if not raw: + return None + return Path(raw).expanduser().resolve() + + +def _slugify_step(step: str) -> str: + return re.sub(r"[^a-z0-9]+", "-", step.lower()).strip("-") + + +async def record_page_trace( + page: Any, step: str, *, extra: dict[str, Any] | None = None +) -> None: + """Persist a screenshot and basic page state when trace debugging is enabled.""" + trace_dir = get_trace_dir() + if trace_dir is None: + return + + trace_dir.mkdir(parents=True, exist_ok=True) + screenshot_dir = trace_dir / "screens" + screenshot_dir.mkdir(parents=True, exist_ok=True) + step_id = next(_TRACE_COUNTER) + slug = _slugify_step(step) or "step" + + try: + title = await page.title() + except Exception as exc: # pragma: no cover - best effort diagnostics + title = f"" + + try: + body_text = await page.evaluate("() => document.body?.innerText || ''") + except Exception as exc: # pragma: no cover - best effort diagnostics + body_text = f"" + + if not isinstance(body_text, str): + body_text = "" + + try: + remember_me = (await page.locator("#rememberme-div").count()) > 0 + except Exception: # pragma: no cover - best effort diagnostics + remember_me = False + + try: + cookies = await page.context.cookies() + except Exception: # pragma: no cover - best effort diagnostics + cookies = [] + + linkedin_cookie_names = sorted( + { + cookie["name"] + for cookie in cookies + if "linkedin.com" in cookie.get("domain", "") + } + ) + + 
screenshot_path = screenshot_dir / f"{step_id:03d}-{slug}.png" + screenshot: str | None = None + try: + await page.screenshot(path=str(screenshot_path), full_page=True) + screenshot = str(screenshot_path) + except Exception as exc: # pragma: no cover - best effort diagnostics + screenshot = f"" + + payload = { + "step_id": step_id, + "step": step, + "url": getattr(page, "url", ""), + "title": title, + "remember_me": remember_me, + "body_length": len(body_text), + "body_marker": " ".join(body_text.split())[:200], + "linkedin_cookie_names": linkedin_cookie_names, + "screenshot": screenshot, + "extra": extra or {}, + } + + with (trace_dir / "trace.jsonl").open("a", encoding="utf-8") as fh: + fh.write(json.dumps(payload, ensure_ascii=True) + "\n") diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 01a44b9f..4b3058b5 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -21,6 +21,7 @@ ) from linkedin_mcp_server.config import get_config +from linkedin_mcp_server.debug_trace import record_page_trace from linkedin_mcp_server.session_state import ( SourceState, clear_runtime_profile, @@ -69,6 +70,16 @@ def _debug_skip_checkpoint_restart() -> bool: } +def _debug_bridge_every_startup() -> bool: + """Return whether to force a fresh bridge on every foreign-runtime startup.""" + return os.getenv("LINKEDIN_DEBUG_BRIDGE_EVERY_STARTUP", "").strip().lower() in { + "1", + "true", + "yes", + "on", + } + + def _apply_browser_settings(browser: BrowserManager) -> None: """Apply configuration settings to browser instance.""" config = get_config() @@ -124,20 +135,45 @@ async def _feed_auth_succeeds( wait_until="domcontentloaded", ) await _stabilize_navigation("feed navigation") + await record_page_trace( + browser.page, + "feed-after-goto", + extra={"allow_remember_me": allow_remember_me}, + ) if allow_remember_me: if await resolve_remember_me_prompt(browser.page): await 
_stabilize_navigation("remember-me resolution") + await record_page_trace( + browser.page, + "feed-after-remember-me", + extra={"allow_remember_me": allow_remember_me}, + ) barrier = await detect_auth_barrier_quick(browser.page) if barrier is not None: + await record_page_trace( + browser.page, + "feed-auth-barrier", + extra={"barrier": barrier}, + ) await _log_feed_failure_context(browser, barrier) return False return True except Exception as exc: if allow_remember_me and await resolve_remember_me_prompt(browser.page): await _stabilize_navigation("remember-me resolution after feed failure") + await record_page_trace( + browser.page, + "feed-after-remember-me-error-recovery", + extra={"error": f"{type(exc).__name__}: {exc}"}, + ) barrier = await detect_auth_barrier_quick(browser.page) if barrier is None: return True + await record_page_trace( + browser.page, + "feed-navigation-error", + extra={"error": f"{type(exc).__name__}: {exc}"}, + ) await _log_feed_failure_context(browser, str(exc), exc) return False @@ -212,21 +248,33 @@ async def _bridge_runtime_profile( profile_dir, launch_options=launch_options, viewport=viewport ) await browser.start() + await record_page_trace( + browser.page, + "bridge-browser-started", + extra={"profile_dir": str(profile_dir)}, + ) try: await browser.page.goto( "https://www.linkedin.com/feed/", wait_until="domcontentloaded" ) await _stabilize_navigation("pre-import feed navigation") + await record_page_trace(browser.page, "bridge-after-pre-import-feed") if not await browser.import_cookies(cookie_path): raise AuthenticationError( "Portable authentication could not be imported. Run with --login to create a fresh source session." ) await _stabilize_navigation("bridge cookie import") + await record_page_trace( + browser.page, + "bridge-after-cookie-import", + extra={"cookie_path": str(cookie_path)}, + ) if not await _feed_auth_succeeds(browser): raise AuthenticationError( "No authentication found. Run with --login to create a profile." 
) await _stabilize_navigation("post-import feed validation") + await record_page_trace(browser.page, "bridge-after-feed-validation") if _debug_skip_checkpoint_restart(): logger.warning( "Skipping checkpoint restart for derived runtime profile %s " @@ -249,6 +297,11 @@ async def _bridge_runtime_profile( ) await reopened.start() await _stabilize_navigation("derived profile reopen") + await record_page_trace( + reopened.page, + "bridge-after-profile-reopen", + extra={"profile_dir": str(profile_dir)}, + ) try: if not await _feed_auth_succeeds(reopened): logger.warning( @@ -258,6 +311,7 @@ async def _bridge_runtime_profile( "Derived runtime validation failed; no automatic re-bridge will be attempted. Run with --login to create a fresh source session." ) await _stabilize_navigation("post-reopen feed validation") + await record_page_trace(reopened.page, "bridge-after-reopen-validation") write_runtime_state( runtime_id, source_state, @@ -342,9 +396,11 @@ async def get_or_create_browser( runtime_state is not None and runtime_state.source_login_generation == source_state.login_generation ) + force_bridge = _debug_bridge_every_startup() if ( - generation_matches + not force_bridge + and generation_matches and profile_exists(derived_profile_dir) and storage_state_path.exists() ): @@ -363,6 +419,12 @@ async def get_or_create_browser( _browser_cookie_export_path = None return _browser + if force_bridge: + logger.warning( + "Forcing a fresh bridge for %s on every startup " + "(LINKEDIN_DEBUG_BRIDGE_EVERY_STARTUP enabled)", + current_runtime_id, + ) logger.info( "Deriving runtime profile for %s from source generation %s", current_runtime_id, diff --git a/linkedin_mcp_server/error_diagnostics.py b/linkedin_mcp_server/error_diagnostics.py new file mode 100644 index 00000000..4fc2b1c1 --- /dev/null +++ b/linkedin_mcp_server/error_diagnostics.py @@ -0,0 +1,174 @@ +"""Issue-ready diagnostics for scraper failures.""" + +from __future__ import annotations + +from dataclasses import 
asdict +from datetime import UTC, datetime +import json +import socket +from pathlib import Path +import re +from typing import Any + +from linkedin_mcp_server.debug_trace import get_trace_dir +from linkedin_mcp_server.session_state import ( + auth_root_dir, + get_runtime_id, + get_source_profile_dir, + load_runtime_state, + load_source_state, + portable_cookie_path, + runtime_profile_dir, + runtime_storage_state_path, +) + +ISSUE_URL = "https://github.com/stickerdaniel/linkedin-mcp-server/issues/new/choose" + + +def build_issue_diagnostics( + exception: Exception, + *, + context: str, + target_url: str | None = None, + section_name: str | None = None, +) -> dict[str, Any]: + """Write an issue-ready report and return structured diagnostics.""" + timestamp = _utcnow() + source_profile_dir = _safe_source_profile_dir() + current_runtime_id = get_runtime_id() + source_state = load_source_state(source_profile_dir) + runtime_state = load_runtime_state(current_runtime_id, source_profile_dir) + trace_dir = get_trace_dir() + log_path = trace_dir / "server.log" if trace_dir else None + issue_dir = trace_dir or (auth_root_dir(source_profile_dir) / "issue-reports") + issue_dir.mkdir(parents=True, exist_ok=True) + issue_path = ( + issue_dir + / f"{timestamp.replace(':', '').replace('-', '')}-{_slugify(context)}.md" + ) + + runtime_details = { + "hostname": socket.gethostname(), + "current_runtime_id": current_runtime_id, + "source_profile_dir": str(source_profile_dir), + "portable_cookie_path": str(portable_cookie_path(source_profile_dir)), + "source_state": asdict(source_state) if source_state else None, + "runtime_profile_dir": str( + runtime_profile_dir(current_runtime_id, source_profile_dir) + ), + "runtime_storage_state_path": str( + runtime_storage_state_path(current_runtime_id, source_profile_dir) + ), + "runtime_state": asdict(runtime_state) if runtime_state else None, + "trace_dir": str(trace_dir) if trace_dir else None, + "log_path": str(log_path) if log_path and 
log_path.exists() else None, + } + payload = { + "created_at": timestamp, + "context": context, + "section_name": section_name, + "target_url": target_url, + "error_type": type(exception).__name__, + "error_message": str(exception), + "runtime": runtime_details, + } + issue_template = _render_issue_template(payload) + issue_path.write_text(issue_template) + payload["issue_template_path"] = str(issue_path) + payload["issue_template"] = issue_template + return payload + + +def format_tool_error_with_diagnostics( + message: str, diagnostics: dict[str, Any] +) -> str: + """Append issue-report locations to a tool-facing error message.""" + lines = [message, "", "Diagnostics:"] + if diagnostics.get("issue_template_path"): + lines.append(f"- Issue template: {diagnostics['issue_template_path']}") + runtime = diagnostics.get("runtime") or {} + if runtime.get("trace_dir"): + lines.append(f"- Trace artifacts: {runtime['trace_dir']}") + if runtime.get("log_path"): + lines.append(f"- Server log: {runtime['log_path']}") + lines.append( + f"- Runtime: {runtime.get('current_runtime_id', 'unknown')} on {runtime.get('hostname', 'unknown')}" + ) + lines.append(f"- File the issue here: {ISSUE_URL}") + lines.append( + "- Read the generated issue template and attach the listed files to the GitHub issue." 
+ ) + return "\n".join(lines) + + +def _render_issue_template(payload: dict[str, Any]) -> str: + runtime = payload["runtime"] + return ( + "\n".join( + [ + "# LinkedIn MCP scrape failure", + "", + "## File This Issue", + f"- GitHub issue link: {ISSUE_URL}", + "- Read this generated file before posting.", + "- Copy the Summary and Runtime sections into the GitHub issue.", + "- Attach this generated markdown file, the server log, and the trace artifacts directory.", + "", + "## Summary", + f"- Context: {payload['context']}", + f"- Section: {payload.get('section_name') or 'n/a'}", + f"- Target URL: {payload.get('target_url') or 'n/a'}", + f"- Error: {payload['error_type']}: {payload['error_message']}", + "", + "## Runtime", + f"- Hostname: {runtime['hostname']}", + f"- Current runtime: {runtime['current_runtime_id']}", + f"- Source profile: {runtime['source_profile_dir']}", + f"- Portable cookies: {runtime['portable_cookie_path']}", + f"- Derived runtime profile: {runtime['runtime_profile_dir']}", + f"- Derived storage-state: {runtime['runtime_storage_state_path']}", + f"- Trace artifacts: {runtime['trace_dir'] or 'not enabled'}", + f"- Server log: {runtime['log_path'] or 'not enabled'}", + "", + "## Session State", + "```json", + json.dumps( + { + "source_state": runtime["source_state"], + "runtime_state": runtime["runtime_state"], + }, + indent=2, + sort_keys=True, + ), + "```", + "", + "## Attachment Checklist", + "- Read this generated markdown file and use it as the issue body/context.", + "- Attach this generated markdown file itself.", + "- Attach the server log if available.", + "- Attach the trace screenshots/trace.jsonl if available.", + "", + "## Reproduction", + "1. Run a fresh local `uv run -m linkedin_mcp_server --login`.", + "2. Start the local Docker server with the same debug env vars used for this run.", + "3. 
Re-run the failing MCP tool call.", + ] + ) + + "\n" + ) + + +def _slugify(value: str) -> str: + slug = re.sub(r"[^a-z0-9]+", "-", value.lower()).strip("-") + return slug or "issue" + + +def _safe_source_profile_dir(): + try: + return get_source_profile_dir() + except BaseException: + return (Path.home() / ".linkedin-mcp" / "profile").expanduser() + + +def _utcnow() -> str: + return datetime.now(UTC).replace(microsecond=0).isoformat().replace("+00:00", "Z") diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index 60781a6a..225fe748 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -26,10 +26,31 @@ LinkedInMCPError, SessionExpiredError, ) +from linkedin_mcp_server.error_diagnostics import ( + build_issue_diagnostics, + format_tool_error_with_diagnostics, +) logger = logging.getLogger(__name__) +def _raise_tool_error_with_diagnostics( + exception: Exception, + message: str, + *, + context: str, +) -> NoReturn: + try: + diagnostics = build_issue_diagnostics(exception, context=context) + except Exception: + logger.debug("Could not build issue diagnostics", exc_info=True) + diagnostics = None + + if diagnostics is not None: + message = format_tool_error_with_diagnostics(message, diagnostics) + raise ToolError(message) from exception + + def raise_tool_error(exception: Exception, context: str = "") -> NoReturn: """ Raise a ToolError for known LinkedIn exceptions, or re-raise unknown ones. @@ -49,58 +70,78 @@ def raise_tool_error(exception: Exception, context: str = "") -> NoReturn: if isinstance(exception, CredentialsNotFoundError): logger.warning("Credentials not found%s: %s", ctx, exception) - raise ToolError( - "Authentication not found. Run with --login to create a browser profile." - ) from exception + _raise_tool_error_with_diagnostics( + exception, + "Authentication not found. 
Run with --login to create a browser profile.", + context=context, + ) elif isinstance(exception, SessionExpiredError): logger.warning("Session expired%s: %s", ctx, exception) - raise ToolError( - "Session expired. Run with --login to create a new browser profile." - ) from exception + _raise_tool_error_with_diagnostics( + exception, + "Session expired. Run with --login to create a new browser profile.", + context=context, + ) elif isinstance(exception, AuthenticationError): logger.warning("Authentication failed%s: %s", ctx, exception) - raise ToolError( - "Authentication failed. Run with --login to re-authenticate." - ) from exception + _raise_tool_error_with_diagnostics( + exception, + "Authentication failed. Run with --login to re-authenticate.", + context=context, + ) elif isinstance(exception, RateLimitError): wait_time = getattr(exception, "suggested_wait_time", 300) logger.warning("Rate limit%s: %s (wait=%ds)", ctx, exception, wait_time) - raise ToolError( - f"Rate limit detected. Wait {wait_time} seconds before trying again." - ) from exception + _raise_tool_error_with_diagnostics( + exception, + f"Rate limit detected. Wait {wait_time} seconds before trying again.", + context=context, + ) elif isinstance(exception, ProfileNotFoundError): logger.warning("Profile not found%s: %s", ctx, exception) - raise ToolError( - "Profile not found. Check the profile URL is correct." - ) from exception + _raise_tool_error_with_diagnostics( + exception, + "Profile not found. Check the profile URL is correct.", + context=context, + ) elif isinstance(exception, ElementNotFoundError): logger.warning("Element not found%s: %s", ctx, exception) - raise ToolError( - "Element not found. LinkedIn page structure may have changed." - ) from exception + _raise_tool_error_with_diagnostics( + exception, + "Element not found. 
LinkedIn page structure may have changed.", + context=context, + ) elif isinstance(exception, NetworkError): logger.warning("Network error%s: %s", ctx, exception) - raise ToolError( - "Network error. Check your connection and try again." - ) from exception + _raise_tool_error_with_diagnostics( + exception, + "Network error. Check your connection and try again.", + context=context, + ) elif isinstance(exception, ScrapingError): logger.warning("Scraping error%s: %s", ctx, exception) - raise ToolError( - "Scraping failed. LinkedIn page structure may have changed." - ) from exception + _raise_tool_error_with_diagnostics( + exception, + "Scraping failed. LinkedIn page structure may have changed.", + context=context, + ) elif isinstance(exception, (LinkedInScraperException, LinkedInMCPError)): # Catch-all for base exception types and any future subclasses # without a dedicated handler above. Passes through str(exception). logger.warning("LinkedIn error%s: %s", ctx, exception) - raise ToolError(str(exception)) from exception + _raise_tool_error_with_diagnostics( + exception, + str(exception), + context=context, + ) else: logger.error("Unexpected error%s: %s", ctx, exception, exc_info=True) diff --git a/linkedin_mcp_server/logging_config.py b/linkedin_mcp_server/logging_config.py index 285e4cb3..1454569d 100644 --- a/linkedin_mcp_server/logging_config.py +++ b/linkedin_mcp_server/logging_config.py @@ -11,6 +11,8 @@ import logging from typing import Any, Dict +from linkedin_mcp_server.debug_trace import get_trace_dir + class MCPJSONFormatter(logging.Formatter): """JSON formatter for MCP server logs.""" @@ -107,6 +109,13 @@ def configure_logging(log_level: str = "WARNING", json_format: bool = False) -> console_handler.setFormatter(formatter) root_logger.addHandler(console_handler) + trace_dir = get_trace_dir() + if trace_dir is not None: + trace_dir.mkdir(parents=True, exist_ok=True) + file_handler = logging.FileHandler(trace_dir / "server.log", encoding="utf-8") + 
file_handler.setFormatter(formatter) + root_logger.addHandler(file_handler) + # Set specific loggers to reduce noise logging.getLogger("urllib3").setLevel(logging.ERROR) logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 232508e0..76e39a55 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -19,6 +19,8 @@ AuthenticationError, LinkedInScraperException, ) +from linkedin_mcp_server.debug_trace import record_page_trace +from linkedin_mcp_server.error_diagnostics import build_issue_diagnostics from linkedin_mcp_server.core.utils import ( detect_rate_limit, handle_modal_close, @@ -133,6 +135,7 @@ class ExtractedSection: text: str references: list[Reference] + error: dict[str, Any] | None = None def strip_linkedin_noise(text: str) -> str: @@ -263,18 +266,46 @@ def record_navigation(frame: Any) -> None: self._page.on("framenavigated", record_navigation) try: + await record_page_trace( + self._page, + "extractor-before-goto", + extra={"target_url": url, "wait_until": wait_until}, + ) try: await self._page.goto(url, wait_until=wait_until, timeout=30000) await _stabilize_navigation(f"goto {url}") + await record_page_trace( + self._page, + "extractor-after-goto", + extra={"target_url": url, "wait_until": wait_until}, + ) except Exception as exc: if allow_remember_me and await resolve_remember_me_prompt(self._page): await _stabilize_navigation(f"remember-me resolution for {url}") + await record_page_trace( + self._page, + "extractor-after-remember-me", + extra={ + "target_url": url, + "error": f"{type(exc).__name__}: {exc}", + }, + ) await self._goto_with_auth_checks( url, wait_until=wait_until, allow_remember_me=False, ) return + await record_page_trace( + self._page, + "extractor-navigation-error", + extra={ + "target_url": url, + "wait_until": wait_until, + "error": f"{type(exc).__name__}: {exc}", + 
"hops": hops, + }, + ) await self._log_navigation_failure(url, wait_until, exc, hops) await self._raise_if_auth_barrier(url, navigation_error=exc) raise @@ -285,6 +316,11 @@ def record_navigation(frame: Any) -> None: if allow_remember_me and await resolve_remember_me_prompt(self._page): await _stabilize_navigation(f"remember-me retry for {url}") + await record_page_trace( + self._page, + "extractor-after-remember-me-retry", + extra={"target_url": url, "barrier": barrier}, + ) await self._goto_with_auth_checks( url, wait_until=wait_until, @@ -292,6 +328,11 @@ def record_navigation(frame: Any) -> None: ) return + await record_page_trace( + self._page, + "extractor-auth-barrier", + extra={"target_url": url, "barrier": barrier}, + ) logger.warning("Authentication barrier detected on %s: %s", url, barrier) raise AuthenticationError( "LinkedIn requires interactive re-authentication. " @@ -333,7 +374,16 @@ async def extract_page( raise except Exception as e: logger.warning("Failed to extract page %s: %s", url, e) - return ExtractedSection(text="", references=[]) + return ExtractedSection( + text="", + references=[], + error=build_issue_diagnostics( + e, + context="extract_page", + target_url=url, + section_name=section_name, + ), + ) async def _extract_page_once( self, @@ -422,7 +472,16 @@ async def _extract_overlay( raise except Exception as e: logger.warning("Failed to extract overlay %s: %s", url, e) - return ExtractedSection(text="", references=[]) + return ExtractedSection( + text="", + references=[], + error=build_issue_diagnostics( + e, + context="extract_overlay", + target_url=url, + section_name=section_name, + ), + ) async def _extract_overlay_once( self, @@ -475,6 +534,7 @@ async def scrape_person(self, username: str, requested: set[str]) -> dict[str, A base_url = f"https://www.linkedin.com/in/{username}" sections: dict[str, str] = {} references: dict[str, list[Reference]] = {} + section_errors: dict[str, dict[str, Any]] = {} first = True for section_name, 
(suffix, is_overlay) in PERSON_SECTIONS.items(): @@ -498,10 +558,18 @@ async def scrape_person(self, username: str, requested: set[str]) -> dict[str, A sections[section_name] = extracted.text if extracted.references: references[section_name] = extracted.references + elif extracted.error: + section_errors[section_name] = extracted.error except LinkedInScraperException: raise except Exception as e: logger.warning("Error scraping section %s: %s", section_name, e) + section_errors[section_name] = build_issue_diagnostics( + e, + context="scrape_person", + target_url=url, + section_name=section_name, + ) result: dict[str, Any] = { "url": f"{base_url}/", @@ -509,6 +577,8 @@ async def scrape_person(self, username: str, requested: set[str]) -> dict[str, A } if references: result["references"] = references + if section_errors: + result["section_errors"] = section_errors return result async def scrape_company( @@ -523,6 +593,7 @@ async def scrape_company( base_url = f"https://www.linkedin.com/company/{company_name}" sections: dict[str, str] = {} references: dict[str, list[Reference]] = {} + section_errors: dict[str, dict[str, Any]] = {} first = True for section_name, (suffix, is_overlay) in COMPANY_SECTIONS.items(): @@ -546,10 +617,18 @@ async def scrape_company( sections[section_name] = extracted.text if extracted.references: references[section_name] = extracted.references + elif extracted.error: + section_errors[section_name] = extracted.error except LinkedInScraperException: raise except Exception as e: logger.warning("Error scraping section %s: %s", section_name, e) + section_errors[section_name] = build_issue_diagnostics( + e, + context="scrape_company", + target_url=url, + section_name=section_name, + ) result: dict[str, Any] = { "url": f"{base_url}/", @@ -557,6 +636,8 @@ async def scrape_company( } if references: result["references"] = references + if section_errors: + result["section_errors"] = section_errors return result async def scrape_job(self, job_id: str) -> 
dict[str, Any]: @@ -570,10 +651,13 @@ async def scrape_job(self, job_id: str) -> dict[str, Any]: sections: dict[str, str] = {} references: dict[str, list[Reference]] = {} + section_errors: dict[str, dict[str, Any]] = {} if extracted.text and extracted.text != _RATE_LIMITED_MSG: sections["job_posting"] = extracted.text if extracted.references: references["job_posting"] = extracted.references + elif extracted.error: + section_errors["job_posting"] = extracted.error result: dict[str, Any] = { "url": url, @@ -581,6 +665,8 @@ async def scrape_job(self, job_id: str) -> dict[str, Any]: } if references: result["references"] = references + if section_errors: + result["section_errors"] = section_errors return result async def _extract_job_ids(self) -> list[str]: @@ -636,7 +722,16 @@ async def _extract_search_page( raise except Exception as e: logger.warning("Failed to extract search page %s: %s", url, e) - return ExtractedSection(text="", references=[]) + return ExtractedSection( + text="", + references=[], + error=build_issue_diagnostics( + e, + context="extract_search_page", + target_url=url, + section_name=section_name, + ), + ) async def _extract_search_page_once( self, @@ -791,6 +886,7 @@ async def search_jobs( seen_ids: set[str] = set() page_texts: list[str] = [] page_references: list[Reference] = [] + section_errors: dict[str, dict[str, Any]] = {} total_pages: int | None = None total_pages_queried = False @@ -815,6 +911,8 @@ async def search_jobs( ) if not extracted.text or extracted.text == _RATE_LIMITED_MSG: + if extracted.error: + section_errors["search_results"] = extracted.error # Navigation failed or rate-limited; skip ID extraction break @@ -864,6 +962,12 @@ async def search_jobs( raise except Exception as e: logger.warning("Error on search page %d: %s", page_num + 1, e) + section_errors["search_results"] = build_issue_diagnostics( + e, + context="search_jobs", + target_url=url, + section_name="search_results", + ) break result: dict[str, Any] = { @@ -877,6 
+981,8 @@ async def search_jobs( result["references"] = { "search_results": dedupe_references(page_references, cap=15) } + if section_errors: + result["section_errors"] = section_errors return result async def search_people( @@ -898,10 +1004,13 @@ async def search_people( sections: dict[str, str] = {} references: dict[str, list[Reference]] = {} + section_errors: dict[str, dict[str, Any]] = {} if extracted.text and extracted.text != _RATE_LIMITED_MSG: sections["search_results"] = extracted.text if extracted.references: references["search_results"] = extracted.references + elif extracted.error: + section_errors["search_results"] = extracted.error result: dict[str, Any] = { "url": url, @@ -909,6 +1018,8 @@ async def search_people( } if references: result["references"] = references + if section_errors: + result["section_errors"] = section_errors return result async def _extract_root_content( diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 9e938e60..681b8ec5 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -112,10 +112,13 @@ async def get_company_posts( sections: dict[str, str] = {} references: dict[str, list[Reference]] = {} + section_errors: dict[str, dict[str, Any]] = {} if extracted.text and extracted.text != _RATE_LIMITED_MSG: sections["posts"] = extracted.text if extracted.references: references["posts"] = extracted.references + elif extracted.error: + section_errors["posts"] = extracted.error await ctx.report_progress(progress=100, total=100, message="Complete") @@ -125,6 +128,8 @@ async def get_company_posts( } if references: result["references"] = references + if section_errors: + result["section_errors"] = section_errors return result except Exception as e: diff --git a/tests/test_browser_driver.py b/tests/test_browser_driver.py index 27ac71bc..f9a83ffd 100644 --- a/tests/test_browser_driver.py +++ b/tests/test_browser_driver.py @@ -299,6 +299,52 @@ async def 
test_debug_skip_checkpoint_restart_keeps_fresh_bridged_browser( ).exists() +@pytest.mark.asyncio +async def test_debug_bridge_every_startup_skips_matching_committed_profile( + tmp_path, monkeypatch +): + _write_source_state( + tmp_path, runtime_id="macos-arm64-host", login_generation="gen-2" + ) + _write_runtime_state( + tmp_path, + "linux-amd64-container", + source_login_generation="gen-2", + ) + first_browser = _make_mock_browser() + first_browser.import_cookies = AsyncMock(return_value=True) + monkeypatch.setenv("LINKEDIN_DEBUG_BRIDGE_EVERY_STARTUP", "1") + monkeypatch.setenv("LINKEDIN_DEBUG_SKIP_CHECKPOINT_RESTART", "1") + + with ( + patch( + "linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="linux-amd64-container", + ), + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + return_value=first_browser, + ) as ctor, + patch( + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", + new_callable=AsyncMock, + return_value=None, + ), + ): + result = await get_or_create_browser() + + expected_profile = runtime_profile_dir( + "linux-amd64-container", tmp_path / "profile" + ) + assert result is first_browser + assert ctor.call_count == 1 + assert ctor.call_args.kwargs["user_data_dir"] == expected_profile + first_browser.import_cookies.assert_awaited_once_with( + portable_cookie_path(tmp_path / "profile") + ) + first_browser.export_storage_state.assert_not_awaited() + + @pytest.mark.asyncio async def test_stale_derived_runtime_rebuilds_from_new_generation(tmp_path): _write_source_state( diff --git a/tests/test_scraping.py b/tests/test_scraping.py index b464cc82..ebe32269 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -18,9 +18,10 @@ def extracted( text: str, references: list[Reference] | None = None, + error: dict | None = None, ) -> ExtractedSection: """Create an ExtractedSection for tests.""" - return ExtractedSection(text=text, references=references or []) + return ExtractedSection(text=text, 
references=references or [], error=error) class TestBuildJobSearchUrl: @@ -199,12 +200,17 @@ async def test_extract_page_returns_empty_on_failure(self, mock_page): mock_page.goto = AsyncMock(side_effect=Exception("Network error")) extractor = LinkedInExtractor(mock_page) - result = await extractor.extract_page( - "https://www.linkedin.com/in/bad/", - section_name="main_profile", - ) + with patch( + "linkedin_mcp_server.scraping.extractor.build_issue_diagnostics", + return_value={"issue_template_path": "/tmp/issue.md"}, + ): + result = await extractor.extract_page( + "https://www.linkedin.com/in/bad/", + section_name="main_profile", + ) assert result.text == "" assert result.references == [] + assert result.error == {"issue_template_path": "/tmp/issue.md"} async def test_extract_page_raises_auth_error_for_account_picker(self, mock_page): mock_page.goto = AsyncMock(side_effect=Exception("net::ERR_TOO_MANY_REDIRECTS")) @@ -500,6 +506,30 @@ async def test_basic_info_only_visits_main_profile(self, mock_page): assert urls[0].endswith("/in/testuser/") assert set(result["sections"]) == {"main_profile"} + async def test_scrape_person_returns_section_errors(self, mock_page): + extractor = LinkedInExtractor(mock_page) + with ( + patch.object( + extractor, + "extract_page", + new_callable=AsyncMock, + side_effect=[ + extracted("profile text"), + extracted("", error={"issue_template_path": "/tmp/issue.md"}), + ], + ), + patch( + "linkedin_mcp_server.scraping.extractor.asyncio.sleep", + new_callable=AsyncMock, + ), + ): + result = await extractor.scrape_person("testuser", {"posts"}) + + assert result["sections"]["main_profile"] == "profile text" + assert ( + result["section_errors"]["posts"]["issue_template_path"] == "/tmp/issue.md" + ) + async def test_experience_education_visits_correct_urls(self, mock_page): extractor = LinkedInExtractor(mock_page) with ( @@ -673,6 +703,10 @@ async def extract_with_failure(url, *args, **kwargs): "extract_page", 
side_effect=extract_with_failure, ), + patch( + "linkedin_mcp_server.scraping.extractor.build_issue_diagnostics", + return_value={"issue_template_path": "/tmp/issue.md"}, + ), patch.object( extractor, "_extract_overlay", @@ -692,6 +726,9 @@ async def extract_with_failure(url, *args, **kwargs): assert "main_profile" in result["sections"] assert "education" in result["sections"] assert "experience" not in result["sections"] + assert result["section_errors"]["experience"]["issue_template_path"] == ( + "/tmp/issue.md" + ) async def test_rate_limited_sections_are_omitted(self, mock_page): extractor = LinkedInExtractor(mock_page) diff --git a/tests/test_tools.py b/tests/test_tools.py index da7db8fa..857c9a67 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -238,6 +238,28 @@ async def test_get_company_posts_omits_rate_limited_sentinel(self, mock_context) result = await tool_fn("testcorp", mock_context, extractor=mock_extractor) assert result["sections"] == {} + async def test_get_company_posts_returns_section_errors(self, mock_context): + mock_extractor = MagicMock() + mock_extractor.extract_page = AsyncMock( + return_value=ExtractedSection( + text="", + references=[], + error={"issue_template_path": "/tmp/company-posts-issue.md"}, + ) + ) + + from linkedin_mcp_server.tools.company import register_company_tools + + mcp = FastMCP("test") + register_company_tools(mcp) + + tool_fn = await get_tool_fn(mcp, "get_company_posts") + result = await tool_fn("testcorp", mock_context, extractor=mock_extractor) + assert result["sections"] == {} + assert result["section_errors"]["posts"]["issue_template_path"] == ( + "/tmp/company-posts-issue.md" + ) + async def test_get_company_posts_omits_orphaned_references(self, mock_context): mock_extractor = MagicMock() mock_extractor.extract_page = AsyncMock( From 9fae3297d3ded8e26fb3a595b41300b2cafc7c1c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 00:07:39 +0100 Subject: [PATCH 496/565] refactor(auth): 
default docker fresh bridge --- .gitignore | 3 + AGENTS.md | 6 +- README.md | 10 ++- docs/docker-hub.md | 13 ++-- linkedin_mcp_server/authentication.py | 2 +- linkedin_mcp_server/cli_main.py | 51 ++++++++----- linkedin_mcp_server/core/browser.py | 31 +++++--- linkedin_mcp_server/drivers/browser.py | 67 ++++++++++++++++- linkedin_mcp_server/error_diagnostics.py | 90 +++++++++++++++++++++-- linkedin_mcp_server/scraping/extractor.py | 15 +++- tests/test_browser_driver.py | 88 +++++++++++++++++++--- tests/test_core_browser.py | 9 ++- 12 files changed, 316 insertions(+), 69 deletions(-) diff --git a/.gitignore b/.gitignore index d8eea1ea..1f618bcc 100644 --- a/.gitignore +++ b/.gitignore @@ -205,3 +205,6 @@ cookies.json # Local snapshot dumps (contain scraped LinkedIn data) scripts/snapshot_dumps/ + +# Debug artifacts +.debug/ diff --git a/AGENTS.md b/AGENTS.md index 3fd48c24..3ebfe212 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -99,9 +99,9 @@ Tools may also include: - Source runtime uses persistent browser profile at `~/.linkedin-mcp/profile/` - `--login` creates a new source login generation and exports `cookies.json` -- Foreign runtimes derive their own persistent profiles under `~/.linkedin-mcp/runtime-profiles//profile/` -- The first foreign-runtime bridge exports `storage-state.json`, performs a checkpoint restart, and only then marks the derived runtime profile reusable -- Derived runtime profiles are reused across restarts and rebuilt only after a new host `--login` +- Foreign runtimes derive their Linux bridge state under `~/.linkedin-mcp/runtime-profiles//profile/` +- By default, foreign runtimes fresh-bridge on every startup using the minimal working auth cookie subset and keep that bridged browser alive for the server lifetime +- Persistent derived runtime reuse remains experimental behind `LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION=1` **Transport Modes:** diff --git a/README.md b/README.md index bd6d0540..245717e5 100644 --- a/README.md +++ b/README.md @@ 
-209,13 +209,15 @@ After login, the host writes: - portable cookies: `~/.linkedin-mcp/cookies.json` - source session metadata: `~/.linkedin-mcp/source-state.json` -The first Docker run derives a Linux runtime profile under: +Docker foreign runtimes derive a Linux runtime profile under: - `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/profile/` - `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/storage-state.json` - `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/runtime-state.json` -That first Docker run also performs an internal checkpoint restart after `/feed/` succeeds, so the derived Linux runtime session is committed immediately instead of depending on later browser shutdown. Later Docker runs reuse that committed Linux runtime profile directly instead of reconstructing the session from cookies on every startup. Running `--login` again creates a new source login generation, which causes the next Docker run to rebuild its Linux runtime profile once. +By default, Docker now creates a fresh bridged Linux session on every startup using the minimal working auth cookie subset (`li_at`, `JSESSIONID`, `bcookie`, `bscookie`, `lidc`) and keeps that session alive for the server lifetime. This currently works more reliably than reusing a checkpointed derived runtime profile across restarts. + +If you want to experiment with persistent derived runtime reuse anyway, set `LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION=1`. In that mode, the first Docker run performs an internal checkpoint restart after `/feed/` succeeds and later Docker runs try to reuse the committed Linux runtime profile directly. **Step 2: Configure Claude Desktop with Docker** @@ -235,7 +237,7 @@ That first Docker run also performs an internal checkpoint restart after `/feed/ ``` > [!NOTE] -> Docker now keeps its own persistent derived runtime profile after the first successful bridge and checkpoint restart. 
If you run `--login` again on the host, the next Docker startup rebuilds that derived runtime profile from the new source login generation. +> Docker now fresh-bridges by default on each startup. Persistent derived runtime reuse is still available behind `LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION=1`, but it remains experimental. > [!NOTE] > **Why can't I run `--login` in Docker?** Docker containers don't have a display server. Create a profile on your host using the [uvx setup](#-uvx-setup-recommended---universal) and mount it into Docker. @@ -303,7 +305,7 @@ Runtime server logs are emitted by FastMCP/Uvicorn. - Make sure you have only one active LinkedIn session at a time - LinkedIn may require a login confirmation in the LinkedIn mobile app for `--login` - You might get a captcha challenge if you logged in frequently. Run `uvx linkedin-scraper-mcp --login` which opens a browser where you can solve captchas manually. See the [uvx setup](#-uvx-setup-recommended---universal) for prerequisites. -- If Docker auth becomes stale after you re-login on the host, restart Docker once so it can rebuild its derived runtime profile from the new source login generation. +- If Docker auth becomes stale after you re-login on the host, restart Docker once so it can fresh-bridge from the new source session generation. **Timeout issues:** diff --git a/docs/docker-hub.md b/docs/docker-hub.md index f90c1bac..ef6be407 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -20,28 +20,24 @@ Create a browser profile locally, then mount it into Docker. **Step 1: Create profile on the host (one-time setup)** ```bash -# Installed package usage uvx linkedin-scraper-mcp --login - -# Local development from this repo -uv run -m linkedin_mcp_server --login ``` -If you are debugging or verifying code changes in this repository, prefer `uv run -m linkedin_mcp_server ...` so the running process matches your workspace files. Use `uvx` when intentionally testing the packaged distribution. 
- This creates the source session artifacts on the host: - `~/.linkedin-mcp/profile/` - `~/.linkedin-mcp/cookies.json` - `~/.linkedin-mcp/source-state.json` -The first Docker run derives a persistent Linux runtime profile under: +Docker foreign runtimes derive a Linux runtime profile under: - `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/profile/` - `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/storage-state.json` - `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/runtime-state.json` -That first Docker run also performs an internal checkpoint restart after `/feed/` succeeds, so the derived Linux runtime session is committed immediately instead of depending on later browser shutdown. Later Docker runs reuse that committed Linux runtime profile directly. Re-running `--login` on the host creates a new source login generation, and the next Docker run rebuilds its derived Linux profile once. +By default, Docker now creates a fresh bridged Linux session on every startup using the minimal working auth cookie subset (`li_at`, `JSESSIONID`, `bcookie`, `bscookie`, `lidc`) and keeps that session alive for the server lifetime. + +If you want to experiment with persistent derived runtime reuse anyway, set `LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION=1`. In that mode, the first Docker run performs an internal checkpoint restart after `/feed/` succeeds and later Docker runs try to reuse the committed Linux runtime profile directly. 
**Step 2: Configure Claude Desktop with Docker** @@ -83,6 +79,7 @@ That first Docker run also performs an internal checkpoint restart after `/feed/ | `SLOW_MO` | `0` | Delay between browser actions in ms (debugging) | | `VIEWPORT` | `1280x720` | Browser viewport size as WIDTHxHEIGHT | | `CHROME_PATH` | - | Path to Chrome/Chromium executable (rarely needed in Docker) | +| `LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION` | `false` | Experimental: reuse checkpointed derived Linux runtime profiles across Docker restarts instead of fresh-bridging each startup | **Example with custom timeout:** diff --git a/linkedin_mcp_server/authentication.py b/linkedin_mcp_server/authentication.py index 91500c2e..bdd8c30d 100644 --- a/linkedin_mcp_server/authentication.py +++ b/linkedin_mcp_server/authentication.py @@ -52,7 +52,7 @@ def get_authentication_source() -> bool: " 1. Run with --login to create a source browser profile (recommended)\n" " 2. Run with --no-headless to login interactively\n\n" "For Docker users:\n" - " Create profile on host first: uvx linkedin-scraper-mcp --login\n" + " Create profile on host first: uv run -m linkedin_mcp_server --login\n" " Then mount into Docker: -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp" ) diff --git a/linkedin_mcp_server/cli_main.py b/linkedin_mcp_server/cli_main.py index 1692d9df..471695ef 100644 --- a/linkedin_mcp_server/cli_main.py +++ b/linkedin_mcp_server/cli_main.py @@ -25,6 +25,7 @@ get_or_create_browser, get_profile_dir, profile_exists, + _experimental_persist_derived_runtime, set_headless, ) from linkedin_mcp_server.exceptions import CredentialsNotFoundError @@ -165,23 +166,32 @@ def profile_info_and_exit() -> None: runtime_state = load_runtime_state(current_runtime, profile_dir) runtime_profile = runtime_profile_dir(current_runtime, profile_dir) runtime_storage_state = runtime_storage_state_path(current_runtime, profile_dir) - if ( - runtime_state - and runtime_state.source_login_generation == source_state.login_generation - 
and profile_exists(runtime_profile) - and runtime_storage_state.exists() - ): + if not _experimental_persist_derived_runtime(): + bridge_required = True + print("Profile mode: foreign runtime (fresh bridge each startup)") + if runtime_profile.exists(): + print( + f"Derived runtime cache present but ignored by default: {runtime_profile}" + ) + else: + if ( + runtime_state + and runtime_state.source_login_generation + == source_state.login_generation + and profile_exists(runtime_profile) + and runtime_storage_state.exists() + ): + print( + f"Profile mode: derived (committed, current generation) ({runtime_profile})" + ) + else: + bridge_required = True + state = "stale generation" if runtime_state else "missing" + print(f"Profile mode: derived ({state})") print( - f"Profile mode: derived (committed, current generation) ({runtime_profile})" + "Storage snapshot: " + f"{runtime_storage_state if runtime_storage_state and runtime_storage_state.exists() else 'missing'}" ) - else: - bridge_required = True - state = "stale generation" if runtime_state else "missing" - print(f"Profile mode: derived ({state})") - print( - "Storage snapshot: " - f"{runtime_storage_state if runtime_storage_state and runtime_storage_state.exists() else 'missing'}" - ) async def check_session() -> bool: try: @@ -197,9 +207,14 @@ async def check_session() -> bool: await close_browser() if bridge_required: - print( - "โ„น๏ธ A derived runtime profile will be created and checkpoint-committed on the next server startup." - ) + if _experimental_persist_derived_runtime(): + print( + "โ„น๏ธ A derived runtime profile will be created and checkpoint-committed on the next server startup." + ) + else: + print( + "โ„น๏ธ A fresh bridged foreign-runtime session will be created on the next server startup." 
+ ) sys.exit(0) try: diff --git a/linkedin_mcp_server/core/browser.py b/linkedin_mcp_server/core/browser.py index fa1845d9..19df362d 100644 --- a/linkedin_mcp_server/core/browser.py +++ b/linkedin_mcp_server/core/browser.py @@ -246,24 +246,33 @@ async def export_storage_state( } @classmethod - def _bridge_cookie_names(cls) -> frozenset[str]: + def _bridge_cookie_names( + cls, preset_name: str | None = None + ) -> tuple[str, frozenset[str]]: preset_name = ( - os.getenv( + preset_name + or os.getenv( "LINKEDIN_DEBUG_BRIDGE_COOKIE_SET", - "bridge_core", + "auth_minimal", ).strip() - or "bridge_core" + or "auth_minimal" ) preset = cls._BRIDGE_COOKIE_PRESETS.get(preset_name) if preset is None: logger.warning( - "Unknown LINKEDIN_DEBUG_BRIDGE_COOKIE_SET=%r, falling back to bridge_core", + "Unknown LINKEDIN_DEBUG_BRIDGE_COOKIE_SET=%r, falling back to auth_minimal", preset_name, ) - return cls._BRIDGE_COOKIE_PRESETS["bridge_core"] - return preset + preset_name = "auth_minimal" + preset = cls._BRIDGE_COOKIE_PRESETS[preset_name] + return preset_name, preset - async def import_cookies(self, cookie_path: str | Path | None = None) -> bool: + async def import_cookies( + self, + cookie_path: str | Path | None = None, + *, + preset_name: str | None = None, + ) -> bool: """Import the portable LinkedIn bridge cookie subset. Fresh browser-side cookies are preserved. 
The imported subset is the @@ -285,7 +294,9 @@ async def import_cookies(self, cookie_path: str | Path | None = None) -> bool: logger.debug("Cookie file is empty") return False - bridge_cookie_names = self._bridge_cookie_names() + resolved_preset_name, bridge_cookie_names = self._bridge_cookie_names( + preset_name + ) cookies = [ self._normalize_cookie_domain(c) @@ -304,7 +315,7 @@ async def import_cookies(self, cookie_path: str | Path | None = None) -> bool: "Imported %d LinkedIn bridge cookies from %s (preset=%s, li_at=%s): %s", len(cookies), path, - os.getenv("LINKEDIN_DEBUG_BRIDGE_COOKIE_SET", "bridge_core"), + resolved_preset_name, has_li_at, ", ".join(c["name"] for c in cookies), ) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 4b3058b5..477a8b08 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -48,9 +48,22 @@ _NAV_STABILIZE_DELAY_SECONDS = 5.0 +def _debug_stabilize_navigation_enabled() -> bool: + """Return whether debug-only startup stabilization sleeps are enabled.""" + return os.getenv("LINKEDIN_DEBUG_STABILIZE_NAVIGATION", "").strip().lower() in { + "1", + "true", + "yes", + "on", + } + + async def _stabilize_navigation(label: str) -> None: """Pause between LinkedIn startup actions to rule out timing issues.""" - if os.environ.get("PYTEST_CURRENT_TEST"): + if ( + os.environ.get("PYTEST_CURRENT_TEST") + or not _debug_stabilize_navigation_enabled() + ): return logger.debug( "Stabilizing navigation for %.1fs after %s", @@ -80,6 +93,18 @@ def _debug_bridge_every_startup() -> bool: } +def _experimental_persist_derived_runtime() -> bool: + """Return whether Docker-style foreign runtimes should reuse derived profiles.""" + return os.getenv( + "LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION", "" + ).strip().lower() in { + "1", + "true", + "yes", + "on", + } + + def _apply_browser_settings(browser: BrowserManager) -> None: """Apply configuration settings to browser 
instance.""" config = get_config() @@ -238,6 +263,8 @@ async def _bridge_runtime_profile( runtime_id: str, launch_options: dict[str, str], viewport: dict[str, int], + persist_runtime: bool, + cookie_preset: str = "auth_minimal", ) -> BrowserManager: clear_runtime_profile(runtime_id, get_source_profile_dir()) profile_dir.parent.mkdir(parents=True, exist_ok=True) @@ -259,7 +286,7 @@ async def _bridge_runtime_profile( ) await _stabilize_navigation("pre-import feed navigation") await record_page_trace(browser.page, "bridge-after-pre-import-feed") - if not await browser.import_cookies(cookie_path): + if not await browser.import_cookies(cookie_path, preset_name=cookie_preset): raise AuthenticationError( "Portable authentication could not be imported. Run with --login to create a fresh source session." ) @@ -275,6 +302,14 @@ async def _bridge_runtime_profile( ) await _stabilize_navigation("post-import feed validation") await record_page_trace(browser.page, "bridge-after-feed-validation") + if not persist_runtime: + logger.info( + "Foreign runtime %s authenticated via fresh bridge " + "(derived runtime persistence disabled)", + runtime_id, + ) + browser.is_authenticated = True + return browser if _debug_skip_checkpoint_restart(): logger.warning( "Skipping checkpoint restart for derived runtime profile %s " @@ -387,6 +422,30 @@ async def get_or_create_browser( _browser_cookie_export_path = cookie_path return _browser + persist_runtime = _experimental_persist_derived_runtime() + force_bridge = _debug_bridge_every_startup() + + if not persist_runtime: + logger.info( + "Using fresh bridge for foreign runtime %s " + "(derived runtime persistence disabled by default)", + current_runtime_id, + ) + browser = await _bridge_runtime_profile( + runtime_profile_dir(current_runtime_id, source_profile_dir), + cookie_path=cookie_path, + source_state=source_state, + runtime_id=current_runtime_id, + launch_options=launch_options, + viewport=viewport, + persist_runtime=False, + 
cookie_preset="auth_minimal", + ) + _apply_browser_settings(browser) + _browser = browser + _browser_cookie_export_path = None + return _browser + runtime_state = load_runtime_state(current_runtime_id, source_profile_dir) derived_profile_dir = runtime_profile_dir(current_runtime_id, source_profile_dir) storage_state_path = runtime_storage_state_path( @@ -396,8 +455,6 @@ async def get_or_create_browser( runtime_state is not None and runtime_state.source_login_generation == source_state.login_generation ) - force_bridge = _debug_bridge_every_startup() - if ( not force_bridge and generation_matches @@ -437,6 +494,8 @@ async def get_or_create_browser( runtime_id=current_runtime_id, launch_options=launch_options, viewport=viewport, + persist_runtime=True, + cookie_preset="auth_minimal", ) _apply_browser_settings(browser) _browser = browser diff --git a/linkedin_mcp_server/error_diagnostics.py b/linkedin_mcp_server/error_diagnostics.py index 4fc2b1c1..94e57499 100644 --- a/linkedin_mcp_server/error_diagnostics.py +++ b/linkedin_mcp_server/error_diagnostics.py @@ -23,6 +23,7 @@ ) ISSUE_URL = "https://github.com/stickerdaniel/linkedin-mcp-server/issues/new/choose" +ISSUE_TITLE_PREFIX = "[BUG]" def build_issue_diagnostics( @@ -46,6 +47,7 @@ def build_issue_diagnostics( issue_dir / f"{timestamp.replace(':', '').replace('-', '')}-{_slugify(context)}.md" ) + gist_command = _build_gist_command(issue_dir, issue_path, log_path) runtime_details = { "hostname": socket.gethostname(), @@ -62,6 +64,7 @@ def build_issue_diagnostics( "runtime_state": asdict(runtime_state) if runtime_state else None, "trace_dir": str(trace_dir) if trace_dir else None, "log_path": str(log_path) if log_path and log_path.exists() else None, + "suggested_gist_command": gist_command, } payload = { "created_at": timestamp, @@ -71,6 +74,12 @@ def build_issue_diagnostics( "error_type": type(exception).__name__, "error_message": str(exception), "runtime": runtime_details, + "suggested_issue_title": 
_suggest_issue_title( + context=context, + section_name=section_name, + target_url=target_url, + current_runtime_id=current_runtime_id, + ), } issue_template = _render_issue_template(payload) issue_path.write_text(issue_template) @@ -91,6 +100,8 @@ def format_tool_error_with_diagnostics( lines.append(f"- Trace artifacts: {runtime['trace_dir']}") if runtime.get("log_path"): lines.append(f"- Server log: {runtime['log_path']}") + if runtime.get("suggested_gist_command"): + lines.append(f"- Suggested gist command: {runtime['suggested_gist_command']}") lines.append( f"- Runtime: {runtime.get('current_runtime_id', 'unknown')} on {runtime.get('hostname', 'unknown')}" ) @@ -110,17 +121,44 @@ def _render_issue_template(payload: dict[str, Any]) -> str: "", "## File This Issue", f"- GitHub issue link: {ISSUE_URL}", + f"- Suggested title: {payload['suggested_issue_title']}", "- Read this generated file before posting.", - "- Copy the Summary and Runtime sections into the GitHub issue.", + "- Copy the sections below into the GitHub bug report template.", "- Attach this generated markdown file, the server log, and the trace artifacts directory.", "", - "## Summary", - f"- Context: {payload['context']}", - f"- Section: {payload.get('section_name') or 'n/a'}", - f"- Target URL: {payload.get('target_url') or 'n/a'}", - f"- Error: {payload['error_type']}: {payload['error_message']}", + "## Installation Method", + "- [x] Docker (specify docker image version/tag): `stickerdaniel/linkedin-mcp-server:latest` with local repo mounted into `/app`", + "- [ ] Claude Desktop DXT extension (specify docker image version/tag): _._._", + "- [ ] Local Python setup", + "", + "## When does the error occur?", + "- [ ] At startup", + "- [x] During tool call (specify which tool):", + " - [x] get_person_profile", + " - [ ] get_company_profile", + " - [ ] get_job_details", + " - [ ] search_jobs", + " - [ ] close_session", + "", + "## MCP Client Configuration", + "", + "**Client used for reproduction**:", 
+ "```text", + "Local curl-based MCP HTTP client against the server's streamable-http transport", + "```", + "", + "## MCP Client Logs", + "```text", + "See attached server log and trace artifacts.", + "```", + "", + "## Error Description", + f"Context: {payload['context']}", + f"Section: {payload.get('section_name') or 'n/a'}", + f"Target URL: {payload.get('target_url') or 'n/a'}", + f"Error: {payload['error_type']}: {payload['error_message']}", "", - "## Runtime", + "## Runtime Diagnostics", f"- Hostname: {runtime['hostname']}", f"- Current runtime: {runtime['current_runtime_id']}", f"- Source profile: {runtime['source_profile_dir']}", @@ -129,6 +167,7 @@ def _render_issue_template(payload: dict[str, Any]) -> str: f"- Derived storage-state: {runtime['runtime_storage_state_path']}", f"- Trace artifacts: {runtime['trace_dir'] or 'not enabled'}", f"- Server log: {runtime['log_path'] or 'not enabled'}", + f"- Suggested gist command: {runtime['suggested_gist_command'] or 'not available'}", "", "## Session State", "```json", @@ -147,6 +186,12 @@ def _render_issue_template(payload: dict[str, Any]) -> str: "- Attach this generated markdown file itself.", "- Attach the server log if available.", "- Attach the trace screenshots/trace.jsonl if available.", + "- Optional: run the suggested gist command below to upload the text artifacts as a single shareable bundle.", + "", + "## Suggested Gist Command", + "```bash", + runtime["suggested_gist_command"] or "# gist command unavailable", + "```", "", "## Reproduction", "1. 
Run a fresh local `uv run -m linkedin_mcp_server --login`.", @@ -170,5 +215,36 @@ def _safe_source_profile_dir(): return (Path.home() / ".linkedin-mcp" / "profile").expanduser() +def _suggest_issue_title( + *, + context: str, + section_name: str | None, + target_url: str | None, + current_runtime_id: str, +) -> str: + section = section_name or "unknown-section" + route = target_url or context + if "/recent-activity/" in route: + summary = f"recent-activity redirect loop in {section} on {current_runtime_id}" + else: + summary = f"{section} scrape failure in {context} on {current_runtime_id}" + return f"{ISSUE_TITLE_PREFIX} {summary}" + + +def _build_gist_command( + issue_dir: Path, + issue_path: Path, + log_path: Path | None, +) -> str: + trace_path = issue_dir / "trace.jsonl" + files = [str(issue_path)] + if log_path is not None: + files.append(str(log_path)) + if trace_path.exists(): + files.append(str(trace_path)) + quoted = " ".join(f'"{path}"' for path in files) + return f'gh gist create {quoted} -d "LinkedIn MCP debug artifacts"' + + def _utcnow() -> str: return datetime.now(UTC).replace(microsecond=0).isoformat().replace("+00:00", "Z") diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 76e39a55..315aac12 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -84,9 +84,22 @@ _SORT_BY_MAP = {"date": "DD", "relevance": "R"} +def _debug_stabilize_navigation_enabled() -> bool: + """Return whether debug-only scraper stabilization sleeps are enabled.""" + return os.getenv("LINKEDIN_DEBUG_STABILIZE_NAVIGATION", "").strip().lower() in { + "1", + "true", + "yes", + "on", + } + + async def _stabilize_navigation(label: str) -> None: """Pause between LinkedIn navigations to rule out timing issues.""" - if os.environ.get("PYTEST_CURRENT_TEST"): + if ( + os.environ.get("PYTEST_CURRENT_TEST") + or not _debug_stabilize_navigation_enabled() + ): return logger.debug( 
"Stabilizing navigation for %.1fs after %s", diff --git a/tests/test_browser_driver.py b/tests/test_browser_driver.py index f9a83ffd..c9175ae0 100644 --- a/tests/test_browser_driver.py +++ b/tests/test_browser_driver.py @@ -180,10 +180,13 @@ async def test_same_runtime_clicks_remember_me_during_feed_validation(tmp_path): @pytest.mark.asyncio -async def test_derived_runtime_reuses_matching_committed_profile(tmp_path): +async def test_experimental_derived_runtime_reuses_matching_committed_profile( + tmp_path, monkeypatch +): _write_source_state(tmp_path, runtime_id="macos-arm64-host") derived_profile = _write_runtime_state(tmp_path, "linux-amd64-container") derived_browser = _make_mock_browser() + monkeypatch.setenv("LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION", "1") with ( patch( @@ -209,13 +212,63 @@ async def test_derived_runtime_reuses_matching_committed_profile(tmp_path): @pytest.mark.asyncio -async def test_missing_derived_runtime_bridges_and_checkpoint_commits(tmp_path): +async def test_default_foreign_runtime_bridges_fresh_each_startup(tmp_path): + _write_source_state( + tmp_path, runtime_id="macos-arm64-host", login_generation="gen-2" + ) + _write_runtime_state( + tmp_path, + "linux-amd64-container", + source_login_generation="gen-2", + ) + first_browser = _make_mock_browser() + first_browser.import_cookies = AsyncMock(return_value=True) + + with ( + patch( + "linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="linux-amd64-container", + ), + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + return_value=first_browser, + ) as ctor, + patch( + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", + new_callable=AsyncMock, + return_value=None, + ), + ): + result = await get_or_create_browser() + + expected_profile = runtime_profile_dir( + "linux-amd64-container", tmp_path / "profile" + ) + assert result is first_browser + assert ctor.call_count == 1 + assert ctor.call_args.kwargs["user_data_dir"] == expected_profile 
+ first_browser.import_cookies.assert_awaited_once_with( + portable_cookie_path(tmp_path / "profile"), + preset_name="auth_minimal", + ) + first_browser.export_storage_state.assert_not_awaited() + first_browser.close.assert_not_awaited() + assert not runtime_state_path( + "linux-amd64-container", tmp_path / "profile" + ).exists() + + +@pytest.mark.asyncio +async def test_experimental_missing_derived_runtime_bridges_and_checkpoint_commits( + tmp_path, monkeypatch +): _write_source_state( tmp_path, runtime_id="macos-arm64-host", login_generation="gen-2" ) first_browser = _make_mock_browser() first_browser.import_cookies = AsyncMock(return_value=True) reopened_browser = _make_mock_browser() + monkeypatch.setenv("LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION", "1") with ( patch( @@ -245,7 +298,8 @@ async def test_missing_derived_runtime_bridges_and_checkpoint_commits(tmp_path): assert ctor.call_args_list[0].kwargs["user_data_dir"] == expected_profile assert ctor.call_args_list[1].kwargs["user_data_dir"] == expected_profile first_browser.import_cookies.assert_awaited_once_with( - portable_cookie_path(tmp_path / "profile") + portable_cookie_path(tmp_path / "profile"), + preset_name="auth_minimal", ) first_browser.export_storage_state.assert_awaited_once_with( expected_storage, @@ -268,6 +322,7 @@ async def test_debug_skip_checkpoint_restart_keeps_fresh_bridged_browser( ) first_browser = _make_mock_browser() first_browser.import_cookies = AsyncMock(return_value=True) + monkeypatch.setenv("LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION", "1") monkeypatch.setenv("LINKEDIN_DEBUG_SKIP_CHECKPOINT_RESTART", "1") with ( @@ -290,7 +345,8 @@ async def test_debug_skip_checkpoint_restart_keeps_fresh_bridged_browser( assert result is first_browser assert ctor.call_count == 1 first_browser.import_cookies.assert_awaited_once_with( - portable_cookie_path(tmp_path / "profile") + portable_cookie_path(tmp_path / "profile"), + preset_name="auth_minimal", ) 
first_browser.export_storage_state.assert_not_awaited() first_browser.close.assert_not_awaited() @@ -313,6 +369,7 @@ async def test_debug_bridge_every_startup_skips_matching_committed_profile( ) first_browser = _make_mock_browser() first_browser.import_cookies = AsyncMock(return_value=True) + monkeypatch.setenv("LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION", "1") monkeypatch.setenv("LINKEDIN_DEBUG_BRIDGE_EVERY_STARTUP", "1") monkeypatch.setenv("LINKEDIN_DEBUG_SKIP_CHECKPOINT_RESTART", "1") @@ -340,13 +397,16 @@ async def test_debug_bridge_every_startup_skips_matching_committed_profile( assert ctor.call_count == 1 assert ctor.call_args.kwargs["user_data_dir"] == expected_profile first_browser.import_cookies.assert_awaited_once_with( - portable_cookie_path(tmp_path / "profile") + portable_cookie_path(tmp_path / "profile"), + preset_name="auth_minimal", ) first_browser.export_storage_state.assert_not_awaited() @pytest.mark.asyncio -async def test_stale_derived_runtime_rebuilds_from_new_generation(tmp_path): +async def test_experimental_stale_derived_runtime_rebuilds_from_new_generation( + tmp_path, monkeypatch +): _write_source_state( tmp_path, runtime_id="macos-arm64-host", login_generation="gen-3" ) @@ -360,6 +420,7 @@ async def test_stale_derived_runtime_rebuilds_from_new_generation(tmp_path): first_browser = _make_mock_browser() first_browser.import_cookies = AsyncMock(return_value=True) reopened_browser = _make_mock_browser() + monkeypatch.setenv("LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION", "1") with ( patch( @@ -386,12 +447,15 @@ async def test_stale_derived_runtime_rebuilds_from_new_generation(tmp_path): @pytest.mark.asyncio -async def test_matching_derived_runtime_failure_does_not_fallback_to_bridge(tmp_path): +async def test_experimental_matching_derived_runtime_failure_does_not_fallback_to_bridge( + tmp_path, monkeypatch +): from linkedin_mcp_server.core import AuthenticationError _write_source_state(tmp_path, runtime_id="macos-arm64-host") 
_write_runtime_state(tmp_path, "linux-amd64-container") invalid_browser = _make_mock_browser() + monkeypatch.setenv("LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION", "1") with ( patch( @@ -415,7 +479,9 @@ async def test_matching_derived_runtime_failure_does_not_fallback_to_bridge(tmp_ @pytest.mark.asyncio -async def test_checkpoint_reopen_failure_clears_runtime_dir(tmp_path): +async def test_experimental_checkpoint_reopen_failure_clears_runtime_dir( + tmp_path, monkeypatch +): from linkedin_mcp_server.core import AuthenticationError _write_source_state( @@ -424,6 +490,7 @@ async def test_checkpoint_reopen_failure_clears_runtime_dir(tmp_path): first_browser = _make_mock_browser() first_browser.import_cookies = AsyncMock(return_value=True) reopened_browser = _make_mock_browser() + monkeypatch.setenv("LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION", "1") barrier_mock = AsyncMock(side_effect=[None, "checkpoint"]) with ( @@ -452,7 +519,9 @@ async def test_checkpoint_reopen_failure_clears_runtime_dir(tmp_path): @pytest.mark.asyncio -async def test_bridge_validation_failure_before_commit_clears_runtime_dir(tmp_path): +async def test_experimental_bridge_validation_failure_before_commit_clears_runtime_dir( + tmp_path, monkeypatch +): from linkedin_mcp_server.core import AuthenticationError _write_source_state( @@ -460,6 +529,7 @@ async def test_bridge_validation_failure_before_commit_clears_runtime_dir(tmp_pa ) first_browser = _make_mock_browser() first_browser.import_cookies = AsyncMock(return_value=True) + monkeypatch.setenv("LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION", "1") barrier_mock = AsyncMock(return_value="login title: linkedin login") with ( diff --git a/tests/test_core_browser.py b/tests/test_core_browser.py index 95df10e8..f634b625 100644 --- a/tests/test_core_browser.py +++ b/tests/test_core_browser.py @@ -40,6 +40,7 @@ async def test_import_cookies_imports_bridge_subset_only(tmp_path): _make_cookie("li_at"), _make_cookie("JSESSIONID"), 
_make_cookie("bcookie"), + _make_cookie("bscookie"), _make_cookie("lidc"), _make_cookie("session", domain=".example.com"), _make_cookie("timezone"), @@ -51,12 +52,12 @@ async def test_import_cookies_imports_bridge_subset_only(tmp_path): assert imported is True context.clear_cookies.assert_not_awaited() context.add_cookies.assert_awaited_once_with( - [cookies[0], cookies[1], cookies[2], cookies[3], cookies[5]] + [cookies[0], cookies[1], cookies[2], cookies[3], cookies[4]] ) @pytest.mark.asyncio -async def test_import_cookies_uses_auth_minimal_debug_preset(tmp_path, monkeypatch): +async def test_import_cookies_uses_bridge_core_debug_preset(tmp_path, monkeypatch): browser, context = _make_browser_manager(tmp_path) cookie_path = tmp_path / "cookies.json" cookies = [ @@ -69,12 +70,12 @@ async def test_import_cookies_uses_auth_minimal_debug_preset(tmp_path, monkeypat _make_cookie("timezone"), ] cookie_path.write_text(json.dumps(cookies)) - monkeypatch.setenv("LINKEDIN_DEBUG_BRIDGE_COOKIE_SET", "auth_minimal") + monkeypatch.setenv("LINKEDIN_DEBUG_BRIDGE_COOKIE_SET", "bridge_core") imported = await browser.import_cookies(cookie_path) assert imported is True - context.add_cookies.assert_awaited_once_with(cookies[:5]) + context.add_cookies.assert_awaited_once_with(cookies) @pytest.mark.asyncio From 917e0ef37ab25f5d798db6b0d15d849afe3e9e0b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 00:16:12 +0100 Subject: [PATCH 497/565] feat(debug): keep traces on failures --- AGENTS.md | 2 + README.md | 4 +- docs/docker-hub.md | 3 + linkedin_mcp_server/cli_main.py | 142 ++++++++++++----------- linkedin_mcp_server/debug_trace.py | 99 ++++++++++++++-- linkedin_mcp_server/error_diagnostics.py | 84 +++++++++++++- linkedin_mcp_server/logging_config.py | 28 ++++- tests/test_debug_trace.py | 64 ++++++++++ tests/test_error_diagnostics.py | 78 +++++++++++++ 9 files changed, 421 insertions(+), 83 deletions(-) create mode 100644 tests/test_debug_trace.py create mode 100644 
tests/test_error_diagnostics.py diff --git a/AGENTS.md b/AGENTS.md index 3ebfe212..07e91b57 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -184,6 +184,8 @@ After the workflow completes, file a PR in the MCP registry to update the versio ## Important Development Notes +Always read [`CONTRIBUTING.md`](CONTRIBUTING.md) before filing an issue or working on this repository, and strictly follow its guidelines and checklists. + ### Development Workflow - Never sign a PR or commit with Claude Code diff --git a/README.md b/README.md index 245717e5..899efd66 100644 --- a/README.md +++ b/README.md @@ -50,7 +50,7 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company Tool responses keep readable `sections` text and may also include a compact `references` map keyed by section. Each reference includes a typed target, a relative LinkedIn path (or absolute external URL), and a short label/context when available. -When one section fails but the overall tool call still completes, responses may also include `section_errors`. Each entry contains structured diagnostics for that section, including the error type/message, runtime/session details, trace and log locations when enabled, and an issue-ready markdown template path. +When one section fails but the overall tool call still completes, responses may also include `section_errors`. Each entry contains structured diagnostics for that section, including the error type/message, runtime/session details, trace/log locations, matching-open-issue hints when available, and an issue-ready markdown template path. > [!IMPORTANT] > **Breaking change:** LinkedIn recently made some changes to prevent scraping. The newest version uses [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python) with persistent browser profiles instead of Playwright with session files. Old `session.json` files and `LINKEDIN_COOKIE` env vars are no longer supported. 
Run `--login` again to create a new profile + cookie file that can be mounted in docker. 02/2026 @@ -217,6 +217,8 @@ Docker foreign runtimes derive a Linux runtime profile under: By default, Docker now creates a fresh bridged Linux session on every startup using the minimal working auth cookie subset (`li_at`, `JSESSIONID`, `bcookie`, `bscookie`, `lidc`) and keeps that session alive for the server lifetime. This currently works more reliably than reusing a checkpointed derived runtime profile across restarts. +Runtime traces/logs are captured into an ephemeral run directory by default and are automatically preserved only when a scrape failure occurs. Set `LINKEDIN_TRACE_MODE=always` to keep every run or `LINKEDIN_TRACE_MODE=off` to disable trace persistence entirely. + If you want to experiment with persistent derived runtime reuse anyway, set `LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION=1`. In that mode, the first Docker run performs an internal checkpoint restart after `/feed/` succeeds and later Docker runs try to reuse the committed Linux runtime profile directly. **Step 2: Configure Claude Desktop with Docker** diff --git a/docs/docker-hub.md b/docs/docker-hub.md index ef6be407..9db753de 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -37,6 +37,8 @@ Docker foreign runtimes derive a Linux runtime profile under: By default, Docker now creates a fresh bridged Linux session on every startup using the minimal working auth cookie subset (`li_at`, `JSESSIONID`, `bcookie`, `bscookie`, `lidc`) and keeps that session alive for the server lifetime. +Runtime traces/logs are captured into an ephemeral run directory by default and are automatically preserved only when a scrape failure occurs. Set `LINKEDIN_TRACE_MODE=always` to keep every run or `LINKEDIN_TRACE_MODE=off` to disable trace persistence entirely. + If you want to experiment with persistent derived runtime reuse anyway, set `LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION=1`. 
In that mode, the first Docker run performs an internal checkpoint restart after `/feed/` succeeds and later Docker runs try to reuse the committed Linux runtime profile directly. **Step 2: Configure Claude Desktop with Docker** @@ -80,6 +82,7 @@ If you want to experiment with persistent derived runtime reuse anyway, set `LIN | `VIEWPORT` | `1280x720` | Browser viewport size as WIDTHxHEIGHT | | `CHROME_PATH` | - | Path to Chrome/Chromium executable (rarely needed in Docker) | | `LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION` | `false` | Experimental: reuse checkpointed derived Linux runtime profiles across Docker restarts instead of fresh-bridging each startup | +| `LINKEDIN_TRACE_MODE` | `on_error` | Trace/log retention mode: `on_error` keeps ephemeral artifacts only when a failure occurs, `always` keeps every run, `off` disables trace persistence | **Example with custom timeout:** diff --git a/linkedin_mcp_server/cli_main.py b/linkedin_mcp_server/cli_main.py index 471695ef..3c41f6d1 100644 --- a/linkedin_mcp_server/cli_main.py +++ b/linkedin_mcp_server/cli_main.py @@ -28,8 +28,9 @@ _experimental_persist_derived_runtime, set_headless, ) +from linkedin_mcp_server.debug_trace import should_keep_traces from linkedin_mcp_server.exceptions import CredentialsNotFoundError -from linkedin_mcp_server.logging_config import configure_logging +from linkedin_mcp_server.logging_config import configure_logging, teardown_trace_logging from linkedin_mcp_server.session_state import ( get_runtime_id, load_runtime_state, @@ -317,84 +318,87 @@ def main() -> None: logger.info(f"LinkedIn MCP Server v{version}") - # Set headless mode from config - set_headless(config.browser.headless) - - # Handle --logout flag - if config.server.logout: - clear_profile_and_exit() + try: + # Set headless mode from config + set_headless(config.browser.headless) - # Handle --login flag - if config.server.login: - get_profile_and_exit() + # Handle --logout flag + if config.server.logout: + 
clear_profile_and_exit() - # Handle --status flag - if config.server.status: - profile_info_and_exit() + # Handle --login flag + if config.server.login: + get_profile_and_exit() - logger.debug(f"Server configuration: {config}") + # Handle --status flag + if config.server.status: + profile_info_and_exit() - # Phase 1: Ensure Authentication is Ready - try: - ensure_authentication_ready() - if config.is_interactive: - print("โœ… Authentication ready") - logger.info("Authentication ready") - - except CredentialsNotFoundError as e: - logger.error(f"Authentication setup failed: {e}") - if config.is_interactive: - print("\nโŒ Authentication required") - print(str(e)) - sys.exit(1) + logger.debug(f"Server configuration: {config}") - except KeyboardInterrupt: - if config.is_interactive: - print("\n\n๐Ÿ‘‹ Setup cancelled by user") - sys.exit(0) + # Phase 1: Ensure Authentication is Ready + try: + ensure_authentication_ready() + if config.is_interactive: + print("โœ… Authentication ready") + logger.info("Authentication ready") + + except CredentialsNotFoundError as e: + logger.error(f"Authentication setup failed: {e}") + if config.is_interactive: + print("\nโŒ Authentication required") + print(str(e)) + sys.exit(1) + + except KeyboardInterrupt: + if config.is_interactive: + print("\n\n๐Ÿ‘‹ Setup cancelled by user") + sys.exit(0) - except (AuthenticationError, RateLimitError) as e: - logger.error(f"LinkedIn error during setup: {e}") - if config.is_interactive: - print(f"\nโŒ {str(e)}") - sys.exit(1) + except (AuthenticationError, RateLimitError) as e: + logger.error(f"LinkedIn error during setup: {e}") + if config.is_interactive: + print(f"\nโŒ {str(e)}") + sys.exit(1) - except Exception as e: - logger.exception(f"Unexpected error during authentication setup: {e}") - if config.is_interactive: - print(f"\nโŒ Setup failed: {e}") - sys.exit(1) + except Exception as e: + logger.exception(f"Unexpected error during authentication setup: {e}") + if config.is_interactive: + 
print(f"\nโŒ Setup failed: {e}") + sys.exit(1) - # Phase 2: Server Runtime - try: - transport = config.server.transport - - # Prompt for transport in interactive mode if not explicitly set - if config.is_interactive and not config.server.transport_explicitly_set: - print("\n๐Ÿš€ Server ready! Choose transport mode:") - transport = choose_transport_interactive() - - # Create and run the MCP server - mcp = create_mcp_server() - - if transport == "streamable-http": - mcp.run( - transport=transport, - host=config.server.host, - port=config.server.port, - path=config.server.path, - ) - else: - mcp.run(transport=transport) + # Phase 2: Server Runtime + try: + transport = config.server.transport + + # Prompt for transport in interactive mode if not explicitly set + if config.is_interactive and not config.server.transport_explicitly_set: + print("\n๐Ÿš€ Server ready! Choose transport mode:") + transport = choose_transport_interactive() + + # Create and run the MCP server + mcp = create_mcp_server() + + if transport == "streamable-http": + mcp.run( + transport=transport, + host=config.server.host, + port=config.server.port, + path=config.server.path, + ) + else: + mcp.run(transport=transport) - except KeyboardInterrupt: - exit_gracefully(0) + except KeyboardInterrupt: + exit_gracefully(0) - except Exception as e: - logger.exception(f"Server runtime error: {e}") - if config.is_interactive: - print(f"\nโŒ Server error: {e}") - exit_gracefully(1) + except Exception as e: + logger.exception(f"Server runtime error: {e}") + if config.is_interactive: + print(f"\nโŒ Server error: {e}") + exit_gracefully(1) + finally: + teardown_trace_logging(keep_traces=should_keep_traces()) def exit_gracefully(exit_code: int = 0) -> None: diff --git a/linkedin_mcp_server/debug_trace.py b/linkedin_mcp_server/debug_trace.py index 62b1e6dd..a690108a 100644 --- a/linkedin_mcp_server/debug_trace.py +++ b/linkedin_mcp_server/debug_trace.py @@ -1,22 +1,107 @@ -"""Best-effort page tracing for manual 
LinkedIn debugging.""" +"""Best-effort trace capture with on-error retention.""" from __future__ import annotations import itertools import json import os -import re from pathlib import Path -from typing import Any +import re +import shutil +import tempfile +from typing import Any, Literal + +from linkedin_mcp_server.session_state import auth_root_dir + +TraceMode = Literal["off", "on_error", "always"] _TRACE_COUNTER = itertools.count(1) +_TRACE_DIR: Path | None = None +_TRACE_KEEP = False +_EXPLICIT_TRACE_DIR = False + + +def _trace_mode() -> TraceMode: + raw = os.getenv("LINKEDIN_TRACE_MODE", "").strip().lower() + if raw in {"off", "false", "0", "no"}: + return "off" + if raw in {"always", "keep", "persist"}: + return "always" + return "on_error" + + +def _trace_root() -> Path: + source_profile = Path( + os.getenv("USER_DATA_DIR", "~/.linkedin-mcp/profile") + ).expanduser() + root = auth_root_dir(source_profile) / "trace-runs" + root.mkdir(parents=True, exist_ok=True) + return root + + +def trace_enabled() -> bool: + return ( + bool(os.getenv("LINKEDIN_DEBUG_TRACE_DIR", "").strip()) + or _trace_mode() != "off" + ) def get_trace_dir() -> Path | None: - raw = os.getenv("LINKEDIN_DEBUG_TRACE_DIR", "").strip() - if not raw: + global _TRACE_DIR, _EXPLICIT_TRACE_DIR + + explicit = os.getenv("LINKEDIN_DEBUG_TRACE_DIR", "").strip() + if explicit: + _EXPLICIT_TRACE_DIR = True + if _TRACE_DIR is None: + _TRACE_DIR = Path(explicit).expanduser().resolve() + return _TRACE_DIR + + if _trace_mode() == "off": return None - return Path(raw).expanduser().resolve() + + if _TRACE_DIR is None: + _TRACE_DIR = Path( + tempfile.mkdtemp( + prefix="run-", + dir=_trace_root(), + ) + ).resolve() + return _TRACE_DIR + + +def mark_trace_for_retention() -> Path | None: + global _TRACE_KEEP + trace_dir = get_trace_dir() + if trace_dir is not None: + trace_dir.mkdir(parents=True, exist_ok=True) + _TRACE_KEEP = True + return trace_dir + + +def should_keep_traces() -> bool: + return 
_EXPLICIT_TRACE_DIR or _TRACE_KEEP or _trace_mode() == "always" + + +def cleanup_trace_dir() -> None: + global _TRACE_DIR, _TRACE_KEEP, _EXPLICIT_TRACE_DIR + + trace_dir = _TRACE_DIR + if trace_dir is None or should_keep_traces(): + return + try: + shutil.rmtree(trace_dir) + except OSError: + return + _TRACE_DIR = None + _TRACE_KEEP = False + _EXPLICIT_TRACE_DIR = False + + +def reset_trace_state_for_testing() -> None: + global _TRACE_DIR, _TRACE_KEEP, _EXPLICIT_TRACE_DIR + _TRACE_DIR = None + _TRACE_KEEP = False + _EXPLICIT_TRACE_DIR = False def _slugify_step(step: str) -> str: @@ -26,7 +111,7 @@ def _slugify_step(step: str) -> str: async def record_page_trace( page: Any, step: str, *, extra: dict[str, Any] | None = None ) -> None: - """Persist a screenshot and basic page state when trace debugging is enabled.""" + """Persist a screenshot and basic page state when trace capture is enabled.""" trace_dir = get_trace_dir() if trace_dir is None: return diff --git a/linkedin_mcp_server/error_diagnostics.py b/linkedin_mcp_server/error_diagnostics.py index 94e57499..8320049d 100644 --- a/linkedin_mcp_server/error_diagnostics.py +++ b/linkedin_mcp_server/error_diagnostics.py @@ -9,8 +9,10 @@ from pathlib import Path import re from typing import Any +from urllib.parse import quote_plus +from urllib.request import Request, urlopen -from linkedin_mcp_server.debug_trace import get_trace_dir +from linkedin_mcp_server.debug_trace import get_trace_dir, mark_trace_for_retention from linkedin_mcp_server.session_state import ( auth_root_dir, get_runtime_id, @@ -24,6 +26,7 @@ ISSUE_URL = "https://github.com/stickerdaniel/linkedin-mcp-server/issues/new/choose" ISSUE_TITLE_PREFIX = "[BUG]" +ISSUE_SEARCH_API = "https://api.github.com/search/issues" def build_issue_diagnostics( @@ -39,7 +42,7 @@ def build_issue_diagnostics( current_runtime_id = get_runtime_id() source_state = load_source_state(source_profile_dir) runtime_state = load_runtime_state(current_runtime_id, source_profile_dir) 
- trace_dir = get_trace_dir() + trace_dir = mark_trace_for_retention() or get_trace_dir() log_path = trace_dir / "server.log" if trace_dir else None issue_dir = trace_dir or (auth_root_dir(source_profile_dir) / "issue-reports") issue_dir.mkdir(parents=True, exist_ok=True) @@ -81,6 +84,7 @@ def build_issue_diagnostics( current_runtime_id=current_runtime_id, ), } + payload["existing_issues"] = _find_existing_issues(payload) issue_template = _render_issue_template(payload) issue_path.write_text(issue_template) payload["issue_template_path"] = str(issue_path) @@ -105,26 +109,51 @@ def format_tool_error_with_diagnostics( lines.append( f"- Runtime: {runtime.get('current_runtime_id', 'unknown')} on {runtime.get('hostname', 'unknown')}" ) - lines.append(f"- File the issue here: {ISSUE_URL}") + existing_issues = diagnostics.get("existing_issues") or [] + if existing_issues: + lines.append("- Matching open issues were found. Review them first:") + for issue in existing_issues: + lines.append(f" - #{issue['number']}: {issue['title']} ({issue['url']})") + lines.append( + "- If one matches this failure, upload the gist and post it as a comment on that issue instead of opening a new issue." + ) + else: + lines.append(f"- File the issue here: {ISSUE_URL}") lines.append( - "- Read the generated issue template and attach the listed files to the GitHub issue." + "- Read the generated issue template and attach the listed files before posting." 
) return "\n".join(lines) def _render_issue_template(payload: dict[str, Any]) -> str: runtime = payload["runtime"] + existing_issues = payload.get("existing_issues") or [] + has_existing_issues = bool(existing_issues) return ( "\n".join( [ "# LinkedIn MCP scrape failure", "", "## File This Issue", - f"- GitHub issue link: {ISSUE_URL}", f"- Suggested title: {payload['suggested_issue_title']}", "- Read this generated file before posting.", "- Copy the sections below into the GitHub bug report template.", "- Attach this generated markdown file, the server log, and the trace artifacts directory.", + ( + "- Review the existing open issues below first. If one matches, post the gist as a comment there instead of opening a new issue." + if has_existing_issues + else f"- GitHub issue link: {ISSUE_URL}" + ), + "", + "## Existing Open Issues", + *( + [ + f"- #{issue['number']}: {issue['title']} ({issue['url']})" + for issue in existing_issues + ] + if has_existing_issues + else ["- No matching open issues found during diagnostics."] + ), "", "## Installation Method", "- [x] Docker (specify docker image version/tag): `stickerdaniel/linkedin-mcp-server:latest` with local repo mounted into `/app`", @@ -197,6 +226,11 @@ def _render_issue_template(payload: dict[str, Any]) -> str: "1. Run a fresh local `uv run -m linkedin_mcp_server --login`.", "2. Start the local Docker server with the same debug env vars used for this run.", "3. Re-run the failing MCP tool call.", + ( + "4. If one of the listed open issues matches, post the gist as a comment there as additional information." + if has_existing_issues + else "4. If no existing issue matches, open a new GitHub bug report with the information above." 
+ ), ] ) + "\n" @@ -246,5 +280,45 @@ def _build_gist_command( return f'gh gist create {quoted} -d "LinkedIn MCP debug artifacts"' +def _find_existing_issues(payload: dict[str, Any]) -> list[dict[str, Any]]: + query = _issue_search_query(payload) + if not query: + return [] + + request = Request( + f"{ISSUE_SEARCH_API}?q={quote_plus(query)}&per_page=3", + headers={ + "Accept": "application/vnd.github+json", + "User-Agent": "linkedin-mcp-server-diagnostics", + }, + ) + try: + with urlopen(request, timeout=3) as response: + data = json.loads(response.read().decode("utf-8")) + except Exception: + return [] + + issues: list[dict[str, Any]] = [] + for item in data.get("items", []): + issues.append( + { + "number": item.get("number"), + "title": item.get("title"), + "url": item.get("html_url"), + } + ) + return issues + + +def _issue_search_query(payload: dict[str, Any]) -> str: + route = payload.get("target_url") or payload.get("context") or "" + if "/recent-activity/" in route: + summary = '"recent-activity redirect loop"' + else: + section = payload.get("section_name") or "scrape" + summary = f'"{section}"' + return f"repo:stickerdaniel/linkedin-mcp-server is:issue is:open {summary}" + + def _utcnow() -> str: return datetime.now(UTC).replace(microsecond=0).isoformat().replace("+00:00", "Z") diff --git a/linkedin_mcp_server/logging_config.py b/linkedin_mcp_server/logging_config.py index 1454569d..7084a37a 100644 --- a/linkedin_mcp_server/logging_config.py +++ b/linkedin_mcp_server/logging_config.py @@ -11,7 +11,9 @@ import logging from typing import Any, Dict -from linkedin_mcp_server.debug_trace import get_trace_dir +from linkedin_mcp_server.debug_trace import cleanup_trace_dir, get_trace_dir + +_TRACE_FILE_HANDLER: logging.Handler | None = None class MCPJSONFormatter(logging.Formatter): @@ -103,6 +105,13 @@ def configure_logging(log_level: str = "WARNING", json_format: bool = False) -> # Remove existing handlers for handler in root_logger.handlers[:]: 
root_logger.removeHandler(handler) + try: + handler.close() + except Exception: + pass + + global _TRACE_FILE_HANDLER + _TRACE_FILE_HANDLER = None # Add console handler console_handler = logging.StreamHandler() @@ -115,9 +124,26 @@ def configure_logging(log_level: str = "WARNING", json_format: bool = False) -> file_handler = logging.FileHandler(trace_dir / "server.log", encoding="utf-8") file_handler.setFormatter(formatter) root_logger.addHandler(file_handler) + _TRACE_FILE_HANDLER = file_handler # Set specific loggers to reduce noise logging.getLogger("urllib3").setLevel(logging.ERROR) logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR) logging.getLogger("fakeredis").setLevel(logging.WARNING) logging.getLogger("docket").setLevel(logging.WARNING) + + +def teardown_trace_logging(*, keep_traces: bool = False) -> None: + """Close trace logging handlers and cleanup ephemeral traces when allowed.""" + global _TRACE_FILE_HANDLER + + if _TRACE_FILE_HANDLER is not None: + root_logger = logging.getLogger() + root_logger.removeHandler(_TRACE_FILE_HANDLER) + try: + _TRACE_FILE_HANDLER.close() + finally: + _TRACE_FILE_HANDLER = None + + if not keep_traces: + cleanup_trace_dir() diff --git a/tests/test_debug_trace.py b/tests/test_debug_trace.py new file mode 100644 index 00000000..5a4771af --- /dev/null +++ b/tests/test_debug_trace.py @@ -0,0 +1,64 @@ +from linkedin_mcp_server.debug_trace import ( + cleanup_trace_dir, + get_trace_dir, + mark_trace_for_retention, + reset_trace_state_for_testing, +) + + +def setup_function(): + reset_trace_state_for_testing() + + +def teardown_function(): + reset_trace_state_for_testing() + + +def test_get_trace_dir_creates_ephemeral_dir_by_default(monkeypatch, tmp_path): + monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) + + trace_dir = get_trace_dir() + + assert trace_dir is not None + assert trace_dir.exists() + assert "trace-runs" in str(trace_dir) + + +def 
test_cleanup_trace_dir_removes_ephemeral_dir_by_default(monkeypatch, tmp_path): + monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) + trace_dir = get_trace_dir() + assert trace_dir is not None + + cleanup_trace_dir() + + assert not trace_dir.exists() + + +def test_mark_trace_for_retention_keeps_trace_dir(monkeypatch, tmp_path): + monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) + trace_dir = mark_trace_for_retention() + assert trace_dir is not None + + cleanup_trace_dir() + + assert trace_dir.exists() + + +def test_explicit_trace_dir_is_preserved(monkeypatch, tmp_path): + trace_dir = tmp_path / "explicit-trace" + monkeypatch.setenv("LINKEDIN_DEBUG_TRACE_DIR", str(trace_dir)) + + resolved = get_trace_dir() + assert resolved == trace_dir + trace_dir.mkdir(parents=True, exist_ok=True) + + cleanup_trace_dir() + + assert trace_dir.exists() + + +def test_trace_mode_off_disables_trace_dir(monkeypatch, tmp_path): + monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) + monkeypatch.setenv("LINKEDIN_TRACE_MODE", "off") + + assert get_trace_dir() is None diff --git a/tests/test_error_diagnostics.py b/tests/test_error_diagnostics.py new file mode 100644 index 00000000..c1631ef6 --- /dev/null +++ b/tests/test_error_diagnostics.py @@ -0,0 +1,78 @@ +from linkedin_mcp_server.error_diagnostics import ( + build_issue_diagnostics, + format_tool_error_with_diagnostics, +) + + +def test_build_issue_diagnostics_includes_existing_issues(monkeypatch, tmp_path): + monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) + monkeypatch.setattr( + "linkedin_mcp_server.error_diagnostics._find_existing_issues", + lambda payload: [ + { + "number": 220, + "title": "[BUG] recent-activity redirect loop in posts on linux-arm64-container", + "url": "https://github.com/stickerdaniel/linkedin-mcp-server/issues/220", + } + ], + ) + + diagnostics = build_issue_diagnostics( + RuntimeError("boom"), + context="extract-page", + 
target_url="https://www.linkedin.com/in/williamhgates/recent-activity/all/", + section_name="posts", + ) + + assert diagnostics["existing_issues"][0]["number"] == 220 + assert diagnostics["section_name"] == "posts" + assert diagnostics["runtime"]["trace_dir"] is not None + issue_body = diagnostics["issue_template"] + assert "## Existing Open Issues" in issue_body + assert "#220" in issue_body + assert "post the gist as a comment there" in issue_body + + +def test_format_tool_error_with_diagnostics_prefers_existing_issue_comment_flow(): + diagnostics = { + "issue_template_path": "/tmp/issue.md", + "existing_issues": [ + { + "number": 220, + "title": "[BUG] recent-activity redirect loop in posts on linux-arm64-container", + "url": "https://github.com/stickerdaniel/linkedin-mcp-server/issues/220", + } + ], + "runtime": { + "trace_dir": "/tmp/trace", + "log_path": "/tmp/trace/server.log", + "suggested_gist_command": 'gh gist create "/tmp/issue.md"', + "current_runtime_id": "linux-arm64-container", + "hostname": "test-host", + }, + } + + message = format_tool_error_with_diagnostics("Scrape failed", diagnostics) + + assert "Matching open issues were found" in message + assert "#220" in message + assert "post it as a comment" in message + assert "File the issue here" not in message + + +def test_find_existing_issues_query_failure_is_tolerated(monkeypatch, tmp_path): + monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) + + monkeypatch.setattr( + "linkedin_mcp_server.error_diagnostics.urlopen", + lambda *args, **kwargs: (_ for _ in ()).throw(OSError("no network")), + ) + + diagnostics = build_issue_diagnostics( + RuntimeError("boom"), + context="extract-page", + target_url="https://www.linkedin.com/in/test/", + section_name="main_profile", + ) + + assert diagnostics["existing_issues"] == [] From 45e7c8b5844a8c220d8887480ae47addb443aec4 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 00:21:55 +0100 Subject: [PATCH 498/565] fix(auth): harden 
remember-me prompt --- linkedin_mcp_server/core/auth.py | 12 ++++++++++-- tests/test_cli_main.py | 5 +++-- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/linkedin_mcp_server/core/auth.py b/linkedin_mcp_server/core/auth.py index 0326f3c7..d8a73706 100644 --- a/linkedin_mcp_server/core/auth.py +++ b/linkedin_mcp_server/core/auth.py @@ -191,8 +191,16 @@ async def resolve_remember_me_prompt(page: Page) -> bool: logger.debug("Remember-me container did not appear in time") return False - target = page.locator(_REMEMBER_ME_BUTTON_SELECTOR).first - target_count = await page.locator(_REMEMBER_ME_BUTTON_SELECTOR).count() + target_locator = page.locator(_REMEMBER_ME_BUTTON_SELECTOR) + target = target_locator.first + try: + target_count = await target_locator.count() + except Exception: + logger.debug( + "Could not count remember-me buttons; continuing with first match", + exc_info=True, + ) + target_count = -1 logger.debug( "Remember-me target count for %s: %d", _REMEMBER_ME_BUTTON_SELECTOR, diff --git a/tests/test_cli_main.py b/tests/test_cli_main.py index 9b875085..9ed712b8 100644 --- a/tests/test_cli_main.py +++ b/tests/test_cli_main.py @@ -209,8 +209,8 @@ def test_profile_info_reports_bridge_required_for_foreign_runtime( assert exit_info.value.code == 0 captured = capsys.readouterr() - assert "derived (missing)" in captured.out.lower() - assert "checkpoint-committed" in captured.out.lower() + assert "fresh bridge each startup" in captured.out.lower() + assert "fresh bridged foreign-runtime session" in captured.out.lower() def test_profile_info_reports_committed_derived_runtime( @@ -272,6 +272,7 @@ def test_profile_info_reports_committed_derived_runtime( monkeypatch.setattr( "linkedin_mcp_server.cli_main.get_runtime_id", lambda: "linux-amd64-container" ) + monkeypatch.setenv("LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION", "1") monkeypatch.setattr("linkedin_mcp_server.cli_main.get_config", lambda: AppConfig()) monkeypatch.setattr( 
"linkedin_mcp_server.cli_main.configure_logging", lambda **_kwargs: None From df77491a6105cf1f86bbd99b0769102117d5dc51 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 00:22:34 +0100 Subject: [PATCH 499/565] docs(agents): clarify draft PR workflow --- AGENTS.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AGENTS.md b/AGENTS.md index 07e91b57..165fa8bd 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -195,7 +195,7 @@ Always read [`CONTRIBUTING.md`](CONTRIBUTING.md) before filing an issue or worki 3. Implement the feature 4. Test the feature 5. Make sure the README.md, docs/docker-hub.md and AGENTS.md is updated with the new feature - 6. Create a PR with a short description of the feature/fix + 6. Create a draft PR with a short description of the feature/fix, and keep it in draft until it is ready to merge; only then convert it to a regular PR. 7. First review the PR with ai agents. 8. Manually review the PR and merge it if it's approved. Do not squash the commits. 9. Delete the branch after the PR is merged. 
From 34ca6a86998231e562d1e41cd18bb0df67c0fe55 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 00:32:55 +0100 Subject: [PATCH 500/565] refactor(debug): share nav stabilization --- linkedin_mcp_server/debug_utils.py | 35 +++++++++++++++++ linkedin_mcp_server/drivers/browser.py | 48 ++++++----------------- linkedin_mcp_server/scraping/extractor.py | 36 +++-------------- 3 files changed, 53 insertions(+), 66 deletions(-) create mode 100644 linkedin_mcp_server/debug_utils.py diff --git a/linkedin_mcp_server/debug_utils.py b/linkedin_mcp_server/debug_utils.py new file mode 100644 index 00000000..b975f4b3 --- /dev/null +++ b/linkedin_mcp_server/debug_utils.py @@ -0,0 +1,35 @@ +"""Shared debug-only helpers for slower, traceable navigation flows.""" + +from __future__ import annotations + +import asyncio +import logging +import os + +_NAV_STABILIZE_DELAY_SECONDS = 5.0 + + +def debug_stabilize_navigation_enabled() -> bool: + """Return whether debug-only navigation stabilization sleeps are enabled.""" + return os.getenv("LINKEDIN_DEBUG_STABILIZE_NAVIGATION", "").strip().lower() in { + "1", + "true", + "yes", + "on", + } + + +async def stabilize_navigation(label: str, logger: logging.Logger) -> None: + """Pause between navigation steps to help debug timing-sensitive flows.""" + if ( + os.environ.get("PYTEST_CURRENT_TEST") + or not debug_stabilize_navigation_enabled() + ): + return + + logger.debug( + "Stabilizing navigation for %.1fs after %s", + _NAV_STABILIZE_DELAY_SECONDS, + label, + ) + await asyncio.sleep(_NAV_STABILIZE_DELAY_SECONDS) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 477a8b08..00cc07a2 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -9,7 +9,6 @@ import logging import os from pathlib import Path -import asyncio from linkedin_mcp_server.core import ( AuthenticationError, @@ -22,6 +21,7 @@ from linkedin_mcp_server.config import get_config 
from linkedin_mcp_server.debug_trace import record_page_trace +from linkedin_mcp_server.debug_utils import stabilize_navigation from linkedin_mcp_server.session_state import ( SourceState, clear_runtime_profile, @@ -45,32 +45,6 @@ _browser: BrowserManager | None = None _browser_cookie_export_path: Path | None = None _headless: bool = True -_NAV_STABILIZE_DELAY_SECONDS = 5.0 - - -def _debug_stabilize_navigation_enabled() -> bool: - """Return whether debug-only startup stabilization sleeps are enabled.""" - return os.getenv("LINKEDIN_DEBUG_STABILIZE_NAVIGATION", "").strip().lower() in { - "1", - "true", - "yes", - "on", - } - - -async def _stabilize_navigation(label: str) -> None: - """Pause between LinkedIn startup actions to rule out timing issues.""" - if ( - os.environ.get("PYTEST_CURRENT_TEST") - or not _debug_stabilize_navigation_enabled() - ): - return - logger.debug( - "Stabilizing navigation for %.1fs after %s", - _NAV_STABILIZE_DELAY_SECONDS, - label, - ) - await asyncio.sleep(_NAV_STABILIZE_DELAY_SECONDS) def _debug_skip_checkpoint_restart() -> bool: @@ -159,7 +133,7 @@ async def _feed_auth_succeeds( "https://www.linkedin.com/feed/", wait_until="domcontentloaded", ) - await _stabilize_navigation("feed navigation") + await stabilize_navigation("feed navigation", logger) await record_page_trace( browser.page, "feed-after-goto", @@ -167,7 +141,7 @@ async def _feed_auth_succeeds( ) if allow_remember_me: if await resolve_remember_me_prompt(browser.page): - await _stabilize_navigation("remember-me resolution") + await stabilize_navigation("remember-me resolution", logger) await record_page_trace( browser.page, "feed-after-remember-me", @@ -185,7 +159,9 @@ async def _feed_auth_succeeds( return True except Exception as exc: if allow_remember_me and await resolve_remember_me_prompt(browser.page): - await _stabilize_navigation("remember-me resolution after feed failure") + await stabilize_navigation( + "remember-me resolution after feed failure", logger + ) await 
record_page_trace( browser.page, "feed-after-remember-me-error-recovery", @@ -284,13 +260,13 @@ async def _bridge_runtime_profile( await browser.page.goto( "https://www.linkedin.com/feed/", wait_until="domcontentloaded" ) - await _stabilize_navigation("pre-import feed navigation") + await stabilize_navigation("pre-import feed navigation", logger) await record_page_trace(browser.page, "bridge-after-pre-import-feed") if not await browser.import_cookies(cookie_path, preset_name=cookie_preset): raise AuthenticationError( "Portable authentication could not be imported. Run with --login to create a fresh source session." ) - await _stabilize_navigation("bridge cookie import") + await stabilize_navigation("bridge cookie import", logger) await record_page_trace( browser.page, "bridge-after-cookie-import", @@ -300,7 +276,7 @@ async def _bridge_runtime_profile( raise AuthenticationError( "No authentication found. Run with --login to create a profile." ) - await _stabilize_navigation("post-import feed validation") + await stabilize_navigation("post-import feed validation", logger) await record_page_trace(browser.page, "bridge-after-feed-validation") if not persist_runtime: logger.info( @@ -322,7 +298,7 @@ async def _bridge_runtime_profile( raise AuthenticationError( "Derived runtime session could not be checkpointed. Run with --login to create a fresh source session." 
) - await _stabilize_navigation("runtime storage-state export") + await stabilize_navigation("runtime storage-state export", logger) logger.info("Checkpoint-restarting derived runtime profile %s", profile_dir) await browser.close() reopened = _make_browser( @@ -331,7 +307,7 @@ async def _bridge_runtime_profile( viewport=viewport, ) await reopened.start() - await _stabilize_navigation("derived profile reopen") + await stabilize_navigation("derived profile reopen", logger) await record_page_trace( reopened.page, "bridge-after-profile-reopen", @@ -345,7 +321,7 @@ async def _bridge_runtime_profile( raise AuthenticationError( "Derived runtime validation failed; no automatic re-bridge will be attempted. Run with --login to create a fresh source session." ) - await _stabilize_navigation("post-reopen feed validation") + await stabilize_navigation("post-reopen feed validation", logger) await record_page_trace(reopened.page, "bridge-after-reopen-validation") write_runtime_state( runtime_id, diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 315aac12..514946d6 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -3,7 +3,6 @@ import asyncio from dataclasses import dataclass import logging -import os import re from typing import Any, Literal from urllib.parse import quote_plus @@ -20,6 +19,7 @@ LinkedInScraperException, ) from linkedin_mcp_server.debug_trace import record_page_trace +from linkedin_mcp_server.debug_utils import stabilize_navigation from linkedin_mcp_server.error_diagnostics import build_issue_diagnostics from linkedin_mcp_server.core.utils import ( detect_rate_limit, @@ -41,7 +41,6 @@ # Delay between page navigations to avoid rate limiting _NAV_DELAY = 2.0 -_NAV_STABILIZE_DELAY = 5.0 # Backoff before retrying a rate-limited page _RATE_LIMIT_RETRY_DELAY = 5.0 @@ -84,31 +83,6 @@ _SORT_BY_MAP = {"date": "DD", "relevance": "R"} -def 
_debug_stabilize_navigation_enabled() -> bool: - """Return whether debug-only scraper stabilization sleeps are enabled.""" - return os.getenv("LINKEDIN_DEBUG_STABILIZE_NAVIGATION", "").strip().lower() in { - "1", - "true", - "yes", - "on", - } - - -async def _stabilize_navigation(label: str) -> None: - """Pause between LinkedIn navigations to rule out timing issues.""" - if ( - os.environ.get("PYTEST_CURRENT_TEST") - or not _debug_stabilize_navigation_enabled() - ): - return - logger.debug( - "Stabilizing navigation for %.1fs after %s", - _NAV_STABILIZE_DELAY, - label, - ) - await asyncio.sleep(_NAV_STABILIZE_DELAY) - - def _normalize_csv(value: str, mapping: dict[str, str]) -> str: """Normalize a comma-separated filter value using the provided mapping.""" parts = [v.strip() for v in value.split(",")] @@ -286,7 +260,7 @@ def record_navigation(frame: Any) -> None: ) try: await self._page.goto(url, wait_until=wait_until, timeout=30000) - await _stabilize_navigation(f"goto {url}") + await stabilize_navigation(f"goto {url}", logger) await record_page_trace( self._page, "extractor-after-goto", @@ -294,7 +268,9 @@ def record_navigation(frame: Any) -> None: ) except Exception as exc: if allow_remember_me and await resolve_remember_me_prompt(self._page): - await _stabilize_navigation(f"remember-me resolution for {url}") + await stabilize_navigation( + f"remember-me resolution for {url}", logger + ) await record_page_trace( self._page, "extractor-after-remember-me", @@ -328,7 +304,7 @@ def record_navigation(frame: Any) -> None: return if allow_remember_me and await resolve_remember_me_prompt(self._page): - await _stabilize_navigation(f"remember-me retry for {url}") + await stabilize_navigation(f"remember-me retry for {url}", logger) await record_page_trace( self._page, "extractor-after-remember-me-retry", From fd5e0c62dcd76e70faed2ce57d80502855217041 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 00:44:06 +0100 Subject: [PATCH 501/565] docs(agents): 
remove redundant note on PR review comments --- AGENTS.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AGENTS.md b/AGENTS.md index 165fa8bd..eca8c4a8 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -202,7 +202,7 @@ Always read [`CONTRIBUTING.md`](CONTRIBUTING.md) before filing an issue or worki ## PR Reviews -Greptile posts initial reviews as PR review comments, but follow-ups as **issue comments**. Always check both. To trigger a re-review, comment `@greptileai review` on the PR. +Greptile posts initial reviews as PR review comments, but follow-ups as **issue comments**. Always check both. ```bash gh api repos/{owner}/{repo}/pulls/{pr}/reviews # initial reviews From 404980245faa40a9eca0aebe649578b0d964ccf6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 00:51:02 +0100 Subject: [PATCH 502/565] fix(diagnostics): avoid blocking issue search --- linkedin_mcp_server/cli_main.py | 6 +- linkedin_mcp_server/common_utils.py | 16 ++++ linkedin_mcp_server/debug_trace.py | 20 +++-- linkedin_mcp_server/drivers/browser.py | 4 +- linkedin_mcp_server/error_diagnostics.py | 96 ++++++++++++++++++------ linkedin_mcp_server/session_state.py | 10 +-- scripts/debug_cookie_bridge.py | 4 +- tests/conftest.py | 1 + tests/test_authentication.py | 9 +++ tests/test_debug_trace.py | 38 ++++++++++ tests/test_error_diagnostics.py | 53 +++++++++++++ 11 files changed, 213 insertions(+), 44 deletions(-) create mode 100644 linkedin_mcp_server/common_utils.py diff --git a/linkedin_mcp_server/cli_main.py b/linkedin_mcp_server/cli_main.py index 3c41f6d1..242978c3 100644 --- a/linkedin_mcp_server/cli_main.py +++ b/linkedin_mcp_server/cli_main.py @@ -21,11 +21,11 @@ ) from linkedin_mcp_server.config import get_config from linkedin_mcp_server.drivers.browser import ( + experimental_persist_derived_runtime, close_browser, get_or_create_browser, get_profile_dir, profile_exists, - _experimental_persist_derived_runtime, set_headless, ) from linkedin_mcp_server.debug_trace 
import should_keep_traces @@ -167,7 +167,7 @@ def profile_info_and_exit() -> None: runtime_state = load_runtime_state(current_runtime, profile_dir) runtime_profile = runtime_profile_dir(current_runtime, profile_dir) runtime_storage_state = runtime_storage_state_path(current_runtime, profile_dir) - if not _experimental_persist_derived_runtime(): + if not experimental_persist_derived_runtime(): bridge_required = True print("Profile mode: foreign runtime (fresh bridge each startup)") if runtime_profile.exists(): @@ -208,7 +208,7 @@ async def check_session() -> bool: await close_browser() if bridge_required: - if _experimental_persist_derived_runtime(): + if experimental_persist_derived_runtime(): print( "โ„น๏ธ A derived runtime profile will be created and checkpoint-committed on the next server startup." ) diff --git a/linkedin_mcp_server/common_utils.py b/linkedin_mcp_server/common_utils.py new file mode 100644 index 00000000..91c486fb --- /dev/null +++ b/linkedin_mcp_server/common_utils.py @@ -0,0 +1,16 @@ +"""Small shared helpers used across diagnostics and session-state modules.""" + +from __future__ import annotations + +from datetime import UTC, datetime +import re + + +def slugify_fragment(value: str) -> str: + """Return a lowercase URL/file-safe fragment.""" + return re.sub(r"[^a-z0-9]+", "-", value.lower()).strip("-") + + +def utcnow_iso() -> str: + """Return the current UTC timestamp in a compact ISO-8601 form.""" + return datetime.now(UTC).replace(microsecond=0).isoformat().replace("+00:00", "Z") diff --git a/linkedin_mcp_server/debug_trace.py b/linkedin_mcp_server/debug_trace.py index a690108a..c26162ac 100644 --- a/linkedin_mcp_server/debug_trace.py +++ b/linkedin_mcp_server/debug_trace.py @@ -6,12 +6,12 @@ import json import os from pathlib import Path -import re import shutil import tempfile from typing import Any, Literal -from linkedin_mcp_server.session_state import auth_root_dir +from linkedin_mcp_server.common_utils import slugify_fragment +from 
linkedin_mcp_server.session_state import auth_root_dir, get_source_profile_dir TraceMode = Literal["off", "on_error", "always"] @@ -31,9 +31,7 @@ def _trace_mode() -> TraceMode: def _trace_root() -> Path: - source_profile = Path( - os.getenv("USER_DATA_DIR", "~/.linkedin-mcp/profile") - ).expanduser() + source_profile = _safe_source_profile_dir() root = auth_root_dir(source_profile) / "trace-runs" root.mkdir(parents=True, exist_ok=True) return root @@ -98,14 +96,22 @@ def cleanup_trace_dir() -> None: def reset_trace_state_for_testing() -> None: - global _TRACE_DIR, _TRACE_KEEP, _EXPLICIT_TRACE_DIR + global _TRACE_COUNTER, _TRACE_DIR, _TRACE_KEEP, _EXPLICIT_TRACE_DIR + _TRACE_COUNTER = itertools.count(1) _TRACE_DIR = None _TRACE_KEEP = False _EXPLICIT_TRACE_DIR = False def _slugify_step(step: str) -> str: - return re.sub(r"[^a-z0-9]+", "-", step.lower()).strip("-") + return slugify_fragment(step) + + +def _safe_source_profile_dir() -> Path: + try: + return get_source_profile_dir() + except BaseException: + return Path(os.getenv("USER_DATA_DIR", "~/.linkedin-mcp/profile")).expanduser() async def record_page_trace( diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 00cc07a2..5d23bef6 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -67,7 +67,7 @@ def _debug_bridge_every_startup() -> bool: } -def _experimental_persist_derived_runtime() -> bool: +def experimental_persist_derived_runtime() -> bool: """Return whether Docker-style foreign runtimes should reuse derived profiles.""" return os.getenv( "LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION", "" @@ -398,7 +398,7 @@ async def get_or_create_browser( _browser_cookie_export_path = cookie_path return _browser - persist_runtime = _experimental_persist_derived_runtime() + persist_runtime = experimental_persist_derived_runtime() force_bridge = _debug_bridge_every_startup() if not persist_runtime: diff --git 
a/linkedin_mcp_server/error_diagnostics.py b/linkedin_mcp_server/error_diagnostics.py index 8320049d..947313c7 100644 --- a/linkedin_mcp_server/error_diagnostics.py +++ b/linkedin_mcp_server/error_diagnostics.py @@ -2,16 +2,16 @@ from __future__ import annotations +import asyncio from dataclasses import asdict -from datetime import UTC, datetime import json import socket from pathlib import Path -import re from typing import Any from urllib.parse import quote_plus from urllib.request import Request, urlopen +from linkedin_mcp_server.common_utils import slugify_fragment, utcnow_iso from linkedin_mcp_server.debug_trace import get_trace_dir, mark_trace_for_retention from linkedin_mcp_server.session_state import ( auth_root_dir, @@ -37,7 +37,7 @@ def build_issue_diagnostics( section_name: str | None = None, ) -> dict[str, Any]: """Write an issue-ready report and return structured diagnostics.""" - timestamp = _utcnow() + timestamp = utcnow_iso() source_profile_dir = _safe_source_profile_dir() current_runtime_id = get_runtime_id() source_state = load_source_state(source_profile_dir) @@ -48,7 +48,7 @@ def build_issue_diagnostics( issue_dir.mkdir(parents=True, exist_ok=True) issue_path = ( issue_dir - / f"{timestamp.replace(':', '').replace('-', '')}-{_slugify(context)}.md" + / f"{timestamp.replace(':', '').replace('-', '')}-{slugify_fragment(context) or 'issue'}.md" ) gist_command = _build_gist_command(issue_dir, issue_path, log_path) @@ -129,6 +129,8 @@ def _render_issue_template(payload: dict[str, Any]) -> str: runtime = payload["runtime"] existing_issues = payload.get("existing_issues") or [] has_existing_issues = bool(existing_issues) + installation_lines = _installation_method_lines(runtime) + tool_lines = _tool_lines(payload) return ( "\n".join( [ @@ -156,18 +158,12 @@ def _render_issue_template(payload: dict[str, Any]) -> str: ), "", "## Installation Method", - "- [x] Docker (specify docker image version/tag): `stickerdaniel/linkedin-mcp-server:latest` with local 
repo mounted into `/app`", - "- [ ] Claude Desktop DXT extension (specify docker image version/tag): _._._", - "- [ ] Local Python setup", + *installation_lines, "", "## When does the error occur?", "- [ ] At startup", "- [x] During tool call (specify which tool):", - " - [x] get_person_profile", - " - [ ] get_company_profile", - " - [ ] get_job_details", - " - [ ] search_jobs", - " - [ ] close_session", + *tool_lines, "", "## MCP Client Configuration", "", @@ -224,7 +220,7 @@ def _render_issue_template(payload: dict[str, Any]) -> str: "", "## Reproduction", "1. Run a fresh local `uv run -m linkedin_mcp_server --login`.", - "2. Start the local Docker server with the same debug env vars used for this run.", + "2. Start the server again using the same installation method and debug env vars used for this run.", "3. Re-run the failing MCP tool call.", ( "4. If one of the listed open issues matches, post the gist as a comment there as additional information." @@ -237,11 +233,6 @@ def _render_issue_template(payload: dict[str, Any]) -> str: ) -def _slugify(value: str) -> str: - slug = re.sub(r"[^a-z0-9]+", "-", value.lower()).strip("-") - return slug or "issue" - - def _safe_source_profile_dir(): try: return get_source_profile_dir() @@ -281,6 +272,9 @@ def _build_gist_command( def _find_existing_issues(payload: dict[str, Any]) -> list[dict[str, Any]]: + if _inside_running_event_loop(): + return [] + query = _issue_search_query(payload) if not query: return [] @@ -310,6 +304,68 @@ def _find_existing_issues(payload: dict[str, Any]) -> list[dict[str, Any]]: return issues +def _inside_running_event_loop() -> bool: + try: + asyncio.get_running_loop() + except RuntimeError: + return False + return True + + +def _installation_method_lines(runtime: dict[str, Any]) -> list[str]: + current_runtime_id = str(runtime.get("current_runtime_id") or "") + docker_checked = "x" if "container" in current_runtime_id else " " + return [ + f"- [{docker_checked}] Docker (specify docker image 
version/tag): `stickerdaniel/linkedin-mcp-server:latest` with `~/.linkedin-mcp` mounted into `/home/pwuser/.linkedin-mcp`", + "- [ ] Claude Desktop DXT extension (specify docker image version/tag): _._._", + "- [ ] Local Python setup", + ] + + +def _tool_lines(payload: dict[str, Any]) -> list[str]: + selected_tool = _tool_name_for_context(payload) + tool_names = [ + "get_person_profile", + "get_company_profile", + "get_company_posts", + "get_job_details", + "search_jobs", + "search_people", + "close_session", + ] + return [ + f" - [{'x' if tool_name == selected_tool else ' '}] {tool_name}" + for tool_name in tool_names + ] + + +def _tool_name_for_context(payload: dict[str, Any]) -> str | None: + context = str(payload.get("context") or "") + if context in { + "get_person_profile", + "get_company_profile", + "get_company_posts", + "get_job_details", + "search_jobs", + "search_people", + "close_session", + }: + return context + + if context in {"extract_page", "extract_overlay", "scrape_person"}: + return "get_person_profile" + if context == "scrape_company": + return "get_company_profile" + if context == "extract_search_page": + target_url = str(payload.get("target_url") or "") + if "/search/results/people" in target_url: + return "search_people" + if "/jobs/search" in target_url: + return "search_jobs" + + return None + + def _issue_search_query(payload: dict[str, Any]) -> str: route = payload.get("target_url") or payload.get("context") or "" if "/recent-activity/" in route: @@ -318,7 +374,3 @@ def _issue_search_query(payload: dict[str, Any]) -> str: section = payload.get("section_name") or "scrape" summary = f'"{section}"' return f"repo:stickerdaniel/linkedin-mcp-server is:issue is:open {summary}" - - -def _utcnow() -> str: - return datetime.now(UTC).replace(microsecond=0).isoformat().replace("+00:00", "Z") diff --git a/linkedin_mcp_server/session_state.py b/linkedin_mcp_server/session_state.py index 8c46b49f..4b0a1af7 100644 --- 
a/linkedin_mcp_server/session_state.py +++ b/linkedin_mcp_server/session_state.py @@ -3,7 +3,6 @@ from __future__ import annotations from dataclasses import asdict, dataclass -from datetime import UTC, datetime import json import logging import platform @@ -12,6 +11,7 @@ from typing import Any from uuid import uuid4 +from linkedin_mcp_server.common_utils import utcnow_iso from linkedin_mcp_server.config import get_config logger = logging.getLogger(__name__) @@ -164,7 +164,7 @@ def write_source_state(source_profile_dir: Path | None = None) -> SourceState: version=1, source_runtime_id=get_runtime_id(), login_generation=str(uuid4()), - created_at=_utcnow(), + created_at=utcnow_iso(), profile_path=str(profile_dir), cookies_path=str(portable_cookie_path(profile_dir)), ) @@ -196,7 +196,7 @@ def write_runtime_state( ) -> RuntimeState: """Write metadata for a derived runtime session.""" profile_dir = runtime_profile_dir(runtime_id, source_profile_dir).resolve() - committed_at = _utcnow() + committed_at = utcnow_iso() state = RuntimeState( version=1, runtime_id=runtime_id, @@ -269,7 +269,3 @@ def _load_json(path: Path) -> dict[str, Any] | None: def _write_json(path: Path, payload: dict[str, Any]) -> None: path.parent.mkdir(parents=True, exist_ok=True) path.write_text(json.dumps(payload, indent=2, sort_keys=True) + "\n") - - -def _utcnow() -> str: - return datetime.now(UTC).replace(microsecond=0).isoformat().replace("+00:00", "Z") diff --git a/scripts/debug_cookie_bridge.py b/scripts/debug_cookie_bridge.py index 0d4b7d59..80ab8e13 100644 --- a/scripts/debug_cookie_bridge.py +++ b/scripts/debug_cookie_bridge.py @@ -16,8 +16,6 @@ from pathlib import Path from typing import Any, cast -from patchright._impl._api_structures import SetCookieParam - from linkedin_mcp_server.core.auth import detect_auth_barrier, is_logged_in from linkedin_mcp_server.core.browser import BrowserManager @@ -260,7 +258,7 @@ async def run_debug(args: argparse.Namespace) -> dict[str, Any]: if 
args.clear_existing: await browser.context.clear_cookies() - await browser.context.add_cookies(cast(list[SetCookieParam], imported_cookies)) + await browser.context.add_cookies(cast(Any, imported_cookies)) await _capture_step( report, browser.page, diff --git a/tests/conftest.py b/tests/conftest.py index a329336c..e8102a38 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -18,6 +18,7 @@ def reset_singletons(): def isolate_profile_dir(tmp_path, monkeypatch): """Redirect profile directory to tmp_path via config and DEFAULT_PROFILE_DIR.""" fake_profile = tmp_path / "profile" + monkeypatch.setenv("USER_DATA_DIR", str(fake_profile)) # Patch DEFAULT_PROFILE_DIR for any code still referencing the constant for module in [ diff --git a/tests/test_authentication.py b/tests/test_authentication.py index 96fcb6c3..11798c7b 100644 --- a/tests/test_authentication.py +++ b/tests/test_authentication.py @@ -12,6 +12,7 @@ from linkedin_mcp_server.session_state import ( portable_cookie_path, runtime_profile_dir, + runtime_storage_state_path, runtime_state_path, source_state_path, ) @@ -81,6 +82,11 @@ def test_clear_auth_state_removes_source_and_runtime_files(profile_dir): _write_source_metadata(profile_dir) runtime_profile = runtime_profile_dir("linux-amd64-container", profile_dir) runtime_profile.mkdir(parents=True) + storage_state_path = runtime_storage_state_path( + "linux-amd64-container", profile_dir + ) + storage_state_path.parent.mkdir(parents=True, exist_ok=True) + storage_state_path.write_text("{}") runtime_state_path("linux-amd64-container", profile_dir).write_text( json.dumps( { @@ -89,7 +95,10 @@ def test_clear_auth_state_removes_source_and_runtime_files(profile_dir): "source_runtime_id": "macos-arm64-host", "source_login_generation": "gen-1", "created_at": "2026-03-12T17:10:00Z", + "committed_at": "2026-03-12T17:10:05Z", "profile_path": str(runtime_profile), + "storage_state_path": str(storage_state_path), + "commit_method": "checkpoint_restart", } ) ) diff --git 
a/tests/test_debug_trace.py b/tests/test_debug_trace.py index 5a4771af..7907457e 100644 --- a/tests/test_debug_trace.py +++ b/tests/test_debug_trace.py @@ -1,7 +1,13 @@ +import json +from unittest.mock import AsyncMock, MagicMock + +import pytest + from linkedin_mcp_server.debug_trace import ( cleanup_trace_dir, get_trace_dir, mark_trace_for_retention, + record_page_trace, reset_trace_state_for_testing, ) @@ -62,3 +68,35 @@ def test_trace_mode_off_disables_trace_dir(monkeypatch, tmp_path): monkeypatch.setenv("LINKEDIN_TRACE_MODE", "off") assert get_trace_dir() is None + + +@pytest.mark.asyncio +async def test_reset_trace_state_resets_step_counter(monkeypatch, tmp_path): + monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) + + page = MagicMock() + page.url = "https://www.linkedin.com/feed/" + page.title = AsyncMock(return_value="LinkedIn") + page.evaluate = AsyncMock(return_value="Feed") + locator = MagicMock() + locator.count = AsyncMock(return_value=0) + page.locator = MagicMock(return_value=locator) + page.context.cookies = AsyncMock(return_value=[]) + page.screenshot = AsyncMock() + + await record_page_trace(page, "first") + trace_dir = get_trace_dir() + assert trace_dir is not None + first_payload = json.loads((trace_dir / "trace.jsonl").read_text().splitlines()[0]) + assert first_payload["step_id"] == 1 + + reset_trace_state_for_testing() + monkeypatch.setenv("USER_DATA_DIR", str((tmp_path / "second") / "profile")) + + await record_page_trace(page, "first-again") + second_trace_dir = get_trace_dir() + assert second_trace_dir is not None + second_payload = json.loads( + (second_trace_dir / "trace.jsonl").read_text().splitlines()[0] + ) + assert second_payload["step_id"] == 1 diff --git a/tests/test_error_diagnostics.py b/tests/test_error_diagnostics.py index c1631ef6..f82c51c2 100644 --- a/tests/test_error_diagnostics.py +++ b/tests/test_error_diagnostics.py @@ -1,3 +1,5 @@ +import pytest + from linkedin_mcp_server.error_diagnostics import ( 
build_issue_diagnostics, format_tool_error_with_diagnostics, @@ -76,3 +78,54 @@ def test_find_existing_issues_query_failure_is_tolerated(monkeypatch, tmp_path): ) assert diagnostics["existing_issues"] == [] + + +@pytest.mark.asyncio +async def test_build_issue_diagnostics_skips_network_search_in_event_loop( + monkeypatch, tmp_path +): + monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) + + called = {"value": False} + + def fail(*args, **kwargs): + called["value"] = True + raise AssertionError("urlopen should not be called inside the event loop") + + monkeypatch.setattr("linkedin_mcp_server.error_diagnostics.urlopen", fail) + + diagnostics = build_issue_diagnostics( + RuntimeError("boom"), + context="extract-page", + target_url="https://www.linkedin.com/in/test/", + section_name="main_profile", + ) + + assert diagnostics["existing_issues"] == [] + assert called["value"] is False + + +def test_build_issue_diagnostics_marks_inferred_tool_and_container_runtime( + monkeypatch, tmp_path +): + monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) + monkeypatch.setattr( + "linkedin_mcp_server.error_diagnostics.get_runtime_id", + lambda: "linux-amd64-container", + ) + monkeypatch.setattr( + "linkedin_mcp_server.error_diagnostics._find_existing_issues", + lambda payload: [], + ) + + diagnostics = build_issue_diagnostics( + RuntimeError("boom"), + context="search_jobs", + target_url="https://www.linkedin.com/jobs/search/?keywords=python", + section_name="search_results", + ) + + issue_body = diagnostics["issue_template"] + assert "`~/.linkedin-mcp` mounted into `/home/pwuser/.linkedin-mcp`" in issue_body + assert "- [x] Docker" in issue_body + assert " - [x] search_jobs" in issue_body From fbaf95eddeaa4ce4f4cf608ed55d6431b126c151 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 01:09:27 +0100 Subject: [PATCH 503/565] fix(runtime): tighten diagnostics exposure --- AGENTS.md | 2 +- README.md | 2 +- 
linkedin_mcp_server/error_diagnostics.py | 36 ++++++++++++++++++----- linkedin_mcp_server/session_state.py | 37 ++++++++++++++++++------ scripts/debug_cookie_bridge.py | 4 +-- tests/test_error_diagnostics.py | 33 +++++++++++++++++++-- tests/test_session_state.py | 22 ++++++++++++++ 7 files changed, 114 insertions(+), 22 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index eca8c4a8..1c956f82 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -71,7 +71,7 @@ All scraping tools return: `{url, sections: {name: raw_text}}`. Tools may also include: - `references: {section_name: [{kind, url, text?, context?}, ...]}` โ€” compact typed link targets for graph expansion. LinkedIn URLs are relative paths such as `/in/stickerdaniel/`; external URLs remain absolute. -- `section_errors: {section_name: {error_type, error_message, issue_template_path, issue_template, runtime, ...}}` when one section failed but the overall tool call still completed. These diagnostics include trace/log locations and an issue-ready markdown template. +- `section_errors: {section_name: {error_type, error_message, issue_template_path, runtime, ...}}` when one section failed but the overall tool call still completed. These diagnostics include a compact runtime summary plus trace/log locations; the full issue-ready markdown template is written to `issue_template_path`. - `unknown_sections: [name, ...]` when unknown section names were passed. - `job_ids: [id, ...]` for `search_jobs`. diff --git a/README.md b/README.md index 899efd66..2b1f9206 100644 --- a/README.md +++ b/README.md @@ -50,7 +50,7 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company Tool responses keep readable `sections` text and may also include a compact `references` map keyed by section. Each reference includes a typed target, a relative LinkedIn path (or absolute external URL), and a short label/context when available. 
-When one section fails but the overall tool call still completes, responses may also include `section_errors`. Each entry contains structured diagnostics for that section, including the error type/message, runtime/session details, trace/log locations, matching-open-issue hints when available, and an issue-ready markdown template path. +When one section fails but the overall tool call still completes, responses may also include `section_errors`. Each entry contains structured diagnostics for that section, including the error type/message, a compact runtime summary, trace/log locations, matching-open-issue hints when available, and the path to a generated issue-ready markdown report with the full session details. > [!IMPORTANT] > **Breaking change:** LinkedIn recently made some changes to prevent scraping. The newest version uses [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python) with persistent browser profiles instead of Playwright with session files. Old `session.json` files and `LINKEDIN_COOKIE` env vars are no longer supported. Run `--login` again to create a new profile + cookie file that can be mounted in docker. 
02/2026 diff --git a/linkedin_mcp_server/error_diagnostics.py b/linkedin_mcp_server/error_diagnostics.py index 947313c7..9a74247c 100644 --- a/linkedin_mcp_server/error_diagnostics.py +++ b/linkedin_mcp_server/error_diagnostics.py @@ -36,7 +36,7 @@ def build_issue_diagnostics( target_url: str | None = None, section_name: str | None = None, ) -> dict[str, Any]: - """Write an issue-ready report and return structured diagnostics.""" + """Write an issue-ready report and return MCP-safe diagnostics.""" timestamp = utcnow_iso() source_profile_dir = _safe_source_profile_dir() current_runtime_id = get_runtime_id() @@ -87,9 +87,7 @@ def build_issue_diagnostics( payload["existing_issues"] = _find_existing_issues(payload) issue_template = _render_issue_template(payload) issue_path.write_text(issue_template) - payload["issue_template_path"] = str(issue_path) - payload["issue_template"] = issue_template - return payload + return _public_issue_diagnostics(payload, issue_path=issue_path) def format_tool_error_with_diagnostics( @@ -106,9 +104,10 @@ def format_tool_error_with_diagnostics( lines.append(f"- Server log: {runtime['log_path']}") if runtime.get("suggested_gist_command"): lines.append(f"- Suggested gist command: {runtime['suggested_gist_command']}") - lines.append( - f"- Runtime: {runtime.get('current_runtime_id', 'unknown')} on {runtime.get('hostname', 'unknown')}" - ) + runtime_summary = f"- Runtime: {runtime.get('current_runtime_id', 'unknown')}" + if runtime.get("hostname"): + runtime_summary += f" on {runtime['hostname']}" + lines.append(runtime_summary) existing_issues = diagnostics.get("existing_issues") or [] if existing_issues: lines.append("- Matching open issues were found. 
Review them first:") @@ -233,6 +232,29 @@ def _render_issue_template(payload: dict[str, Any]) -> str: ) +def _public_issue_diagnostics( + payload: dict[str, Any], *, issue_path: Path +) -> dict[str, Any]: + runtime = payload["runtime"] + return { + "created_at": payload["created_at"], + "context": payload["context"], + "section_name": payload["section_name"], + "target_url": payload["target_url"], + "error_type": payload["error_type"], + "error_message": payload["error_message"], + "suggested_issue_title": payload["suggested_issue_title"], + "existing_issues": payload["existing_issues"], + "issue_template_path": str(issue_path), + "runtime": { + "current_runtime_id": runtime["current_runtime_id"], + "trace_dir": runtime["trace_dir"], + "log_path": runtime["log_path"], + "suggested_gist_command": runtime["suggested_gist_command"], + }, + } + + def _safe_source_profile_dir(): try: return get_source_profile_dir() diff --git a/linkedin_mcp_server/session_state.py b/linkedin_mcp_server/session_state.py index 4b0a1af7..b6d2d4a8 100644 --- a/linkedin_mcp_server/session_state.py +++ b/linkedin_mcp_server/session_state.py @@ -127,22 +127,41 @@ def _normalize_arch(machine: str) -> str: def _is_container_runtime() -> bool: - if Path("/.dockerenv").exists(): + if any( + path.exists() + for path in ( + Path("/.dockerenv"), + Path("/run/.containerenv"), + Path("/run/containerenv"), + ) + ): return True - cgroup = Path("/proc/1/cgroup") - if cgroup.exists(): - try: - text = cgroup.read_text() - except OSError: - text = "" - markers = ("docker", "containerd", "kubepods", "podman") - if any(marker in text for marker in markers): + markers = ("docker", "containerd", "kubepods", "podman", "libpod", "overlay") + for probe in ( + Path("/proc/1/cgroup"), + Path("/proc/self/cgroup"), + Path("/proc/1/mountinfo"), + Path("/proc/self/mountinfo"), + ): + if _path_contains_markers(probe, markers): return True return False +def _path_contains_markers(path: Path, markers: tuple[str, ...]) -> 
bool: + if not path.exists(): + return False + + try: + text = path.read_text(encoding="utf-8", errors="ignore").lower() + except OSError: + return False + + return any(marker in text for marker in markers) + + def load_source_state(source_profile_dir: Path | None = None) -> SourceState | None: """Load the source session metadata if present.""" data = _load_json(source_state_path(source_profile_dir)) diff --git a/scripts/debug_cookie_bridge.py b/scripts/debug_cookie_bridge.py index 80ab8e13..ac6370ff 100644 --- a/scripts/debug_cookie_bridge.py +++ b/scripts/debug_cookie_bridge.py @@ -10,12 +10,12 @@ import argparse import asyncio import json -import re import shutil import tempfile from pathlib import Path from typing import Any, cast +from linkedin_mcp_server.common_utils import slugify_fragment from linkedin_mcp_server.core.auth import detect_auth_barrier, is_logged_in from linkedin_mcp_server.core.browser import BrowserManager @@ -154,7 +154,7 @@ async def capture_page_state(page, *, body_lines: int) -> dict[str, Any]: def _slugify_step(step: str) -> str: - return re.sub(r"[^a-z0-9]+", "-", step.lower()).strip("-") + return slugify_fragment(step) def _resolve_artifact_dir(args: argparse.Namespace) -> Path | None: diff --git a/tests/test_error_diagnostics.py b/tests/test_error_diagnostics.py index f82c51c2..3f2dbeb0 100644 --- a/tests/test_error_diagnostics.py +++ b/tests/test_error_diagnostics.py @@ -1,3 +1,5 @@ +from pathlib import Path + import pytest from linkedin_mcp_server.error_diagnostics import ( @@ -29,7 +31,9 @@ def test_build_issue_diagnostics_includes_existing_issues(monkeypatch, tmp_path) assert diagnostics["existing_issues"][0]["number"] == 220 assert diagnostics["section_name"] == "posts" assert diagnostics["runtime"]["trace_dir"] is not None - issue_body = diagnostics["issue_template"] + assert "issue_template" not in diagnostics + assert "hostname" not in diagnostics["runtime"] + issue_body = Path(diagnostics["issue_template_path"]).read_text() 
assert "## Existing Open Issues" in issue_body assert "#220" in issue_body assert "post the gist as a comment there" in issue_body @@ -125,7 +129,32 @@ def test_build_issue_diagnostics_marks_inferred_tool_and_container_runtime( section_name="search_results", ) - issue_body = diagnostics["issue_template"] + issue_body = Path(diagnostics["issue_template_path"]).read_text() assert "`~/.linkedin-mcp` mounted into `/home/pwuser/.linkedin-mcp`" in issue_body assert "- [x] Docker" in issue_body assert " - [x] search_jobs" in issue_body + + +def test_build_issue_diagnostics_keeps_sensitive_runtime_details_out_of_mcp_payload( + monkeypatch, tmp_path +): + monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) + monkeypatch.setattr( + "linkedin_mcp_server.error_diagnostics._find_existing_issues", + lambda payload: [], + ) + + diagnostics = build_issue_diagnostics( + RuntimeError("boom"), + context="extract-page", + target_url="https://www.linkedin.com/in/test/", + section_name="main_profile", + ) + + assert diagnostics["issue_template_path"] + assert "issue_template" not in diagnostics + assert "hostname" not in diagnostics["runtime"] + assert "source_profile_dir" not in diagnostics["runtime"] + issue_body = Path(diagnostics["issue_template_path"]).read_text() + assert "## Runtime Diagnostics" in issue_body + assert "Source profile:" in issue_body diff --git a/tests/test_session_state.py b/tests/test_session_state.py index f30bf4ba..49187122 100644 --- a/tests/test_session_state.py +++ b/tests/test_session_state.py @@ -83,3 +83,25 @@ def test_get_runtime_id_marks_container(monkeypatch): ) assert get_runtime_id() == "linux-amd64-container" + + +def test_get_runtime_id_marks_container_from_cgroup_v2_mountinfo(monkeypatch): + monkeypatch.setattr( + "linkedin_mcp_server.session_state.platform.system", lambda: "Linux" + ) + monkeypatch.setattr( + "linkedin_mcp_server.session_state.platform.machine", lambda: "x86_64" + ) + monkeypatch.setattr( + 
"linkedin_mcp_server.session_state.Path.exists", + lambda self: str(self) == "/proc/1/mountinfo", + ) + monkeypatch.setattr( + "linkedin_mcp_server.session_state.Path.read_text", + lambda self, *args, **kwargs: ( + "257 248 0:61 / / rw,relatime - overlay overlay " + "rw,lowerdir=/var/lib/docker/overlay2/l" + ), + ) + + assert get_runtime_id() == "linux-amd64-container" From 6351dc568ce0c5d4563ee067b63923abca79dbfb Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 01:23:39 +0100 Subject: [PATCH 504/565] fix(trace): clean up cli trace lifecycle --- linkedin_mcp_server/core/auth.py | 5 +- linkedin_mcp_server/debug_trace.py | 2 +- linkedin_mcp_server/drivers/browser.py | 3 ++ linkedin_mcp_server/error_diagnostics.py | 2 +- linkedin_mcp_server/logging_config.py | 7 ++- linkedin_mcp_server/session_state.py | 3 +- tests/conftest.py | 2 + tests/test_logging_config.py | 62 ++++++++++++++++++++++++ tests/test_session_state.py | 28 +++++++++++ 9 files changed, 108 insertions(+), 6 deletions(-) create mode 100644 tests/test_logging_config.py diff --git a/linkedin_mcp_server/core/auth.py b/linkedin_mcp_server/core/auth.py index d8a73706..8f5c2f6d 100644 --- a/linkedin_mcp_server/core/auth.py +++ b/linkedin_mcp_server/core/auth.py @@ -270,7 +270,8 @@ async def wait_for_manual_login(page: Page, timeout: int = 300000) -> None: "Waiting up to 5 minutes..." ) - start_time = asyncio.get_event_loop().time() + loop = asyncio.get_running_loop() + start_time = loop.time() while True: if await resolve_remember_me_prompt(page): @@ -281,7 +282,7 @@ async def wait_for_manual_login(page: Page, timeout: int = 300000) -> None: logger.info("Manual login completed successfully") return - elapsed = (asyncio.get_event_loop().time() - start_time) * 1000 + elapsed = (loop.time() - start_time) * 1000 if elapsed > timeout: raise AuthenticationError( "Manual login timeout. Please try again and complete login faster." 
diff --git a/linkedin_mcp_server/debug_trace.py b/linkedin_mcp_server/debug_trace.py index c26162ac..6bb295b6 100644 --- a/linkedin_mcp_server/debug_trace.py +++ b/linkedin_mcp_server/debug_trace.py @@ -110,7 +110,7 @@ def _slugify_step(step: str) -> str: def _safe_source_profile_dir() -> Path: try: return get_source_profile_dir() - except BaseException: + except Exception: return Path(os.getenv("USER_DATA_DIR", "~/.linkedin-mcp/profile")).expanduser() diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 5d23bef6..b6854bf6 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -19,6 +19,7 @@ resolve_remember_me_prompt, ) +from linkedin_mcp_server.common_utils import utcnow_iso from linkedin_mcp_server.config import get_config from linkedin_mcp_server.debug_trace import record_page_trace from linkedin_mcp_server.debug_utils import stabilize_navigation @@ -242,6 +243,7 @@ async def _bridge_runtime_profile( persist_runtime: bool, cookie_preset: str = "auth_minimal", ) -> BrowserManager: + bridge_started_at = utcnow_iso() clear_runtime_profile(runtime_id, get_source_profile_dir()) profile_dir.parent.mkdir(parents=True, exist_ok=True) storage_state_path = runtime_storage_state_path( @@ -328,6 +330,7 @@ async def _bridge_runtime_profile( source_state, storage_state_path, get_source_profile_dir(), + created_at=bridge_started_at, ) logger.info("Derived runtime profile committed for %s", runtime_id) reopened.is_authenticated = True diff --git a/linkedin_mcp_server/error_diagnostics.py b/linkedin_mcp_server/error_diagnostics.py index 9a74247c..4b00bff7 100644 --- a/linkedin_mcp_server/error_diagnostics.py +++ b/linkedin_mcp_server/error_diagnostics.py @@ -258,7 +258,7 @@ def _public_issue_diagnostics( def _safe_source_profile_dir(): try: return get_source_profile_dir() - except BaseException: + except Exception: return (Path.home() / ".linkedin-mcp" / "profile").expanduser() diff --git 
a/linkedin_mcp_server/logging_config.py b/linkedin_mcp_server/logging_config.py index 7084a37a..8323a460 100644 --- a/linkedin_mcp_server/logging_config.py +++ b/linkedin_mcp_server/logging_config.py @@ -7,6 +7,7 @@ Includes proper logger hierarchy and external library noise reduction. """ +import atexit import json import logging from typing import Any, Dict @@ -14,6 +15,7 @@ from linkedin_mcp_server.debug_trace import cleanup_trace_dir, get_trace_dir _TRACE_FILE_HANDLER: logging.Handler | None = None +_TRACE_CLEANUP_REGISTERED = False class MCPJSONFormatter(logging.Formatter): @@ -110,7 +112,7 @@ def configure_logging(log_level: str = "WARNING", json_format: bool = False) -> except Exception: pass - global _TRACE_FILE_HANDLER + global _TRACE_CLEANUP_REGISTERED, _TRACE_FILE_HANDLER _TRACE_FILE_HANDLER = None # Add console handler @@ -125,6 +127,9 @@ def configure_logging(log_level: str = "WARNING", json_format: bool = False) -> file_handler.setFormatter(formatter) root_logger.addHandler(file_handler) _TRACE_FILE_HANDLER = file_handler + if not _TRACE_CLEANUP_REGISTERED: + atexit.register(teardown_trace_logging) + _TRACE_CLEANUP_REGISTERED = True # Set specific loggers to reduce noise logging.getLogger("urllib3").setLevel(logging.ERROR) diff --git a/linkedin_mcp_server/session_state.py b/linkedin_mcp_server/session_state.py index b6d2d4a8..c105043f 100644 --- a/linkedin_mcp_server/session_state.py +++ b/linkedin_mcp_server/session_state.py @@ -211,6 +211,7 @@ def write_runtime_state( storage_state_path: Path, source_profile_dir: Path | None = None, *, + created_at: str | None = None, commit_method: str = "checkpoint_restart", ) -> RuntimeState: """Write metadata for a derived runtime session.""" @@ -221,7 +222,7 @@ def write_runtime_state( runtime_id=runtime_id, source_runtime_id=source_state.source_runtime_id, source_login_generation=source_state.login_generation, - created_at=committed_at, + created_at=created_at or committed_at, committed_at=committed_at, 
profile_path=str(profile_dir), storage_state_path=str(storage_state_path.resolve()), diff --git a/tests/conftest.py b/tests/conftest.py index e8102a38..a3845335 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -56,6 +56,8 @@ def isolate_profile_dir(tmp_path, monkeypatch): for source_module in [ "linkedin_mcp_server.authentication", "linkedin_mcp_server.drivers.browser", + "linkedin_mcp_server.debug_trace", + "linkedin_mcp_server.error_diagnostics", ]: try: monkeypatch.setattr( diff --git a/tests/test_logging_config.py b/tests/test_logging_config.py new file mode 100644 index 00000000..7fe8bee7 --- /dev/null +++ b/tests/test_logging_config.py @@ -0,0 +1,62 @@ +import logging + +from linkedin_mcp_server.debug_trace import get_trace_dir, reset_trace_state_for_testing +from linkedin_mcp_server.logging_config import configure_logging, teardown_trace_logging + + +def setup_function(): + reset_trace_state_for_testing() + + +def teardown_function(): + teardown_trace_logging() + reset_trace_state_for_testing() + + +def test_configure_logging_registers_trace_cleanup_once(monkeypatch, tmp_path): + registrations = [] + + monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) + monkeypatch.setattr( + "linkedin_mcp_server.logging_config.atexit.register", + lambda fn: registrations.append(fn), + ) + monkeypatch.setattr( + "linkedin_mcp_server.logging_config._TRACE_CLEANUP_REGISTERED", + False, + ) + + configure_logging() + configure_logging() + + assert registrations == [teardown_trace_logging] + + +def test_registered_trace_cleanup_removes_ephemeral_trace_dir(monkeypatch, tmp_path): + registrations = [] + + monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) + monkeypatch.setattr( + "linkedin_mcp_server.logging_config.atexit.register", + lambda fn: registrations.append(fn), + ) + monkeypatch.setattr( + "linkedin_mcp_server.logging_config._TRACE_CLEANUP_REGISTERED", + False, + ) + + configure_logging() + trace_dir = get_trace_dir() + + assert trace_dir 
is not None + assert trace_dir.exists() + assert registrations == [teardown_trace_logging] + + registrations[0]() + + assert not trace_dir.exists() + assert not any( + handler + for handler in logging.getLogger().handlers + if isinstance(handler, logging.FileHandler) + ) diff --git a/tests/test_session_state.py b/tests/test_session_state.py index 49187122..e11f3079 100644 --- a/tests/test_session_state.py +++ b/tests/test_session_state.py @@ -58,6 +58,34 @@ def test_write_runtime_state_tracks_source_generation(monkeypatch, isolate_profi ) +def test_write_runtime_state_accepts_explicit_created_at( + monkeypatch, isolate_profile_dir +): + monkeypatch.setattr( + "linkedin_mcp_server.session_state.get_runtime_id", + lambda: "macos-arm64-host", + ) + source_state = write_source_state(isolate_profile_dir) + + storage_state_path = runtime_storage_state_path( + "linux-amd64-container", + isolate_profile_dir, + ) + storage_state_path.parent.mkdir(parents=True, exist_ok=True) + storage_state_path.write_text("{}") + + runtime_state = write_runtime_state( + "linux-amd64-container", + source_state, + storage_state_path, + isolate_profile_dir, + created_at="2026-03-12T17:09:00Z", + ) + + assert runtime_state.created_at == "2026-03-12T17:09:00Z" + assert runtime_state.committed_at != runtime_state.created_at + + def test_runtime_storage_state_path_uses_runtime_dir(isolate_profile_dir): assert runtime_storage_state_path( "linux-amd64-container", From 4fe191cd2a384b7534d4de49c1312820c49b75d1 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 01:36:02 +0100 Subject: [PATCH 505/565] fix(status): clarify bridge validation --- linkedin_mcp_server/cli_main.py | 3 +++ linkedin_mcp_server/session_state.py | 32 ++++++++++++++++++++++++++-- scripts/debug_cookie_bridge.py | 5 ++++- tests/test_cli_main.py | 1 + tests/test_session_state.py | 23 ++++++++++++++++++++ 5 files changed, 61 insertions(+), 3 deletions(-) diff --git a/linkedin_mcp_server/cli_main.py 
b/linkedin_mcp_server/cli_main.py index 242978c3..f25a7d22 100644 --- a/linkedin_mcp_server/cli_main.py +++ b/linkedin_mcp_server/cli_main.py @@ -216,6 +216,9 @@ async def check_session() -> bool: print( "โ„น๏ธ A fresh bridged foreign-runtime session will be created on the next server startup." ) + print( + "โ„น๏ธ Source cookie validity is not verified in this mode. Run the server to test the bridge end-to-end." + ) sys.exit(0) try: diff --git a/linkedin_mcp_server/session_state.py b/linkedin_mcp_server/session_state.py index c105043f..00eaaa20 100644 --- a/linkedin_mcp_server/session_state.py +++ b/linkedin_mcp_server/session_state.py @@ -137,14 +137,19 @@ def _is_container_runtime() -> bool: ): return True - markers = ("docker", "containerd", "kubepods", "podman", "libpod", "overlay") + markers = ("docker", "containerd", "kubepods", "podman", "libpod") for probe in ( Path("/proc/1/cgroup"), Path("/proc/self/cgroup"), + ): + if _path_contains_markers(probe, markers): + return True + + for probe in ( Path("/proc/1/mountinfo"), Path("/proc/self/mountinfo"), ): - if _path_contains_markers(probe, markers): + if _path_contains_markers(probe, markers) or _root_mount_uses_overlay(probe): return True return False @@ -162,6 +167,29 @@ def _path_contains_markers(path: Path, markers: tuple[str, ...]) -> bool: return any(marker in text for marker in markers) +def _root_mount_uses_overlay(path: Path) -> bool: + if not path.exists(): + return False + + try: + lines = path.read_text(encoding="utf-8", errors="ignore").splitlines() + except OSError: + return False + + for line in lines: + if " - " not in line: + continue + left, right = line.split(" - ", maxsplit=1) + left_fields = left.split() + right_fields = right.split() + if len(left_fields) < 5 or not right_fields: + continue + if left_fields[4] == "/" and right_fields[0] == "overlay": + return True + + return False + + def load_source_state(source_profile_dir: Path | None = None) -> SourceState | None: """Load the source 
session metadata if present.""" data = _load_json(source_state_path(source_profile_dir)) diff --git a/scripts/debug_cookie_bridge.py b/scripts/debug_cookie_bridge.py index ac6370ff..109e04cd 100644 --- a/scripts/debug_cookie_bridge.py +++ b/scripts/debug_cookie_bridge.py @@ -232,6 +232,7 @@ async def run_debug(args: argparse.Namespace) -> dict[str, Any]: report["artifact_dir"] = str(artifact_dir) browser = BrowserManager(user_data_dir=profile_dir, headless=True) + browser_closed = False try: await browser.start() await _capture_step( @@ -295,6 +296,7 @@ async def run_debug(args: argparse.Namespace) -> dict[str, Any]: ) report["storage_state_path"] = str(storage_state_path) await browser.close() + browser_closed = True reopened = BrowserManager(user_data_dir=profile_dir, headless=True) try: @@ -326,7 +328,8 @@ async def run_debug(args: argparse.Namespace) -> dict[str, Any]: await reopened.close() return report finally: - await browser.close() + if not browser_closed: + await browser.close() shutil.rmtree(temp_dir, ignore_errors=True) diff --git a/tests/test_cli_main.py b/tests/test_cli_main.py index 9ed712b8..e428ff7a 100644 --- a/tests/test_cli_main.py +++ b/tests/test_cli_main.py @@ -211,6 +211,7 @@ def test_profile_info_reports_bridge_required_for_foreign_runtime( captured = capsys.readouterr() assert "fresh bridge each startup" in captured.out.lower() assert "fresh bridged foreign-runtime session" in captured.out.lower() + assert "source cookie validity is not verified" in captured.out.lower() def test_profile_info_reports_committed_derived_runtime( diff --git a/tests/test_session_state.py b/tests/test_session_state.py index e11f3079..c0b525bc 100644 --- a/tests/test_session_state.py +++ b/tests/test_session_state.py @@ -133,3 +133,26 @@ def test_get_runtime_id_marks_container_from_cgroup_v2_mountinfo(monkeypatch): ) assert get_runtime_id() == "linux-amd64-container" + + +def test_get_runtime_id_ignores_non_root_overlay_mounts(monkeypatch): + 
monkeypatch.setattr( + "linkedin_mcp_server.session_state.platform.system", lambda: "Linux" + ) + monkeypatch.setattr( + "linkedin_mcp_server.session_state.platform.machine", lambda: "x86_64" + ) + monkeypatch.setattr( + "linkedin_mcp_server.session_state.Path.exists", + lambda self: str(self) == "/proc/1/mountinfo", + ) + monkeypatch.setattr( + "linkedin_mcp_server.session_state.Path.read_text", + lambda self, *args, **kwargs: ( + "257 248 0:61 /var/lib/containers/storage/overlay " + "/var/lib/containers/storage/overlay rw,relatime - overlay overlay " + "rw,lowerdir=/var/lib/overlay-host/l" + ), + ) + + assert get_runtime_id() == "linux-amd64-host" From aee1a17f1938112a95139bc673023ea37edd42e5 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 01:49:23 +0100 Subject: [PATCH 506/565] fix(debug): close reopened browser on failure --- linkedin_mcp_server/drivers/browser.py | 14 ++++---- linkedin_mcp_server/error_diagnostics.py | 23 ++++++++++--- tests/test_browser_driver.py | 41 ++++++++++++++++++++++++ tests/test_error_diagnostics.py | 6 ++++ 4 files changed, 72 insertions(+), 12 deletions(-) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index b6854bf6..65350181 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -308,14 +308,14 @@ async def _bridge_runtime_profile( launch_options=launch_options, viewport=viewport, ) - await reopened.start() - await stabilize_navigation("derived profile reopen", logger) - await record_page_trace( - reopened.page, - "bridge-after-profile-reopen", - extra={"profile_dir": str(profile_dir)}, - ) try: + await reopened.start() + await stabilize_navigation("derived profile reopen", logger) + await record_page_trace( + reopened.page, + "bridge-after-profile-reopen", + extra={"profile_dir": str(profile_dir)}, + ) if not await _feed_auth_succeeds(reopened): logger.warning( "Stored derived runtime profile failed post-commit 
validation" diff --git a/linkedin_mcp_server/error_diagnostics.py b/linkedin_mcp_server/error_diagnostics.py index 4b00bff7..9b5d004d 100644 --- a/linkedin_mcp_server/error_diagnostics.py +++ b/linkedin_mcp_server/error_diagnostics.py @@ -84,7 +84,11 @@ def build_issue_diagnostics( current_runtime_id=current_runtime_id, ), } - payload["existing_issues"] = _find_existing_issues(payload) + payload["issue_search_skipped"] = _inside_running_event_loop() + if payload["issue_search_skipped"]: + payload["existing_issues"] = [] + else: + payload["existing_issues"] = _find_existing_issues(payload) issue_template = _render_issue_template(payload) issue_path.write_text(issue_template) return _public_issue_diagnostics(payload, issue_path=issue_path) @@ -117,6 +121,10 @@ def format_tool_error_with_diagnostics( "- If one matches this failure, upload the gist and post it as a comment on that issue instead of opening a new issue." ) else: + if diagnostics.get("issue_search_skipped"): + lines.append( + "- Matching open-issue search was skipped in async server context to avoid blocking the server event loop." + ) lines.append(f"- File the issue here: {ISSUE_URL}") lines.append( "- Read the generated issue template and attach the listed files before posting." @@ -128,6 +136,7 @@ def _render_issue_template(payload: dict[str, Any]) -> str: runtime = payload["runtime"] existing_issues = payload.get("existing_issues") or [] has_existing_issues = bool(existing_issues) + issue_search_skipped = bool(payload.get("issue_search_skipped")) installation_lines = _installation_method_lines(runtime) tool_lines = _tool_lines(payload) return ( @@ -153,7 +162,13 @@ def _render_issue_template(payload: dict[str, Any]) -> str: for issue in existing_issues ] if has_existing_issues - else ["- No matching open issues found during diagnostics."] + else ( + [ + "- Matching open-issue search was skipped in async server context to avoid blocking the server event loop." 
+ ] + if issue_search_skipped + else ["- No matching open issues found during diagnostics."] + ) ), "", "## Installation Method", @@ -245,6 +260,7 @@ def _public_issue_diagnostics( "error_message": payload["error_message"], "suggested_issue_title": payload["suggested_issue_title"], "existing_issues": payload["existing_issues"], + "issue_search_skipped": payload["issue_search_skipped"], "issue_template_path": str(issue_path), "runtime": { "current_runtime_id": runtime["current_runtime_id"], @@ -294,9 +310,6 @@ def _build_gist_command( def _find_existing_issues(payload: dict[str, Any]) -> list[dict[str, Any]]: - if _inside_running_event_loop(): - return [] - query = _issue_search_query(payload) if not query: return [] diff --git a/tests/test_browser_driver.py b/tests/test_browser_driver.py index c9175ae0..c6713cd0 100644 --- a/tests/test_browser_driver.py +++ b/tests/test_browser_driver.py @@ -516,6 +516,47 @@ async def test_experimental_checkpoint_reopen_failure_clears_runtime_dir( assert not runtime_profile_dir( "linux-amd64-container", tmp_path / "profile" ).exists() + reopened_browser.close.assert_awaited_once() + + +@pytest.mark.asyncio +async def test_experimental_reopen_start_failure_closes_reopened_browser( + tmp_path, monkeypatch +): + _write_source_state( + tmp_path, runtime_id="macos-arm64-host", login_generation="gen-2" + ) + first_browser = _make_mock_browser() + first_browser.import_cookies = AsyncMock(return_value=True) + reopened_browser = _make_mock_browser() + reopened_browser.start = AsyncMock(side_effect=RuntimeError("reopen failed")) + monkeypatch.setenv("LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION", "1") + + with ( + patch( + "linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="linux-amd64-container", + ), + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + side_effect=[first_browser, reopened_browser], + ), + patch( + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", + new_callable=AsyncMock, + 
return_value=None, + ), + pytest.raises(RuntimeError, match="reopen failed"), + ): + await get_or_create_browser() + + reopened_browser.close.assert_awaited_once() + assert not runtime_state_path( + "linux-amd64-container", tmp_path / "profile" + ).exists() + assert not runtime_profile_dir( + "linux-amd64-container", tmp_path / "profile" + ).exists() @pytest.mark.asyncio diff --git a/tests/test_error_diagnostics.py b/tests/test_error_diagnostics.py index 3f2dbeb0..56598745 100644 --- a/tests/test_error_diagnostics.py +++ b/tests/test_error_diagnostics.py @@ -29,6 +29,7 @@ def test_build_issue_diagnostics_includes_existing_issues(monkeypatch, tmp_path) ) assert diagnostics["existing_issues"][0]["number"] == 220 + assert diagnostics["issue_search_skipped"] is False assert diagnostics["section_name"] == "posts" assert diagnostics["runtime"]["trace_dir"] is not None assert "issue_template" not in diagnostics @@ -82,6 +83,7 @@ def test_find_existing_issues_query_failure_is_tolerated(monkeypatch, tmp_path): ) assert diagnostics["existing_issues"] == [] + assert diagnostics["issue_search_skipped"] is False @pytest.mark.asyncio @@ -106,7 +108,10 @@ def fail(*args, **kwargs): ) assert diagnostics["existing_issues"] == [] + assert diagnostics["issue_search_skipped"] is True assert called["value"] is False + issue_body = Path(diagnostics["issue_template_path"]).read_text() + assert "search was skipped in async server context" in issue_body def test_build_issue_diagnostics_marks_inferred_tool_and_container_runtime( @@ -155,6 +160,7 @@ def test_build_issue_diagnostics_keeps_sensitive_runtime_details_out_of_mcp_payl assert "issue_template" not in diagnostics assert "hostname" not in diagnostics["runtime"] assert "source_profile_dir" not in diagnostics["runtime"] + assert diagnostics["issue_search_skipped"] is False issue_body = Path(diagnostics["issue_template_path"]).read_text() assert "## Runtime Diagnostics" in issue_body assert "Source profile:" in issue_body From 
6aee393b3b251f19e9162444dd39a3cd8bb0a332 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 02:02:52 +0100 Subject: [PATCH 507/565] fix(setup): guard source-state export --- linkedin_mcp_server/setup.py | 7 +-- tests/test_setup.py | 96 ++++++++++++++++++++++++++++++++++++ 2 files changed, 100 insertions(+), 3 deletions(-) create mode 100644 tests/test_setup.py diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index 407b4fed..2e8def0c 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -82,9 +82,10 @@ async def interactive_login( # first successful /feed/ recovery instead of relying on browser teardown. if await browser.export_cookies(): print(" Cookies exported for Docker portability") - - source_state = write_source_state(user_data_dir) - print(f" Source session generation: {source_state.login_generation}") + source_state = write_source_state(user_data_dir) + print(f" Source session generation: {source_state.login_generation}") + else: + print(" Warning: cookie export failed; Docker bridge may not work") print(f"Profile saved to {user_data_dir}") return True diff --git a/tests/test_setup.py b/tests/test_setup.py new file mode 100644 index 00000000..4768da93 --- /dev/null +++ b/tests/test_setup.py @@ -0,0 +1,96 @@ +from types import SimpleNamespace +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from linkedin_mcp_server.setup import interactive_login + + +class _BrowserContextManager: + def __init__(self, browser): + self.browser = browser + + async def __aenter__(self): + return self.browser + + async def __aexit__(self, exc_type, exc, tb): + return None + + +def _make_browser(*, export_cookies: bool) -> MagicMock: + browser = MagicMock() + browser.page = MagicMock() + browser.page.goto = AsyncMock() + browser.context = MagicMock() + browser.context.cookies = AsyncMock( + return_value=[{"name": "li_at", "domain": ".linkedin.com"}] + ) + browser.export_cookies = 
AsyncMock(return_value=export_cookies) + return browser + + +@pytest.mark.asyncio +async def test_interactive_login_writes_source_state_when_cookie_export_succeeds( + monkeypatch, tmp_path, capsys +): + browser = _make_browser(export_cookies=True) + write_source_state = MagicMock( + return_value=SimpleNamespace(login_generation="gen-123") + ) + + monkeypatch.setattr( + "linkedin_mcp_server.setup.BrowserManager", + lambda **kwargs: _BrowserContextManager(browser), + ) + monkeypatch.setattr("linkedin_mcp_server.setup.warm_up_browser", AsyncMock()) + monkeypatch.setattr( + "linkedin_mcp_server.setup.resolve_remember_me_prompt", + AsyncMock(return_value=False), + ) + monkeypatch.setattr( + "linkedin_mcp_server.setup.wait_for_manual_login", + AsyncMock(), + ) + monkeypatch.setattr( + "linkedin_mcp_server.setup.write_source_state", write_source_state + ) + monkeypatch.setattr("linkedin_mcp_server.setup.asyncio.sleep", AsyncMock()) + + assert await interactive_login(tmp_path / "profile") is True + + write_source_state.assert_called_once_with(tmp_path / "profile") + captured = capsys.readouterr() + assert "cookies exported for docker portability" in captured.out.lower() + assert "source session generation: gen-123" in captured.out.lower() + + +@pytest.mark.asyncio +async def test_interactive_login_skips_source_state_when_cookie_export_fails( + monkeypatch, tmp_path, capsys +): + browser = _make_browser(export_cookies=False) + write_source_state = MagicMock() + + monkeypatch.setattr( + "linkedin_mcp_server.setup.BrowserManager", + lambda **kwargs: _BrowserContextManager(browser), + ) + monkeypatch.setattr("linkedin_mcp_server.setup.warm_up_browser", AsyncMock()) + monkeypatch.setattr( + "linkedin_mcp_server.setup.resolve_remember_me_prompt", + AsyncMock(return_value=False), + ) + monkeypatch.setattr( + "linkedin_mcp_server.setup.wait_for_manual_login", + AsyncMock(), + ) + monkeypatch.setattr( + "linkedin_mcp_server.setup.write_source_state", write_source_state + ) + 
monkeypatch.setattr("linkedin_mcp_server.setup.asyncio.sleep", AsyncMock()) + + assert await interactive_login(tmp_path / "profile") is True + + write_source_state.assert_not_called() + captured = capsys.readouterr() + assert "warning: cookie export failed" in captured.out.lower() From 5da1e7ee17107274268bf0b0535ce8b91e1ddb7d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 08:08:34 +0100 Subject: [PATCH 508/565] fix(auth): tighten remember-me retries --- linkedin_mcp_server/drivers/browser.py | 9 ++- linkedin_mcp_server/scraping/extractor.py | 23 ++++++- tests/test_browser_driver.py | 26 +++++++ tests/test_scraping.py | 83 +++++++++++++++++++++++ 4 files changed, 137 insertions(+), 4 deletions(-) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 65350181..c972adcd 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -163,14 +163,17 @@ async def _feed_auth_succeeds( await stabilize_navigation( "remember-me resolution after feed failure", logger ) + await record_page_trace( + browser.page, + "feed-navigation-error-before-remember-me-retry", + extra={"error": f"{type(exc).__name__}: {exc}"}, + ) await record_page_trace( browser.page, "feed-after-remember-me-error-recovery", extra={"error": f"{type(exc).__name__}: {exc}"}, ) - barrier = await detect_auth_barrier_quick(browser.page) - if barrier is None: - return True + return await _feed_auth_succeeds(browser, allow_remember_me=False) await record_page_trace( browser.page, "feed-navigation-error", diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 514946d6..0b70da8f 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -243,6 +243,7 @@ async def _goto_with_auth_checks( ) -> None: """Navigate to a LinkedIn page and fail fast on auth barriers.""" hops: list[str] = [] + listener_registered = False def 
record_navigation(frame: Any) -> None: if frame != self._page.main_frame: @@ -251,7 +252,15 @@ def record_navigation(frame: Any) -> None: if frame_url and (not hops or hops[-1] != frame_url): hops.append(frame_url) + def unregister_navigation_listener() -> None: + nonlocal listener_registered + if not listener_registered: + return + self._page.remove_listener("framenavigated", record_navigation) + listener_registered = False + self._page.on("framenavigated", record_navigation) + listener_registered = True try: await record_page_trace( self._page, @@ -271,6 +280,16 @@ def record_navigation(frame: Any) -> None: await stabilize_navigation( f"remember-me resolution for {url}", logger ) + await record_page_trace( + self._page, + "extractor-navigation-error-before-remember-me-retry", + extra={ + "target_url": url, + "wait_until": wait_until, + "error": f"{type(exc).__name__}: {exc}", + "hops": hops, + }, + ) await record_page_trace( self._page, "extractor-after-remember-me", @@ -279,6 +298,7 @@ def record_navigation(frame: Any) -> None: "error": f"{type(exc).__name__}: {exc}", }, ) + unregister_navigation_listener() await self._goto_with_auth_checks( url, wait_until=wait_until, @@ -310,6 +330,7 @@ def record_navigation(frame: Any) -> None: "extractor-after-remember-me-retry", extra={"target_url": url, "barrier": barrier}, ) + unregister_navigation_listener() await self._goto_with_auth_checks( url, wait_until=wait_until, @@ -328,7 +349,7 @@ def record_navigation(frame: Any) -> None: "Run with --login and complete the account selection/sign-in flow." 
) finally: - self._page.remove_listener("framenavigated", record_navigation) + unregister_navigation_listener() async def _navigate_to_page(self, url: str) -> None: """Navigate to a LinkedIn page and fail fast on auth barriers.""" diff --git a/tests/test_browser_driver.py b/tests/test_browser_driver.py index c6713cd0..83bce2d1 100644 --- a/tests/test_browser_driver.py +++ b/tests/test_browser_driver.py @@ -7,6 +7,7 @@ from linkedin_mcp_server.config.schema import AppConfig from linkedin_mcp_server.drivers.browser import ( + _feed_auth_succeeds, get_or_create_browser, reset_browser_for_testing, ) @@ -179,6 +180,31 @@ async def test_same_runtime_clicks_remember_me_during_feed_validation(tmp_path): assert remember_me.await_count == 1 +@pytest.mark.asyncio +async def test_feed_auth_retries_feed_after_remember_me_error_recovery(): + browser = _make_mock_browser() + browser.page.goto = AsyncMock( + side_effect=[Exception("net::ERR_TOO_MANY_REDIRECTS"), None] + ) + + with ( + patch( + "linkedin_mcp_server.drivers.browser.resolve_remember_me_prompt", + new_callable=AsyncMock, + return_value=True, + ) as remember_me, + patch( + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", + new_callable=AsyncMock, + return_value=None, + ), + ): + assert await _feed_auth_succeeds(browser) is True + + assert browser.page.goto.await_count == 2 + remember_me.assert_awaited_once() + + @pytest.mark.asyncio async def test_experimental_derived_runtime_reuses_matching_committed_profile( tmp_path, monkeypatch diff --git a/tests/test_scraping.py b/tests/test_scraping.py index ebe32269..0abaf39e 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -417,6 +417,89 @@ async def goto_side_effect(*args, **kwargs): assert mock_page.goto.await_count == 2 mock_resolve.assert_awaited_once() + async def test_goto_with_auth_checks_unhooks_outer_listener_before_retry( + self, mock_page + ): + extractor = LinkedInExtractor(mock_page) + listener_events: list[str] = [] + + def 
record_on(event_name, callback): + listener_events.append(f"on:{event_name}") + + def record_remove(event_name, callback): + listener_events.append(f"off:{event_name}") + + mock_page.on.side_effect = record_on + mock_page.remove_listener.side_effect = record_remove + + with ( + patch( + "linkedin_mcp_server.scraping.extractor.resolve_remember_me_prompt", + new_callable=AsyncMock, + return_value=True, + ), + patch( + "linkedin_mcp_server.scraping.extractor.detect_auth_barrier_quick", + new_callable=AsyncMock, + side_effect=["account picker", None], + ), + ): + await extractor._goto_with_auth_checks( + "https://www.linkedin.com/in/testuser/" + ) + + assert listener_events == [ + "on:framenavigated", + "off:framenavigated", + "on:framenavigated", + "off:framenavigated", + ] + + async def test_goto_with_auth_checks_records_original_failure_before_retry( + self, mock_page + ): + extractor = LinkedInExtractor(mock_page) + mock_page.goto = AsyncMock( + side_effect=[ + Exception("net::ERR_TOO_MANY_REDIRECTS"), + Exception("retry failed"), + ] + ) + + with ( + patch( + "linkedin_mcp_server.scraping.extractor.resolve_remember_me_prompt", + new_callable=AsyncMock, + side_effect=[True, False], + ), + patch( + "linkedin_mcp_server.scraping.extractor.record_page_trace", + new_callable=AsyncMock, + ) as mock_trace, + patch( + "linkedin_mcp_server.scraping.extractor.detect_auth_barrier", + new_callable=AsyncMock, + return_value=None, + ), + pytest.raises(Exception, match="retry failed"), + ): + await extractor._goto_with_auth_checks( + "https://www.linkedin.com/in/testuser/" + ) + + trace_steps = [call.args[1] for call in mock_trace.await_args_list] + assert "extractor-navigation-error-before-remember-me-retry" in trace_steps + + trace_call = next( + call + for call in mock_trace.await_args_list + if call.args[1] == "extractor-navigation-error-before-remember-me-retry" + ) + assert ( + trace_call.kwargs["extra"]["error"] + == "Exception: net::ERR_TOO_MANY_REDIRECTS" + ) + async 
def test_goto_with_auth_checks_logs_failure_context(self, mock_page): extractor = LinkedInExtractor(mock_page) mock_page.goto = AsyncMock(side_effect=Exception("net::ERR_TOO_MANY_REDIRECTS")) From 14c16c3b425a73070164968257d8b1e4cd80a309 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 08:20:48 +0100 Subject: [PATCH 509/565] refactor(diagnostics): trim dead runtime detail --- linkedin_mcp_server/drivers/browser.py | 6 +++--- linkedin_mcp_server/error_diagnostics.py | 5 +---- tests/test_error_diagnostics.py | 2 ++ 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index c972adcd..acaf5713 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -183,7 +183,7 @@ async def _feed_auth_succeeds( return False -def _launch_options() -> tuple[dict[str, str], dict[str, int], object]: +def _launch_options() -> tuple[dict[str, str], dict[str, int]]: config = get_config() viewport = { "width": config.browser.viewport_width, @@ -193,7 +193,7 @@ def _launch_options() -> tuple[dict[str, str], dict[str, int], object]: if config.browser.chrome_path: launch_options["executable_path"] = config.browser.chrome_path logger.info("Using custom Chrome path: %s", config.browser.chrome_path) - return launch_options, viewport, config + return launch_options, viewport def _make_browser( @@ -373,7 +373,7 @@ async def get_or_create_browser( if _browser is not None: return _browser - launch_options, viewport, config = _launch_options() + launch_options, viewport = _launch_options() source_profile_dir = get_profile_dir() cookie_path = portable_cookie_path(source_profile_dir) source_state = load_source_state(source_profile_dir) diff --git a/linkedin_mcp_server/error_diagnostics.py b/linkedin_mcp_server/error_diagnostics.py index 9b5d004d..1ef7f4ec 100644 --- a/linkedin_mcp_server/error_diagnostics.py +++ b/linkedin_mcp_server/error_diagnostics.py 
@@ -108,10 +108,7 @@ def format_tool_error_with_diagnostics( lines.append(f"- Server log: {runtime['log_path']}") if runtime.get("suggested_gist_command"): lines.append(f"- Suggested gist command: {runtime['suggested_gist_command']}") - runtime_summary = f"- Runtime: {runtime.get('current_runtime_id', 'unknown')}" - if runtime.get("hostname"): - runtime_summary += f" on {runtime['hostname']}" - lines.append(runtime_summary) + lines.append(f"- Runtime: {runtime.get('current_runtime_id', 'unknown')}") existing_issues = diagnostics.get("existing_issues") or [] if existing_issues: lines.append("- Matching open issues were found. Review them first:") diff --git a/tests/test_error_diagnostics.py b/tests/test_error_diagnostics.py index 56598745..9f27fe3c 100644 --- a/tests/test_error_diagnostics.py +++ b/tests/test_error_diagnostics.py @@ -65,6 +65,8 @@ def test_format_tool_error_with_diagnostics_prefers_existing_issue_comment_flow( assert "#220" in message assert "post it as a comment" in message assert "File the issue here" not in message + assert "- Runtime: linux-arm64-container" in message + assert "test-host" not in message def test_find_existing_issues_query_failure_is_tolerated(monkeypatch, tmp_path): From 858035d06008433bfbf454ae162f32b8b08fdd71 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 08:33:29 +0100 Subject: [PATCH 510/565] fix(login): fail incomplete source setup --- linkedin_mcp_server/core/auth.py | 5 +++++ linkedin_mcp_server/setup.py | 6 +++++- tests/test_core_auth.py | 25 +++++++++++++++++++++++++ tests/test_setup.py | 5 +++-- 4 files changed, 38 insertions(+), 3 deletions(-) diff --git a/linkedin_mcp_server/core/auth.py b/linkedin_mcp_server/core/auth.py index 8f5c2f6d..36f66db4 100644 --- a/linkedin_mcp_server/core/auth.py +++ b/linkedin_mcp_server/core/auth.py @@ -276,6 +276,11 @@ async def wait_for_manual_login(page: Page, timeout: int = 300000) -> None: while True: if await resolve_remember_me_prompt(page): 
logger.info("Resolved saved-account chooser during manual login flow") + elapsed = (loop.time() - start_time) * 1000 + if elapsed > timeout: + raise AuthenticationError( + "Manual login timeout. Please try again and complete login faster." + ) continue if await is_logged_in(page): diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index 2e8def0c..1cfd7f05 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -85,7 +85,11 @@ async def interactive_login( source_state = write_source_state(user_data_dir) print(f" Source session generation: {source_state.login_generation}") else: - print(" Warning: cookie export failed; Docker bridge may not work") + print( + " Warning: cookie export failed; Docker bridge may not work. " + "Run --login again to retry." + ) + return False print(f"Profile saved to {user_data_dir}") return True diff --git a/tests/test_core_auth.py b/tests/test_core_auth.py index f9115a88..fe0084a8 100644 --- a/tests/test_core_auth.py +++ b/tests/test_core_auth.py @@ -4,6 +4,7 @@ import pytest +from linkedin_mcp_server.core.exceptions import AuthenticationError from linkedin_mcp_server.core.auth import ( detect_auth_barrier, detect_auth_barrier_quick, @@ -196,3 +197,27 @@ async def fake_is_logged_in(_page): await wait_for_manual_login(page, timeout=1000) assert clicked["value"] is True + + +@pytest.mark.asyncio +async def test_wait_for_manual_login_times_out_when_remember_me_repeats(monkeypatch): + page = MagicMock() + + class _FakeLoop: + def __init__(self): + self._times = iter([0.0, 1.1]) + + def time(self): + return next(self._times) + + monkeypatch.setattr( + "linkedin_mcp_server.core.auth.resolve_remember_me_prompt", + AsyncMock(return_value=True), + ) + monkeypatch.setattr( + "linkedin_mcp_server.core.auth.asyncio.get_running_loop", + lambda: _FakeLoop(), + ) + + with pytest.raises(AuthenticationError, match="Manual login timeout"): + await wait_for_manual_login(page, timeout=1000) diff --git 
a/tests/test_setup.py b/tests/test_setup.py index 4768da93..1e1b5d56 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -65,7 +65,7 @@ async def test_interactive_login_writes_source_state_when_cookie_export_succeeds @pytest.mark.asyncio -async def test_interactive_login_skips_source_state_when_cookie_export_fails( +async def test_interactive_login_returns_false_when_cookie_export_fails( monkeypatch, tmp_path, capsys ): browser = _make_browser(export_cookies=False) @@ -89,8 +89,9 @@ async def test_interactive_login_skips_source_state_when_cookie_export_fails( ) monkeypatch.setattr("linkedin_mcp_server.setup.asyncio.sleep", AsyncMock()) - assert await interactive_login(tmp_path / "profile") is True + assert await interactive_login(tmp_path / "profile") is False write_source_state.assert_not_called() captured = capsys.readouterr() assert "warning: cookie export failed" in captured.out.lower() + assert "profile saved to" not in captured.out.lower() From aef65540e69c3df22db970c704a04ff30ee21df5 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 08:51:54 +0100 Subject: [PATCH 511/565] fix(debug): reduce noisy diagnostics --- linkedin_mcp_server/drivers/browser.py | 1 + linkedin_mcp_server/error_handler.py | 16 ++++++---------- tests/test_browser_driver.py | 2 +- tests/test_error_handler.py | 25 +++++++++++++++++++++++++ 4 files changed, 33 insertions(+), 11 deletions(-) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index acaf5713..5226dee3 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -148,6 +148,7 @@ async def _feed_auth_succeeds( "feed-after-remember-me", extra={"allow_remember_me": allow_remember_me}, ) + return await _feed_auth_succeeds(browser, allow_remember_me=False) barrier = await detect_auth_barrier_quick(browser.page) if barrier is not None: await record_page_trace( diff --git a/linkedin_mcp_server/error_handler.py 
b/linkedin_mcp_server/error_handler.py index 225fe748..c245ecbf 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -95,19 +95,15 @@ def raise_tool_error(exception: Exception, context: str = "") -> NoReturn: elif isinstance(exception, RateLimitError): wait_time = getattr(exception, "suggested_wait_time", 300) logger.warning("Rate limit%s: %s (wait=%ds)", ctx, exception, wait_time) - _raise_tool_error_with_diagnostics( - exception, - f"Rate limit detected. Wait {wait_time} seconds before trying again.", - context=context, - ) + raise ToolError( + f"Rate limit detected. Wait {wait_time} seconds before trying again." + ) from exception elif isinstance(exception, ProfileNotFoundError): logger.warning("Profile not found%s: %s", ctx, exception) - _raise_tool_error_with_diagnostics( - exception, - "Profile not found. Check the profile URL is correct.", - context=context, - ) + raise ToolError( + "Profile not found. Check the profile URL is correct." + ) from exception elif isinstance(exception, ElementNotFoundError): logger.warning("Element not found%s: %s", ctx, exception) diff --git a/tests/test_browser_driver.py b/tests/test_browser_driver.py index 83bce2d1..2a7a55b4 100644 --- a/tests/test_browser_driver.py +++ b/tests/test_browser_driver.py @@ -176,7 +176,7 @@ async def test_same_runtime_clicks_remember_me_during_feed_validation(tmp_path): result = await get_or_create_browser() assert result is source_browser - assert source_browser.page.goto.await_count == 1 + assert source_browser.page.goto.await_count == 2 assert remember_me.await_count == 1 diff --git a/tests/test_error_handler.py b/tests/test_error_handler.py index ee813804..5ce5e560 100644 --- a/tests/test_error_handler.py +++ b/tests/test_error_handler.py @@ -43,6 +43,31 @@ def test_raises_tool_error_for_profile_not_found(): raise_tool_error(ProfileNotFoundError("gone")) +def test_rate_limit_skips_issue_diagnostics(monkeypatch): + monkeypatch.setattr( + 
"linkedin_mcp_server.error_handler.build_issue_diagnostics", + lambda *args, **kwargs: (_ for _ in ()).throw( + AssertionError("diagnostics should not run") + ), + ) + error = RateLimitError("Rate limited") + + with pytest.raises(ToolError, match="Wait 300 seconds"): + raise_tool_error(error) + + +def test_profile_not_found_skips_issue_diagnostics(monkeypatch): + monkeypatch.setattr( + "linkedin_mcp_server.error_handler.build_issue_diagnostics", + lambda *args, **kwargs: (_ for _ in ()).throw( + AssertionError("diagnostics should not run") + ), + ) + + with pytest.raises(ToolError, match="Profile not found"): + raise_tool_error(ProfileNotFoundError("gone")) + + def test_raises_tool_error_for_network_error(): with pytest.raises(ToolError, match="Network error"): raise_tool_error(NetworkError("timeout")) From 13141514f04f18f652874887a4db8c252aa8744e Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 09:00:48 +0100 Subject: [PATCH 512/565] fix(auth): relax remember-me selector --- linkedin_mcp_server/core/auth.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/linkedin_mcp_server/core/auth.py b/linkedin_mcp_server/core/auth.py index 36f66db4..b67cceac 100644 --- a/linkedin_mcp_server/core/auth.py +++ b/linkedin_mcp_server/core/auth.py @@ -30,10 +30,7 @@ ("continue as", "sign in using another account"), ) _REMEMBER_ME_CONTAINER_SELECTOR = "#rememberme-div" -_REMEMBER_ME_BUTTON_SELECTOR = ( - "#rememberme-div > div.memberList-container > div > div > " - "div.member-profile-container.list-box > div.member-profile-block > button" -) +_REMEMBER_ME_BUTTON_SELECTOR = "#rememberme-div button" async def warm_up_browser(page: Page) -> None: @@ -206,6 +203,10 @@ async def resolve_remember_me_prompt(page: Page) -> bool: _REMEMBER_ME_BUTTON_SELECTOR, target_count, ) + if target_count == 0: + logger.debug( + "Remember-me container appeared without any matching button selector" + ) try: await target.wait_for(state="visible", timeout=3000) 
logger.debug("Remember-me button became visible") From aa86a1b0c2f52c23159b20e8573f8b023f8f53aa Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 09:14:58 +0100 Subject: [PATCH 513/565] fix(bridge): honor debug cookie override --- linkedin_mcp_server/core/auth.py | 1 + linkedin_mcp_server/drivers/browser.py | 15 ++++----- tests/test_browser_driver.py | 42 +++++++++++++++++++++++--- tests/test_core_auth.py | 17 +++++++++++ 4 files changed, 64 insertions(+), 11 deletions(-) diff --git a/linkedin_mcp_server/core/auth.py b/linkedin_mcp_server/core/auth.py index b67cceac..08eb2b9e 100644 --- a/linkedin_mcp_server/core/auth.py +++ b/linkedin_mcp_server/core/auth.py @@ -207,6 +207,7 @@ async def resolve_remember_me_prompt(page: Page) -> bool: logger.debug( "Remember-me container appeared without any matching button selector" ) + return False try: await target.wait_for(state="visible", timeout=3000) logger.debug("Remember-me button became visible") diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 5226dee3..cfe3114a 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -245,7 +245,7 @@ async def _bridge_runtime_profile( launch_options: dict[str, str], viewport: dict[str, int], persist_runtime: bool, - cookie_preset: str = "auth_minimal", + cookie_preset: str | None = None, ) -> BrowserManager: bridge_started_at = utcnow_iso() clear_runtime_profile(runtime_id, get_source_profile_dir()) @@ -422,7 +422,6 @@ async def get_or_create_browser( launch_options=launch_options, viewport=viewport, persist_runtime=False, - cookie_preset="auth_minimal", ) _apply_browser_settings(browser) _browser = browser @@ -478,7 +477,6 @@ async def get_or_create_browser( launch_options=launch_options, viewport=viewport, persist_runtime=True, - cookie_preset="auth_minimal", ) _apply_browser_settings(browser) _browser = browser @@ -526,10 +524,13 @@ def set_headless(headless: bool) -> 
None: async def validate_session() -> bool: """ - Check if the current session is still valid (logged in). + Check whether startup authentication has already succeeded for this browser. + + Mid-session expiry is detected during real LinkedIn navigations and scraper + auth checks rather than via a fresh login probe on every tool call. Returns: - True if session is valid and user is logged in + True if startup authentication succeeded for the current browser """ browser = await get_or_create_browser() if browser.is_authenticated: @@ -539,10 +540,10 @@ async def validate_session() -> bool: async def ensure_authenticated() -> None: """ - Validate session and raise if expired. + Confirm that the shared browser completed startup authentication. Raises: - AuthenticationError: If session is expired or invalid + AuthenticationError: If no authenticated browser session is available """ if not await validate_session(): raise AuthenticationError("Session expired or invalid.") diff --git a/tests/test_browser_driver.py b/tests/test_browser_driver.py index 2a7a55b4..0061a154 100644 --- a/tests/test_browser_driver.py +++ b/tests/test_browser_driver.py @@ -275,7 +275,7 @@ async def test_default_foreign_runtime_bridges_fresh_each_startup(tmp_path): assert ctor.call_args.kwargs["user_data_dir"] == expected_profile first_browser.import_cookies.assert_awaited_once_with( portable_cookie_path(tmp_path / "profile"), - preset_name="auth_minimal", + preset_name=None, ) first_browser.export_storage_state.assert_not_awaited() first_browser.close.assert_not_awaited() @@ -325,7 +325,7 @@ async def test_experimental_missing_derived_runtime_bridges_and_checkpoint_commi assert ctor.call_args_list[1].kwargs["user_data_dir"] == expected_profile first_browser.import_cookies.assert_awaited_once_with( portable_cookie_path(tmp_path / "profile"), - preset_name="auth_minimal", + preset_name=None, ) first_browser.export_storage_state.assert_awaited_once_with( expected_storage, @@ -372,7 +372,7 @@ async def 
test_debug_skip_checkpoint_restart_keeps_fresh_bridged_browser( assert ctor.call_count == 1 first_browser.import_cookies.assert_awaited_once_with( portable_cookie_path(tmp_path / "profile"), - preset_name="auth_minimal", + preset_name=None, ) first_browser.export_storage_state.assert_not_awaited() first_browser.close.assert_not_awaited() @@ -424,11 +424,45 @@ async def test_debug_bridge_every_startup_skips_matching_committed_profile( assert ctor.call_args.kwargs["user_data_dir"] == expected_profile first_browser.import_cookies.assert_awaited_once_with( portable_cookie_path(tmp_path / "profile"), - preset_name="auth_minimal", + preset_name=None, ) first_browser.export_storage_state.assert_not_awaited() +@pytest.mark.asyncio +async def test_debug_bridge_cookie_set_flows_through_foreign_runtime_bridge( + tmp_path, monkeypatch +): + _write_source_state( + tmp_path, runtime_id="macos-arm64-host", login_generation="gen-2" + ) + first_browser = _make_mock_browser() + first_browser.import_cookies = AsyncMock(return_value=True) + monkeypatch.setenv("LINKEDIN_DEBUG_BRIDGE_COOKIE_SET", "bridge_core") + + with ( + patch( + "linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="linux-amd64-container", + ), + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + return_value=first_browser, + ), + patch( + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", + new_callable=AsyncMock, + return_value=None, + ), + ): + await get_or_create_browser() + + first_browser.import_cookies.assert_awaited_once_with( + portable_cookie_path(tmp_path / "profile"), + preset_name=None, + ) + + @pytest.mark.asyncio async def test_experimental_stale_derived_runtime_rebuilds_from_new_generation( tmp_path, monkeypatch diff --git a/tests/test_core_auth.py b/tests/test_core_auth.py index fe0084a8..017a15da 100644 --- a/tests/test_core_auth.py +++ b/tests/test_core_auth.py @@ -175,6 +175,23 @@ async def test_resolve_remember_me_prompt_returns_false_when_absent(): 
assert result is False +@pytest.mark.asyncio +async def test_resolve_remember_me_prompt_returns_false_when_container_has_no_button(): + page = MagicMock() + target = MagicMock() + target.wait_for = AsyncMock() + locator = MagicMock() + locator.count = AsyncMock(return_value=0) + locator.first = target + page.locator.return_value = locator + page.wait_for_selector = AsyncMock() + + result = await resolve_remember_me_prompt(page) + + assert result is False + target.wait_for.assert_not_awaited() + + @pytest.mark.asyncio async def test_wait_for_manual_login_clicks_saved_account(monkeypatch): page = MagicMock() From b8885bb03705d997df698b0a55f3792c5bfc8d34 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 09:27:49 +0100 Subject: [PATCH 514/565] refactor(debug): drop private cookie helper --- scripts/debug_cookie_bridge.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/scripts/debug_cookie_bridge.py b/scripts/debug_cookie_bridge.py index 109e04cd..e5761169 100644 --- a/scripts/debug_cookie_bridge.py +++ b/scripts/debug_cookie_bridge.py @@ -106,7 +106,7 @@ def load_portable_cookies( ) -> list[dict[str, Any]]: all_cookies = json.loads(cookie_path.read_text()) normalized = [ - BrowserManager._normalize_cookie_domain(cookie) + _normalize_cookie_domain(cookie) for cookie in all_cookies if "linkedin.com" in cookie.get("domain", "") ] @@ -116,6 +116,13 @@ def load_portable_cookies( return [cookie for cookie in normalized if cookie.get("name") in keep_names] +def _normalize_cookie_domain(cookie: dict[str, Any]) -> dict[str, Any]: + domain = cookie.get("domain", "") + if domain in (".www.linkedin.com", "www.linkedin.com"): + return {**cookie, "domain": ".linkedin.com"} + return cookie + + async def capture_page_state(page, *, body_lines: int) -> dict[str, Any]: try: title = await page.title() From 1d2f6dd36986a8899052dac9644f752af4031b82 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 09:40:17 +0100 Subject: 
[PATCH 515/565] fix(browser): close startup failures --- linkedin_mcp_server/drivers/browser.py | 14 ++++---- tests/test_browser_driver.py | 50 ++++++++++++++++++++++++++ 2 files changed, 57 insertions(+), 7 deletions(-) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index cfe3114a..94b08159 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -223,8 +223,8 @@ async def _authenticate_existing_profile( browser = _make_browser( profile_dir, launch_options=launch_options, viewport=viewport ) - await browser.start() try: + await browser.start() if not await _feed_auth_succeeds(browser): raise AuthenticationError( f"Stored runtime profile is invalid: {profile_dir}. Run with --login to refresh the source session." @@ -256,13 +256,13 @@ async def _bridge_runtime_profile( browser = _make_browser( profile_dir, launch_options=launch_options, viewport=viewport ) - await browser.start() - await record_page_trace( - browser.page, - "bridge-browser-started", - extra={"profile_dir": str(profile_dir)}, - ) try: + await browser.start() + await record_page_trace( + browser.page, + "bridge-browser-started", + extra={"profile_dir": str(profile_dir)}, + ) await browser.page.goto( "https://www.linkedin.com/feed/", wait_until="domcontentloaded" ) diff --git a/tests/test_browser_driver.py b/tests/test_browser_driver.py index 0061a154..19d0b457 100644 --- a/tests/test_browser_driver.py +++ b/tests/test_browser_driver.py @@ -538,6 +538,56 @@ async def test_experimental_matching_derived_runtime_failure_does_not_fallback_t invalid_browser.import_cookies.assert_not_awaited() +@pytest.mark.asyncio +async def test_same_runtime_start_failure_closes_browser(tmp_path): + _write_source_state(tmp_path, runtime_id="macos-arm64-host") + source_browser = _make_mock_browser() + source_browser.start = AsyncMock(side_effect=RuntimeError("start failed")) + + with ( + patch( + 
"linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="macos-arm64-host", + ), + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + return_value=source_browser, + ), + pytest.raises(RuntimeError, match="start failed"), + ): + await get_or_create_browser() + + source_browser.close.assert_awaited_once() + + +@pytest.mark.asyncio +async def test_default_foreign_runtime_start_failure_closes_browser(tmp_path): + _write_source_state(tmp_path, runtime_id="macos-arm64-host") + first_browser = _make_mock_browser() + first_browser.start = AsyncMock(side_effect=RuntimeError("start failed")) + + with ( + patch( + "linkedin_mcp_server.drivers.browser.get_runtime_id", + return_value="linux-amd64-container", + ), + patch( + "linkedin_mcp_server.drivers.browser.BrowserManager", + return_value=first_browser, + ), + pytest.raises(RuntimeError, match="start failed"), + ): + await get_or_create_browser() + + first_browser.close.assert_awaited_once() + assert not runtime_profile_dir( + "linux-amd64-container", tmp_path / "profile" + ).exists() + assert not runtime_state_path( + "linux-amd64-container", tmp_path / "profile" + ).exists() + + @pytest.mark.asyncio async def test_experimental_checkpoint_reopen_failure_clears_runtime_dir( tmp_path, monkeypatch From f6e153a943a2383f136f9e34dabeeb04b448ab45 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 10:06:48 +0100 Subject: [PATCH 516/565] fix(setup): align portable cookie export --- linkedin_mcp_server/drivers/browser.py | 3 +-- linkedin_mcp_server/logging_config.py | 3 +++ linkedin_mcp_server/setup.py | 4 ++-- tests/test_browser_driver.py | 15 +++++---------- tests/test_setup.py | 7 +++++++ 5 files changed, 18 insertions(+), 14 deletions(-) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 94b08159..0b6054fa 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -245,7 +245,6 @@ async def 
_bridge_runtime_profile( launch_options: dict[str, str], viewport: dict[str, int], persist_runtime: bool, - cookie_preset: str | None = None, ) -> BrowserManager: bridge_started_at = utcnow_iso() clear_runtime_profile(runtime_id, get_source_profile_dir()) @@ -268,7 +267,7 @@ async def _bridge_runtime_profile( ) await stabilize_navigation("pre-import feed navigation", logger) await record_page_trace(browser.page, "bridge-after-pre-import-feed") - if not await browser.import_cookies(cookie_path, preset_name=cookie_preset): + if not await browser.import_cookies(cookie_path): raise AuthenticationError( "Portable authentication could not be imported. Run with --login to create a fresh source session." ) diff --git a/linkedin_mcp_server/logging_config.py b/linkedin_mcp_server/logging_config.py index 8323a460..d5be160c 100644 --- a/linkedin_mcp_server/logging_config.py +++ b/linkedin_mcp_server/logging_config.py @@ -128,6 +128,9 @@ def configure_logging(log_level: str = "WARNING", json_format: bool = False) -> root_logger.addHandler(file_handler) _TRACE_FILE_HANDLER = file_handler if not _TRACE_CLEANUP_REGISTERED: + # The atexit fallback intentionally delegates the keep/delete + # decision to teardown_trace_logging(), which re-checks runtime + # trace retention state via cleanup_trace_dir(). atexit.register(teardown_trace_logging) _TRACE_CLEANUP_REGISTERED = True diff --git a/linkedin_mcp_server/setup.py b/linkedin_mcp_server/setup.py index 1cfd7f05..49f2bcc8 100644 --- a/linkedin_mcp_server/setup.py +++ b/linkedin_mcp_server/setup.py @@ -14,7 +14,7 @@ wait_for_manual_login, warm_up_browser, ) -from linkedin_mcp_server.session_state import write_source_state +from linkedin_mcp_server.session_state import portable_cookie_path, write_source_state from linkedin_mcp_server.drivers.browser import get_profile_dir @@ -80,7 +80,7 @@ async def interactive_login( # Export source-session cookies for the one-time foreign-runtime bridge. 
# Docker now checkpoint-commits its own derived runtime profile after the # first successful /feed/ recovery instead of relying on browser teardown. - if await browser.export_cookies(): + if await browser.export_cookies(portable_cookie_path(user_data_dir)): print(" Cookies exported for Docker portability") source_state = write_source_state(user_data_dir) print(f" Source session generation: {source_state.login_generation}") diff --git a/tests/test_browser_driver.py b/tests/test_browser_driver.py index 19d0b457..fa76ad69 100644 --- a/tests/test_browser_driver.py +++ b/tests/test_browser_driver.py @@ -274,8 +274,7 @@ async def test_default_foreign_runtime_bridges_fresh_each_startup(tmp_path): assert ctor.call_count == 1 assert ctor.call_args.kwargs["user_data_dir"] == expected_profile first_browser.import_cookies.assert_awaited_once_with( - portable_cookie_path(tmp_path / "profile"), - preset_name=None, + portable_cookie_path(tmp_path / "profile") ) first_browser.export_storage_state.assert_not_awaited() first_browser.close.assert_not_awaited() @@ -324,8 +323,7 @@ async def test_experimental_missing_derived_runtime_bridges_and_checkpoint_commi assert ctor.call_args_list[0].kwargs["user_data_dir"] == expected_profile assert ctor.call_args_list[1].kwargs["user_data_dir"] == expected_profile first_browser.import_cookies.assert_awaited_once_with( - portable_cookie_path(tmp_path / "profile"), - preset_name=None, + portable_cookie_path(tmp_path / "profile") ) first_browser.export_storage_state.assert_awaited_once_with( expected_storage, @@ -371,8 +369,7 @@ async def test_debug_skip_checkpoint_restart_keeps_fresh_bridged_browser( assert result is first_browser assert ctor.call_count == 1 first_browser.import_cookies.assert_awaited_once_with( - portable_cookie_path(tmp_path / "profile"), - preset_name=None, + portable_cookie_path(tmp_path / "profile") ) first_browser.export_storage_state.assert_not_awaited() first_browser.close.assert_not_awaited() @@ -423,8 +420,7 @@ async 
def test_debug_bridge_every_startup_skips_matching_committed_profile( assert ctor.call_count == 1 assert ctor.call_args.kwargs["user_data_dir"] == expected_profile first_browser.import_cookies.assert_awaited_once_with( - portable_cookie_path(tmp_path / "profile"), - preset_name=None, + portable_cookie_path(tmp_path / "profile") ) first_browser.export_storage_state.assert_not_awaited() @@ -458,8 +454,7 @@ async def test_debug_bridge_cookie_set_flows_through_foreign_runtime_bridge( await get_or_create_browser() first_browser.import_cookies.assert_awaited_once_with( - portable_cookie_path(tmp_path / "profile"), - preset_name=None, + portable_cookie_path(tmp_path / "profile") ) diff --git a/tests/test_setup.py b/tests/test_setup.py index 1e1b5d56..68a6d9f9 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -3,6 +3,7 @@ import pytest +from linkedin_mcp_server.session_state import portable_cookie_path from linkedin_mcp_server.setup import interactive_login @@ -58,6 +59,9 @@ async def test_interactive_login_writes_source_state_when_cookie_export_succeeds assert await interactive_login(tmp_path / "profile") is True + browser.export_cookies.assert_awaited_once_with( + portable_cookie_path(tmp_path / "profile") + ) write_source_state.assert_called_once_with(tmp_path / "profile") captured = capsys.readouterr() assert "cookies exported for docker portability" in captured.out.lower() @@ -91,6 +95,9 @@ async def test_interactive_login_returns_false_when_cookie_export_fails( assert await interactive_login(tmp_path / "profile") is False + browser.export_cookies.assert_awaited_once_with( + portable_cookie_path(tmp_path / "profile") + ) write_source_state.assert_not_called() captured = capsys.readouterr() assert "warning: cookie export failed" in captured.out.lower() From a0b66c5150ec86add1468ada81a4673d751593dc Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 10:20:57 +0100 Subject: [PATCH 517/565] fix(bridge): rebridge stale derived profile --- 
linkedin_mcp_server/debug_trace.py | 2 +- linkedin_mcp_server/drivers/browser.py | 24 +++++++++++++++--------- tests/test_browser_driver.py | 19 ++++++++++++------- tests/test_debug_trace.py | 12 ++++++++++++ 4 files changed, 40 insertions(+), 17 deletions(-) diff --git a/linkedin_mcp_server/debug_trace.py b/linkedin_mcp_server/debug_trace.py index 6bb295b6..1bc76cfd 100644 --- a/linkedin_mcp_server/debug_trace.py +++ b/linkedin_mcp_server/debug_trace.py @@ -111,7 +111,7 @@ def _safe_source_profile_dir() -> Path: try: return get_source_profile_dir() except Exception: - return Path(os.getenv("USER_DATA_DIR", "~/.linkedin-mcp/profile")).expanduser() + return Path("~/.linkedin-mcp/profile").expanduser() async def record_page_trace( diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 0b6054fa..7137e601 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -447,15 +447,21 @@ async def get_or_create_browser( current_runtime_id, derived_profile_dir, ) - browser = await _authenticate_existing_profile( - derived_profile_dir, - launch_options=launch_options, - viewport=viewport, - ) - _apply_browser_settings(browser) - _browser = browser - _browser_cookie_export_path = None - return _browser + try: + browser = await _authenticate_existing_profile( + derived_profile_dir, + launch_options=launch_options, + viewport=viewport, + ) + _apply_browser_settings(browser) + _browser = browser + _browser_cookie_export_path = None + return _browser + except AuthenticationError: + logger.warning( + "Derived runtime profile auth failed for %s; re-bridging from source cookies", + current_runtime_id, + ) if force_bridge: logger.warning( diff --git a/tests/test_browser_driver.py b/tests/test_browser_driver.py index fa76ad69..7d66deb4 100644 --- a/tests/test_browser_driver.py +++ b/tests/test_browser_driver.py @@ -502,15 +502,16 @@ async def 
test_experimental_stale_derived_runtime_rebuilds_from_new_generation( @pytest.mark.asyncio -async def test_experimental_matching_derived_runtime_failure_does_not_fallback_to_bridge( +async def test_experimental_matching_derived_runtime_failure_rebridges_from_source( tmp_path, monkeypatch ): - from linkedin_mcp_server.core import AuthenticationError - _write_source_state(tmp_path, runtime_id="macos-arm64-host") _write_runtime_state(tmp_path, "linux-amd64-container") invalid_browser = _make_mock_browser() + bridged_browser = _make_mock_browser() + bridged_browser.import_cookies = AsyncMock(return_value=True) monkeypatch.setenv("LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION", "1") + monkeypatch.setenv("LINKEDIN_DEBUG_SKIP_CHECKPOINT_RESTART", "1") with ( patch( @@ -519,18 +520,22 @@ async def test_experimental_matching_derived_runtime_failure_does_not_fallback_t ), patch( "linkedin_mcp_server.drivers.browser.BrowserManager", - return_value=invalid_browser, + side_effect=[invalid_browser, bridged_browser], ), patch( "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", new_callable=AsyncMock, - return_value="login title: linkedin login", + side_effect=["login title: linkedin login", None], ), - pytest.raises(AuthenticationError), ): - await get_or_create_browser() + result = await get_or_create_browser() + assert result is bridged_browser + invalid_browser.close.assert_awaited_once() invalid_browser.import_cookies.assert_not_awaited() + bridged_browser.import_cookies.assert_awaited_once_with( + portable_cookie_path(tmp_path / "profile") + ) @pytest.mark.asyncio diff --git a/tests/test_debug_trace.py b/tests/test_debug_trace.py index 7907457e..42b0e656 100644 --- a/tests/test_debug_trace.py +++ b/tests/test_debug_trace.py @@ -1,9 +1,11 @@ import json +from pathlib import Path from unittest.mock import AsyncMock, MagicMock import pytest from linkedin_mcp_server.debug_trace import ( + _safe_source_profile_dir, cleanup_trace_dir, get_trace_dir, 
mark_trace_for_retention, @@ -100,3 +102,13 @@ async def test_reset_trace_state_resets_step_counter(monkeypatch, tmp_path): (second_trace_dir / "trace.jsonl").read_text().splitlines()[0] ) assert second_payload["step_id"] == 1 + + +def test_safe_source_profile_dir_ignores_generic_env_fallback(monkeypatch): + monkeypatch.setenv("USER_DATA_DIR", "/tmp/unrelated-user-data") + monkeypatch.setattr( + "linkedin_mcp_server.debug_trace.get_source_profile_dir", + lambda: (_ for _ in ()).throw(RuntimeError("boom")), + ) + + assert _safe_source_profile_dir() == Path("~/.linkedin-mcp/profile").expanduser() From 0c72ead8cf1b512fa1ea99b146b6f1dcc3aca570 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 10:46:44 +0100 Subject: [PATCH 518/565] refactor(auth): harden session metadata --- linkedin_mcp_server/drivers/browser.py | 16 +++------ linkedin_mcp_server/session_state.py | 18 +++++++++-- tests/test_browser_driver.py | 30 +++++++++++++++++ tests/test_session_state.py | 45 ++++++++++++++++++++++++++ 4 files changed, 95 insertions(+), 14 deletions(-) diff --git a/linkedin_mcp_server/drivers/browser.py b/linkedin_mcp_server/drivers/browser.py index 7137e601..a19cf832 100644 --- a/linkedin_mcp_server/drivers/browser.py +++ b/linkedin_mcp_server/drivers/browser.py @@ -164,11 +164,6 @@ async def _feed_auth_succeeds( await stabilize_navigation( "remember-me resolution after feed failure", logger ) - await record_page_trace( - browser.page, - "feed-navigation-error-before-remember-me-retry", - extra={"error": f"{type(exc).__name__}: {exc}"}, - ) await record_page_trace( browser.page, "feed-after-remember-me-error-recovery", @@ -246,12 +241,11 @@ async def _bridge_runtime_profile( viewport: dict[str, int], persist_runtime: bool, ) -> BrowserManager: + source_profile_dir = get_source_profile_dir() bridge_started_at = utcnow_iso() - clear_runtime_profile(runtime_id, get_source_profile_dir()) + clear_runtime_profile(runtime_id, source_profile_dir) 
profile_dir.parent.mkdir(parents=True, exist_ok=True) - storage_state_path = runtime_storage_state_path( - runtime_id, get_source_profile_dir() - ) + storage_state_path = runtime_storage_state_path(runtime_id, source_profile_dir) browser = _make_browser( profile_dir, launch_options=launch_options, viewport=viewport ) @@ -332,7 +326,7 @@ async def _bridge_runtime_profile( runtime_id, source_state, storage_state_path, - get_source_profile_dir(), + source_profile_dir, created_at=bridge_started_at, ) logger.info("Derived runtime profile committed for %s", runtime_id) @@ -343,7 +337,7 @@ async def _bridge_runtime_profile( raise except Exception: await browser.close() - clear_runtime_profile(runtime_id, get_source_profile_dir()) + clear_runtime_profile(runtime_id, source_profile_dir) raise diff --git a/linkedin_mcp_server/session_state.py b/linkedin_mcp_server/session_state.py index 00eaaa20..a6e590a6 100644 --- a/linkedin_mcp_server/session_state.py +++ b/linkedin_mcp_server/session_state.py @@ -2,7 +2,7 @@ from __future__ import annotations -from dataclasses import asdict, dataclass +from dataclasses import asdict, dataclass, fields import json import logging import platform @@ -44,6 +44,10 @@ class RuntimeState: commit_method: str +_SOURCE_STATE_FIELDS = frozenset(field.name for field in fields(SourceState)) +_RUNTIME_STATE_FIELDS = frozenset(field.name for field in fields(RuntimeState)) + + def get_source_profile_dir() -> Path: """Return the configured source profile directory.""" return Path(get_config().browser.user_data_dir).expanduser() @@ -196,7 +200,9 @@ def load_source_state(source_profile_dir: Path | None = None) -> SourceState | N if not data: return None try: - return SourceState(**data) + return SourceState( + **{key: value for key, value in data.items() if key in _SOURCE_STATE_FIELDS} + ) except TypeError: logger.warning("Ignoring invalid source-state.json") return None @@ -227,7 +233,13 @@ def load_runtime_state( if not data: return None try: - return 
RuntimeState(**data) + return RuntimeState( + **{ + key: value + for key, value in data.items() + if key in _RUNTIME_STATE_FIELDS + } + ) except TypeError: logger.warning("Ignoring invalid runtime-state.json for %s", runtime_id) return None diff --git a/tests/test_browser_driver.py b/tests/test_browser_driver.py index 7d66deb4..9b12c92e 100644 --- a/tests/test_browser_driver.py +++ b/tests/test_browser_driver.py @@ -205,6 +205,36 @@ async def test_feed_auth_retries_feed_after_remember_me_error_recovery(): remember_me.assert_awaited_once() +@pytest.mark.asyncio +async def test_feed_auth_records_single_post_recovery_trace(): + browser = _make_mock_browser() + browser.page.goto = AsyncMock( + side_effect=[Exception("net::ERR_TOO_MANY_REDIRECTS"), None] + ) + + with ( + patch( + "linkedin_mcp_server.drivers.browser.resolve_remember_me_prompt", + new_callable=AsyncMock, + return_value=True, + ), + patch( + "linkedin_mcp_server.drivers.browser.detect_auth_barrier_quick", + new_callable=AsyncMock, + return_value=None, + ), + patch( + "linkedin_mcp_server.drivers.browser.record_page_trace", + new_callable=AsyncMock, + ) as record_page_trace, + ): + assert await _feed_auth_succeeds(browser) is True + + steps = [call.args[1] for call in record_page_trace.await_args_list] + assert "feed-after-remember-me-error-recovery" in steps + assert "feed-navigation-error-before-remember-me-retry" not in steps + + @pytest.mark.asyncio async def test_experimental_derived_runtime_reuses_matching_committed_profile( tmp_path, monkeypatch diff --git a/tests/test_session_state.py b/tests/test_session_state.py index c0b525bc..a077d7cc 100644 --- a/tests/test_session_state.py +++ b/tests/test_session_state.py @@ -3,6 +3,7 @@ load_runtime_state, load_source_state, runtime_profile_dir, + runtime_state_path, runtime_storage_state_path, source_state_path, write_runtime_state, @@ -58,6 +59,50 @@ def test_write_runtime_state_tracks_source_generation(monkeypatch, isolate_profi ) +def 
test_load_source_state_ignores_unknown_fields(monkeypatch, isolate_profile_dir): + monkeypatch.setattr( + "linkedin_mcp_server.session_state.get_runtime_id", + lambda: "macos-arm64-host", + ) + state = write_source_state(isolate_profile_dir) + payload = source_state_path(isolate_profile_dir) + payload.write_text( + payload.read_text().replace("}", ', "future_field": "keep calm"}', 1) + ) + + assert load_source_state(isolate_profile_dir) == state + + +def test_load_runtime_state_ignores_unknown_fields(monkeypatch, isolate_profile_dir): + monkeypatch.setattr( + "linkedin_mcp_server.session_state.get_runtime_id", + lambda: "macos-arm64-host", + ) + source_state = write_source_state(isolate_profile_dir) + + storage_state = runtime_storage_state_path( + "linux-amd64-container", + isolate_profile_dir, + ) + storage_state.parent.mkdir(parents=True, exist_ok=True) + storage_state.write_text("{}") + runtime_state = write_runtime_state( + "linux-amd64-container", + source_state, + storage_state, + isolate_profile_dir, + ) + payload = runtime_state_path("linux-amd64-container", isolate_profile_dir) + payload.write_text( + payload.read_text().replace("}", ', "future_field": "still fine"}', 1) + ) + + assert ( + load_runtime_state("linux-amd64-container", isolate_profile_dir) + == runtime_state + ) + + def test_write_runtime_state_accepts_explicit_created_at( monkeypatch, isolate_profile_dir ): From 842f6093c4aff0da8ce07b88ec9d9e1aaa890706 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 11:05:19 +0100 Subject: [PATCH 519/565] fix(diagnostics): skip missing gist files --- linkedin_mcp_server/error_diagnostics.py | 2 +- tests/test_error_diagnostics.py | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/linkedin_mcp_server/error_diagnostics.py b/linkedin_mcp_server/error_diagnostics.py index 1ef7f4ec..6c46b46f 100644 --- a/linkedin_mcp_server/error_diagnostics.py +++ b/linkedin_mcp_server/error_diagnostics.py @@ -298,7 
+298,7 @@ def _build_gist_command( ) -> str: trace_path = issue_dir / "trace.jsonl" files = [str(issue_path)] - if log_path is not None: + if log_path is not None and log_path.exists(): files.append(str(log_path)) if trace_path.exists(): files.append(str(trace_path)) diff --git a/tests/test_error_diagnostics.py b/tests/test_error_diagnostics.py index 9f27fe3c..7213e1b3 100644 --- a/tests/test_error_diagnostics.py +++ b/tests/test_error_diagnostics.py @@ -88,6 +88,26 @@ def test_find_existing_issues_query_failure_is_tolerated(monkeypatch, tmp_path): assert diagnostics["issue_search_skipped"] is False +def test_build_issue_diagnostics_omits_missing_server_log_from_gist( + monkeypatch, tmp_path +): + monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) + monkeypatch.setattr( + "linkedin_mcp_server.error_diagnostics._find_existing_issues", + lambda payload: [], + ) + + diagnostics = build_issue_diagnostics( + RuntimeError("boom"), + context="extract-page", + target_url="https://www.linkedin.com/in/test/", + section_name="main_profile", + ) + + gist_command = diagnostics["runtime"]["suggested_gist_command"] + assert "server.log" not in gist_command + + @pytest.mark.asyncio async def test_build_issue_diagnostics_skips_network_search_in_event_loop( monkeypatch, tmp_path From 9d306d25cb9f7e5b00b8d36d8a35512cbca7ebb0 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 13 Mar 2026 11:33:49 +0100 Subject: [PATCH 520/565] chore: Bump version to 4.4.0 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index b2227b95..9c440c23 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "4.3.0" +version = "4.4.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." 
readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 965bf715..4c540573 100644 --- a/uv.lock +++ b/uv.lock @@ -951,7 +951,7 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "4.3.0" +version = "4.4.0" source = { editable = "." } dependencies = [ { name = "fastmcp" }, From 7c1564658a29e088821c7609b7e202fe5318f458 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 13 Mar 2026 10:36:07 +0000 Subject: [PATCH 521/565] chore: update manifest.json and docker-compose.yml to v4.4.0 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 8169c720..b560308a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:4.3.0 + image: stickerdaniel/linkedin-mcp-server:4.4.0 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index b0b49295..5edb401d 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "4.3.0", + "version": "4.4.0", "description": "Connect Claude to LinkedIn for profiles, companies, job details, and people search", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n- **People Search**: Search for people by keywords and location\n- **Company Posts**: Get recent posts from a company's LinkedIn feed\n- **Person Posts**: Get recent activity and posts from a person's profile\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.3.0", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n- **People Search**: Search for people by keywords and location\n- **Company Posts**: Get recent posts from a company's LinkedIn feed\n- **Person Posts**: Get recent activity and posts from a person's profile\n\n## First-Time Setup\n\n### 1. 
Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.4.0", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:4.3.0" + "stickerdaniel/linkedin-mcp-server:4.4.0" ] } }, From 46a9fbe33f10c5ed6174a36d6c12d1402d551fea Mon Sep 17 00:00:00 2001 From: Jonathan Zarecki Date: Sun, 15 Mar 2026 22:58:06 +0200 Subject: [PATCH 522/565] fix(scraping): wait for search results content before extracting Search results pages load a placeholder first, then fill in the actual results via JavaScript. The extractor reads innerText before content loads, causing search_people to return empty sections intermittently. Add wait_for_function for /search/results/ URLs (same pattern as the activity feed fix in #203). Wait up to 10s for main.innerText > 100 chars before extracting. Tested locally: 100% success rate vs ~60% before the fix. Made-with: Cursor --- linkedin_mcp_server/scraping/extractor.py | 16 ++++ tests/test_scraping.py | 101 ++++++++++++++++++++++ 2 files changed, 117 insertions(+) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 0b70da8f..42c2e774 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -428,6 +428,22 @@ async def _extract_page_once( except PlaywrightTimeoutError: logger.debug("Activity feed content did not appear on %s", url) + # Search results pages load a placeholder first then fill in results + # via JavaScript. Wait for actual content before extracting. 
+ is_search = "/search/results/" in url + if is_search: + try: + await self._page.wait_for_function( + """() => { + const main = document.querySelector('main'); + if (!main) return false; + return main.innerText.length > 100; + }""", + timeout=10000, + ) + except PlaywrightTimeoutError: + logger.debug("Search results content did not appear on %s", url) + # Scroll to trigger lazy loading if is_activity: await scroll_to_bottom(self._page, pause_time=1.0, max_scrolls=10) diff --git a/tests/test_scraping.py b/tests/test_scraping.py index 0abaf39e..279086e9 100644 --- a/tests/test_scraping.py +++ b/tests/test_scraping.py @@ -1672,3 +1672,104 @@ async def test_activity_page_timeout_proceeds_gracefully(self, mock_page): # Should return whatever text is available, not crash assert result.text == tab_headers + + +class TestSearchResultsExtraction: + """Tests for search results page detection and wait behavior in _extract_page_once.""" + + async def test_search_results_page_waits_for_content(self, mock_page): + """Search results URLs should call wait_for_function to wait for content.""" + mock_page.evaluate = AsyncMock( + return_value={ + "source": "root", + "text": "Search results for John Doe. 
" * 10, + "references": [], + } + ) + mock_page.wait_for_function = AsyncMock() + extractor = LinkedInExtractor(mock_page) + with ( + patch( + "linkedin_mcp_server.scraping.extractor.scroll_to_bottom", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.detect_rate_limit", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.handle_modal_close", + new_callable=AsyncMock, + return_value=False, + ), + ): + result = await extractor._extract_page_once( + "https://www.linkedin.com/search/results/people/?keywords=John+Doe", + section_name="search_results", + ) + + mock_page.wait_for_function.assert_awaited_once() + assert len(result.text) > 100 + + async def test_non_search_page_does_not_wait_for_search_content(self, mock_page): + """Non-search URLs should not trigger the search results wait.""" + mock_page.evaluate = AsyncMock( + return_value={"source": "root", "text": "Profile text", "references": []} + ) + mock_page.wait_for_function = AsyncMock() + extractor = LinkedInExtractor(mock_page) + with ( + patch( + "linkedin_mcp_server.scraping.extractor.scroll_to_bottom", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.detect_rate_limit", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.handle_modal_close", + new_callable=AsyncMock, + return_value=False, + ), + ): + await extractor._extract_page_once( + "https://www.linkedin.com/in/billgates/", + section_name="main_profile", + ) + + mock_page.wait_for_function.assert_not_awaited() + + async def test_search_results_timeout_proceeds_gracefully(self, mock_page): + """When search results never load, extraction proceeds with available text.""" + from patchright.async_api import TimeoutError as PlaywrightTimeoutError + + placeholder = "Search results for John Doe. 
No results found" + mock_page.evaluate = AsyncMock( + return_value={"source": "root", "text": placeholder, "references": []} + ) + mock_page.wait_for_function = AsyncMock( + side_effect=PlaywrightTimeoutError("Timeout") + ) + extractor = LinkedInExtractor(mock_page) + with ( + patch( + "linkedin_mcp_server.scraping.extractor.scroll_to_bottom", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.detect_rate_limit", + new_callable=AsyncMock, + ), + patch( + "linkedin_mcp_server.scraping.extractor.handle_modal_close", + new_callable=AsyncMock, + return_value=False, + ), + ): + result = await extractor._extract_page_once( + "https://www.linkedin.com/search/results/people/?keywords=John+Doe", + section_name="search_results", + ) + + assert result.text == placeholder From 6a2b0d2e9b096199244e9820c0dec2e294a5aa9d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 16 Mar 2026 01:53:01 +0100 Subject: [PATCH 523/565] chore: Bump version to 4.4.1 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9c440c23..2df7dfaf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "4.4.0" +version = "4.4.1" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 4c540573..0c2018a2 100644 --- a/uv.lock +++ b/uv.lock @@ -951,7 +951,7 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "4.4.0" +version = "4.4.1" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From 922b2adcc6834ad709d0d247c0c35b4ccf740121 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 16 Mar 2026 00:54:45 +0000 Subject: [PATCH 524/565] chore: update manifest.json and docker-compose.yml to v4.4.1 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index b560308a..47e5778a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:4.4.0 + image: stickerdaniel/linkedin-mcp-server:4.4.1 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index 5edb401d..92b35b65 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "4.4.0", + "version": "4.4.1", "description": "Connect Claude to LinkedIn for profiles, companies, job details, and people search", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n- **People Search**: Search for people by keywords and location\n- **Company Posts**: Get recent posts from a company's LinkedIn feed\n- **Person Posts**: Get recent activity and posts from a person's profile\n\n## First-Time Setup\n\n### 1. 
Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.4.0", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n- **People Search**: Search for people by keywords and location\n- **Company Posts**: Get recent posts from a company's LinkedIn feed\n- **Person Posts**: Get recent activity and posts from a person's profile\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.4.1", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:4.4.0" + "stickerdaniel/linkedin-mcp-server:4.4.1" ] } }, From bc8fc78703c47964b20ae9c59011f279f7d6bf29 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 16 Mar 2026 02:15:45 +0100 Subject: [PATCH 525/565] docs: Trim AGENTS.md to behavioral guidance, clean README Docker section --- AGENTS.md | 193 +++++++-------------------------------------- README.md | 28 +------ docs/docker-hub.md | 18 +---- 3 files changed, 31 insertions(+), 208 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 1c956f82..a9f2cc07 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -4,153 +4,37 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co ## Development Commands -**Environment Setup:** - -- 
Use `uv` for dependency management: `uv sync` (installs all dependencies) -- Development dependencies: `uv sync --group dev` -- Bump version: see [Release Process](#release-process) below -- Install browser: `uv run patchright install chromium` -- Run server locally: `uv run -m linkedin_mcp_server --no-headless` -- Run via uvx (PyPI/package verification only): `uvx linkedin-scraper-mcp` -- Run in Docker: `docker run -it --rm -v ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp stickerdaniel/linkedin-mcp-server:latest` - -**Code Quality:** - +- Use `uv` for dependency management: `uv sync` (dev: `uv sync --group dev`) - Lint: `uv run ruff check .` (auto-fix with `--fix`) - Format: `uv run ruff format .` - Type check: `uv run ty check` (using ty, not mypy) - Tests: `uv run pytest` (with coverage: `uv run pytest --cov`) -- Pre-commit hooks: `uv run pre-commit install` then `uv run pre-commit run --all-files` - -**Docker Commands:** - -- Build: `docker build -t linkedin-mcp-server .` -- Login for local development: `uv run -m linkedin_mcp_server --login` -- Login for packaged-distribution verification: `uvx linkedin-scraper-mcp --login` - -## Architecture Overview - -This is a **LinkedIn MCP (Model Context Protocol) Server** that enables AI assistants to interact with LinkedIn through web scraping. The codebase follows a two-phase startup pattern: - -1. **Authentication Phase** (`authentication.py`) - Validates LinkedIn browser profile exists -2. 
**Server Runtime Phase** (`server.py`) - Runs FastMCP server with tool registration - -**Core Components:** - -- `cli_main.py` - Entry point with CLI argument parsing and orchestration -- `server.py` - FastMCP server setup and tool registration -- `tools/` - LinkedIn scraping tools (person, company, job profiles) -- `drivers/browser.py` - Patchright browser management with persistent profile (singleton) -- `core/` - Inlined browser, auth, and utility code (replaces `linkedin_scraper` dependency) -- `scraping/` - innerText extraction engine with explicit section selection -- `config/` - Configuration management (schema, loaders) -- `authentication.py` - LinkedIn profile-based authentication - -**Tool Categories:** - -- **Person Tools** (`tools/person.py`) - Profile scraping with explicit section selection -- **Company Tools** (`tools/company.py`) - Company profile and posts extraction -- **Job Tools** (`tools/job.py`) - Job posting details and search functionality +- Pre-commit: `uv run pre-commit install` then `uv run pre-commit run --all-files` +- Run server locally: `uv run -m linkedin_mcp_server --no-headless` +- Run via uvx (PyPI/package verification only): `uvx linkedin-scraper-mcp` +- Docker build: `docker build -t linkedin-mcp-server .` +- Install browser: `uv run patchright install chromium` -**Available MCP Tools:** +## Scraping Rules -| Tool | Description | -|------|-------------| -| `get_person_profile` | Get profile with explicit `sections` selection (experience, education, interests, honors, languages, contact_info, posts) | -| `get_company_profile` | Get company info with explicit `sections` selection (posts, jobs) | -| `get_company_posts` | Get recent posts from company feed | -| `get_job_details` | Get job posting details | -| `search_jobs` | Search jobs by keywords and location | -| `close_session` | Close browser session and clean up resources | -| `search_people` | Search for people by keywords and location | +- **One section = one navigation.** 
Each entry in `PERSON_SECTIONS` / `COMPANY_SECTIONS` (`scraping/fields.py`) maps to exactly one page navigation. Never combine multiple URLs behind a single section. +- **Minimize DOM dependence.** Prefer innerText and URL navigation over DOM selectors. When DOM access is unavoidable, use minimal generic selectors (`a[href*="/jobs/view/"]`) โ€” never class names tied to LinkedIn's layout. -**Tool Return Format:** +## Tool Return Format All scraping tools return: `{url, sections: {name: raw_text}}`. -Tools may also include: - -- `references: {section_name: [{kind, url, text?, context?}, ...]}` โ€” compact typed link targets for graph expansion. LinkedIn URLs are relative paths such as `/in/stickerdaniel/`; external URLs remain absolute. -- `section_errors: {section_name: {error_type, error_message, issue_template_path, runtime, ...}}` when one section failed but the overall tool call still completed. These diagnostics include a compact runtime summary plus trace/log locations; the full issue-ready markdown template is written to `issue_template_path`. -- `unknown_sections: [name, ...]` when unknown section names were passed. -- `job_ids: [id, ...]` for `search_jobs`. - -**Scraping Architecture (`scraping/`):** - -- `fields.py` - `PERSON_SECTIONS` and `COMPANY_SECTIONS` config dicts mapping section name to `(url_suffix, is_overlay)` -- `extractor.py` - `LinkedInExtractor` class using navigate-scroll-innerText pattern -- **One section = one navigation.** Each entry in `PERSON_SECTIONS` / `COMPANY_SECTIONS` maps to exactly one page navigation. Never combine multiple URLs behind a single section. -- **Minimize DOM dependence.** Prefer innerText and URL navigation over DOM selectors. When DOM access is unavoidable (e.g. extracting `href` attributes, finding scrollable containers), use minimal generic selectors (`a[href*="/jobs/view/"]`) โ€” never class names tied to LinkedIn's layout. 
- -**Core Subpackage (`core/`):** - -- `exceptions.py` - Exception hierarchy (AuthenticationError, RateLimitError, etc.) -- `browser.py` - `BrowserManager` with persistent context and cookie import/export -- `auth.py` - `is_logged_in()`, `wait_for_manual_login()`, `warm_up_browser()` -- `utils.py` - `detect_rate_limit()`, `scroll_to_bottom()`, `handle_modal_close()` - -**Dependency Injection (`dependencies.py`):** - -- `get_extractor()` โ€” async factory that acquires the singleton browser, runs `ensure_authenticated()`, and returns a `LinkedInExtractor` -- Injected into tool functions via `Depends(get_extractor)` (hidden from MCP tool schema) -- No cleanup needed โ€” browser lifecycle is managed by the server lifespan - -**Authentication Flow:** - -- Source runtime uses persistent browser profile at `~/.linkedin-mcp/profile/` -- `--login` creates a new source login generation and exports `cookies.json` -- Foreign runtimes derive their Linux bridge state under `~/.linkedin-mcp/runtime-profiles//profile/` -- By default, foreign runtimes fresh-bridge on every startup using the minimal working auth cookie subset and keep that bridged browser alive for the server lifetime -- Persistent derived runtime reuse remains experimental behind `LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION=1` - -**Transport Modes:** - -- `stdio` (default) - Standard I/O for CLI MCP clients -- `streamable-http` - HTTP server mode for web-based MCP clients -- Tool calls are serialized within one server process to protect the shared - LinkedIn browser session. Concurrent client requests queue instead of running - in parallel. Use debug logging to inspect scraper lock wait/acquire/release. 
- -## Development Notes - -- **Python Version:** Requires Python 3.12+ -- **Package Manager:** Uses `uv` for fast dependency resolution -- **Browser:** Uses Patchright (anti-detection Playwright fork) with Chromium -- **Logging:** Configurable levels, JSON format for non-interactive mode -- **Error Handling:** Comprehensive exception handling for LinkedIn rate limits, captchas, etc. - -**Key Dependencies:** - -- `fastmcp` - MCP server framework -- `patchright` - Anti-detection browser automation (Playwright fork) - -**Configuration:** - -- CLI arguments with comprehensive help (`--help`) -- Browser profile stored at `~/.linkedin-mcp/profile/` - -**Commit Message Format:** - -- Follow conventional commits: `type(scope): subject` -- Types: feat, fix, docs, style, refactor, test, chore, perf, ci -- Keep subject <50 chars, imperative mood - -## Commit Message Guidelines - -**Commit Message Rules:** - -- Always use the commit message format type(scope): subject -- Types: feat, fix, docs, style, refactor, test, chore, perf, ci -- Keep subject <50 chars, imperative mood +Optional additional keys: +- `references: {section_name: [{kind, url, text?, context?}]}` โ€” LinkedIn URLs are relative paths +- `section_errors: {section_name: {error_type, error_message, issue_template_path, runtime, ...}}` +- `unknown_sections: [name, ...]` +- `job_ids: [id, ...]` (search_jobs only) ## Verifying Bug Reports -Always verify scraping bugs end-to-end against live LinkedIn, not just code analysis. When working in this repository, use the local code path with `uv run`, not `uvx`, so the running process reflects the files in your workspace. Use `uvx` only when intentionally verifying the packaged distribution. For live Docker investigations, always refresh the source session first with a fresh local `uv run -m linkedin_mcp_server --login` before testing each materially different approach. Assume a valid login profile already exists at `~/.linkedin-mcp/profile/`. 
Start the server with HTTP transport in one terminal (this process is long-running and will block the shell), then in a second terminal call the tool via curl: +Always verify scraping bugs end-to-end against live LinkedIn, not just code analysis. Use `uv run`, not `uvx`, so the running process reflects your workspace. Use `uvx` only for packaged distribution verification. For live Docker investigations, refresh the source session first with `uv run -m linkedin_mcp_server --login` before testing each materially different approach. Assume a valid login profile already exists at `~/.linkedin-mcp/profile/`. ```bash -# Create or refresh the local source session -uv run -m linkedin_mcp_server --login - # Start server uv run -m linkedin_mcp_server --transport streamable-http --log-level DEBUG @@ -163,7 +47,7 @@ curl -s -D /tmp/mcp-headers -X POST http://127.0.0.1:8000/mcp \ # Extract the session ID from saved headers SESSION_ID=$(grep -i 'Mcp-Session-Id' /tmp/mcp-headers | awk '{print $2}' | tr -d '\r') -# Call a tool (use Mcp-Session-Id from previous response) +# Call a tool curl -s -X POST http://127.0.0.1:8000/mcp \ -H "Content-Type: application/json" \ -H "Accept: application/json, text/event-stream" \ @@ -182,23 +66,19 @@ gt submit # merge PR to trigger release workflow After the workflow completes, file a PR in the MCP registry to update the version. -## Important Development Notes - -Always read [`CONTRIBUTING.md`](CONTRIBUTING.md) before filing an issue or working on this repository, and strictly follow its guidelines and checklists. +## Development Workflow -### Development Workflow +Always read [`CONTRIBUTING.md`](CONTRIBUTING.md) before filing an issue or working on this repository. -- Never sign a PR or commit with Claude Code -- When implementing a new feature/fix, follow this process: - 1. Check open issues. If no issue exists for the feature, create one that follows the feature issue template. - 2. 
Create a new branch from `main` and name it `feature/issue-number-short-description` - 3. Implement the feature - 4. Test the feature - 5. Make sure the README.md, docs/docker-hub.md and AGENTS.md is updated with the new feature - 6. Create a draft PR with a short description of the feature/fix, and keep it in draft until it is ready to merge; only then convert it to a regular PR. - 7. First review the PR with ai agents. - 8. Manually review the PR and merge it if it's approved. Do not squash the commits. - 9. Delete the branch after the PR is merged. +- Include the model used for code generation in PR descriptions (e.g. "Generated with Claude Opus 4.6") +- Include a short prompt from the user messages that reproduces the PR diff. This tells the maintainer what was intended, which is often more useful than reviewing the full diff. +- When implementing a new feature/fix: + 1. Check open issues. If no issue exists, create one following the issue template. + 2. Branch from `main`: `feature/issue-number-short-description` + 3. Implement and test + 4. Update README.md, docs/docker-hub.md, and AGENTS.md if relevant + 5. Create a draft PR; only convert to regular PR when ready to merge + 6. Review with AI agents first, then manual review. Do not squash commits. ## PR Reviews @@ -210,30 +90,13 @@ gh api repos/{owner}/{repo}/pulls/{pr}/comments # inline comments gh api repos/{owner}/{repo}/issues/{pr}/comments # follow-up reviews ``` -## Greptile MCP - -The project includes a `.mcp.json` that configures the Greptile MCP server for Claude Code. Contributors need to set `GREPTILE_API_KEY` in their environment (get one at [app.greptile.com](https://app.greptile.com)). - -For Codex CLI, run: - -```bash -codex mcp add greptile --url https://api.greptile.com/mcp --bearer-token-env-var GREPTILE_API_KEY -``` - ## btca When you need up-to-date information about technologies used in this project, use btca to query source repositories directly. 
**Available resources**: fastmcp, patchright, pytest, ruff, ty, uv, inquirer, pythonDotenv, pyperclip, preCommit -### Usage - ```bash btca ask -r -q "" -``` - -Use multiple `-r` flags to query multiple resources at once: - -```bash btca ask -r fastmcp -r patchright -q "How do I set up browser context with FastMCP tools?" ``` diff --git a/README.md b/README.md index 2b1f9206..4d65d975 100644 --- a/README.md +++ b/README.md @@ -192,35 +192,11 @@ Docker runs headless (no browser window), so you need to create a browser profil **Step 1: Create profile on the host (one-time setup)** ```bash -# Installed package usage uvx linkedin-scraper-mcp --login - -# Local development from this repo -uv run -m linkedin_mcp_server --login ``` -If you are debugging or verifying code changes in this repository, prefer `uv run -m linkedin_mcp_server ...` so the running process matches your workspace files. Use `uvx` when intentionally testing the packaged distribution. - This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile is saved to `~/.linkedin-mcp/profile/`. -After login, the host writes: - -- source profile: `~/.linkedin-mcp/profile/` -- portable cookies: `~/.linkedin-mcp/cookies.json` -- source session metadata: `~/.linkedin-mcp/source-state.json` - -Docker foreign runtimes derive a Linux runtime profile under: - -- `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/profile/` -- `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/storage-state.json` -- `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/runtime-state.json` - -By default, Docker now creates a fresh bridged Linux session on every startup using the minimal working auth cookie subset (`li_at`, `JSESSIONID`, `bcookie`, `bscookie`, `lidc`) and keeps that session alive for the server lifetime. This currently works more reliably than reusing a checkpointed derived runtime profile across restarts. 
- -Runtime traces/logs are captured into an ephemeral run directory by default and are automatically preserved only when a scrape failure occurs. Set `LINKEDIN_TRACE_MODE=always` to keep every run or `LINKEDIN_TRACE_MODE=off` to disable trace persistence entirely. - -If you want to experiment with persistent derived runtime reuse anyway, set `LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION=1`. In that mode, the first Docker run performs an internal checkpoint restart after `/feed/` succeeds and later Docker runs try to reuse the committed Linux runtime profile directly. - **Step 2: Configure Claude Desktop with Docker** ```json @@ -239,7 +215,7 @@ If you want to experiment with persistent derived runtime reuse anyway, set `LIN ``` > [!NOTE] -> Docker now fresh-bridges by default on each startup. Persistent derived runtime reuse is still available behind `LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION=1`, but it remains experimental. +> Docker creates a fresh session on each startup. Sessions may expire over time โ€” run `uvx linkedin-scraper-mcp --login` again if you encounter authentication issues. > [!NOTE] > **Why can't I run `--login` in Docker?** Docker containers don't have a display server. Create a profile on your host using the [uvx setup](#-uvx-setup-recommended---universal) and mount it into Docker. @@ -505,7 +481,7 @@ uv run -m linkedin_mcp_server --transport streamable-http --host 127.0.0.1 --por Built with [FastMCP](https://gofastmcp.com/) and [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python). -โš ๏ธ Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. +Use in accordance with [LinkedIn's Terms of Service](https://www.linkedin.com/legal/user-agreement). Web scraping may violate LinkedIn's terms. This tool is for personal use only. 
## License diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 9db753de..0f5d6693 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -23,23 +23,7 @@ Create a browser profile locally, then mount it into Docker. uvx linkedin-scraper-mcp --login ``` -This creates the source session artifacts on the host: - -- `~/.linkedin-mcp/profile/` -- `~/.linkedin-mcp/cookies.json` -- `~/.linkedin-mcp/source-state.json` - -Docker foreign runtimes derive a Linux runtime profile under: - -- `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/profile/` -- `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/storage-state.json` -- `~/.linkedin-mcp/runtime-profiles/linux-amd64-container/runtime-state.json` - -By default, Docker now creates a fresh bridged Linux session on every startup using the minimal working auth cookie subset (`li_at`, `JSESSIONID`, `bcookie`, `bscookie`, `lidc`) and keeps that session alive for the server lifetime. - -Runtime traces/logs are captured into an ephemeral run directory by default and are automatically preserved only when a scrape failure occurs. Set `LINKEDIN_TRACE_MODE=always` to keep every run or `LINKEDIN_TRACE_MODE=off` to disable trace persistence entirely. - -If you want to experiment with persistent derived runtime reuse anyway, set `LINKEDIN_EXPERIMENTAL_PERSIST_DERIVED_SESSION=1`. In that mode, the first Docker run performs an internal checkpoint restart after `/feed/` succeeds and later Docker runs try to reuse the committed Linux runtime profile directly. +This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile is saved to `~/.linkedin-mcp/profile/`. 
**Step 2: Configure Claude Desktop with Docker** From 2dbe6f0338ec5e43fdbd3b4039de2c027486ba1b Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 16 Mar 2026 02:19:11 +0100 Subject: [PATCH 526/565] docs(agents): Add commit message format --- AGENTS.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/AGENTS.md b/AGENTS.md index a9f2cc07..2cc4d46b 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -66,6 +66,12 @@ gt submit # merge PR to trigger release workflow After the workflow completes, file a PR in the MCP registry to update the version. +## Commit Messages + +- Follow conventional commits: `type(scope): subject` +- Types: feat, fix, docs, style, refactor, test, chore, perf, ci +- Keep subject <50 chars, imperative mood + ## Development Workflow Always read [`CONTRIBUTING.md`](CONTRIBUTING.md) before filing an issue or working on this repository. From ed51f925df2d441e42b88cf538e82c73eb87e83c Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 16 Mar 2026 02:21:45 +0100 Subject: [PATCH 527/565] docs: Add brief Docker auth bridging explanation --- README.md | 2 +- docs/docker-hub.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 4d65d975..11fcdcd5 100644 --- a/README.md +++ b/README.md @@ -195,7 +195,7 @@ Docker runs headless (no browser window), so you need to create a browser profil uvx linkedin-scraper-mcp --login ``` -This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile is saved to `~/.linkedin-mcp/profile/`. +This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile and cookies are saved under `~/.linkedin-mcp/`. On startup, Docker derives a Linux browser profile from your host cookies and creates a fresh session each time. 
**Step 2: Configure Claude Desktop with Docker** diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 0f5d6693..16170a6e 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -23,7 +23,7 @@ Create a browser profile locally, then mount it into Docker. uvx linkedin-scraper-mcp --login ``` -This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile is saved to `~/.linkedin-mcp/profile/`. +This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile and cookies are saved under `~/.linkedin-mcp/`. On startup, Docker derives a Linux browser profile from your host cookies and creates a fresh session each time. **Step 2: Configure Claude Desktop with Docker** From 78cc5708892b6f3a2a87060974ba5b969820bec2 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 16 Mar 2026 02:23:03 +0100 Subject: [PATCH 528/565] docs: Mention uvx as stable alternative to Docker --- README.md | 2 +- docs/docker-hub.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 11fcdcd5..766224ab 100644 --- a/README.md +++ b/README.md @@ -195,7 +195,7 @@ Docker runs headless (no browser window), so you need to create a browser profil uvx linkedin-scraper-mcp --login ``` -This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile and cookies are saved under `~/.linkedin-mcp/`. On startup, Docker derives a Linux browser profile from your host cookies and creates a fresh session each time. +This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile and cookies are saved under `~/.linkedin-mcp/`. On startup, Docker derives a Linux browser profile from your host cookies and creates a fresh session each time. 
If you experience stability issues with Docker, consider using the [uvx setup](#-uvx-setup-recommended---universal) instead. **Step 2: Configure Claude Desktop with Docker** diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 16170a6e..2ceb526d 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -23,7 +23,7 @@ Create a browser profile locally, then mount it into Docker. uvx linkedin-scraper-mcp --login ``` -This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile and cookies are saved under `~/.linkedin-mcp/`. On startup, Docker derives a Linux browser profile from your host cookies and creates a fresh session each time. +This opens a browser window where you log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile and cookies are saved under `~/.linkedin-mcp/`. On startup, Docker derives a Linux browser profile from your host cookies and creates a fresh session each time. For better stability, consider the [uvx setup](https://github.com/stickerdaniel/linkedin-mcp-server#-uvx-setup-recommended---universal). **Step 2: Configure Claude Desktop with Docker** From 7dc48bfc600ca86abf21f74d60022c392e9a3531 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 16 Mar 2026 02:23:53 +0100 Subject: [PATCH 529/565] docs: Fix stale AGENTS.md tool table reference in CONTRIBUTING.md --- AGENTS.md | 2 +- CONTRIBUTING.md | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 2cc4d46b..1349b03d 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -82,7 +82,7 @@ Always read [`CONTRIBUTING.md`](CONTRIBUTING.md) before filing an issue or worki 1. Check open issues. If no issue exists, create one following the issue template. 2. Branch from `main`: `feature/issue-number-short-description` 3. Implement and test - 4. Update README.md, docs/docker-hub.md, and AGENTS.md if relevant + 4. Update README.md and docs/docker-hub.md if relevant 5. 
Create a draft PR; only convert to regular PR when ready to merge 6. Review with AI agents first, then manual review. Do not squash commits. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2678943a..c6e737f5 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -83,7 +83,6 @@ When adding a section to an existing tool (e.g., adding "certifications" to `get ### Docs - [ ] Update tool table in `README.md` -- [ ] Update tool table in `AGENTS.md` - [ ] Update features list in `docs/docker-hub.md` - [ ] Update tools array/description in `manifest.json` @@ -112,7 +111,6 @@ When adding an entirely new MCP tool (e.g., `search_companies`): ### Docs - [ ] Update tool table in `README.md` -- [ ] Update tool table in `AGENTS.md` - [ ] Update features list in `docs/docker-hub.md` - [ ] Add tool to `tools` array in `manifest.json` From f02830dd21ce5390c52bcd5e04dc04fde91be407 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Mar 2026 00:44:07 +0000 Subject: [PATCH 530/565] chore(deps): update ci dependencies --- .github/workflows/ci.yml | 4 ++-- .github/workflows/claude.yml | 2 +- .github/workflows/release.yml | 8 ++++---- Dockerfile | 4 ++-- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0849e5b1..4c8559fc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,7 +15,7 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Set up uv - uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # v7 + uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7 with: enable-cache: true @@ -35,7 +35,7 @@ jobs: steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # v7 + - uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7 with: enable-cache: true diff --git 
a/.github/workflows/claude.yml b/.github/workflows/claude.yml index 09995875..a59dcda6 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -32,7 +32,7 @@ jobs: - name: Run Claude Code id: claude - uses: anthropics/claude-code-action@26ec041249acb0a944c0a47b6c0c13f05dbc5b44 # v1 + uses: anthropics/claude-code-action@df37d2f0760a4b5683a6e617c9325bc1a36443f6 # v1 with: claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b19c7c11..5042d964 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -18,7 +18,7 @@ jobs: fetch-depth: 2 # Need to compare with previous commit - name: Set up uv - uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # v7 + uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7 with: enable-cache: true @@ -63,12 +63,12 @@ jobs: fetch-depth: 0 - name: Set up uv - uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # v7 + uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7 with: enable-cache: true - name: Set up Bun - uses: oven-sh/setup-bun@ecf28ddc73e819eb6fa29df6b34ef8921c743461 # v2 + uses: oven-sh/setup-bun@0c5077e51419868618aeaa5fe8019c62421857d6 # v2 - name: Update manifest.json and docker-compose.yml version run: | @@ -184,7 +184,7 @@ jobs: - name: Create GitHub Release env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2 + uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2 with: tag_name: v${{ env.VERSION }} files: | diff --git a/Dockerfile b/Dockerfile index f252f272..b88f9b15 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,9 +1,9 @@ # Use slim Python base instead of full Playwright image (saves ~300-400 MB) # Only Chromium is installed, not Firefox/WebKit -FROM 
python:3.14-slim-bookworm@sha256:5404df00cf00e6e7273375f415651837b4d192ac6859c44d3b740888ac798c99 +FROM python:3.14-slim-bookworm@sha256:55e465cb7e50cd1d7217fcb5386aa87d0356ca2cd790872142ef68d9ef6812b4 # Install uv package manager -COPY --from=ghcr.io/astral-sh/uv:latest@sha256:10902f58a1606787602f303954cea099626a4adb02acbac4c69920fe9d278f82 /uv /uvx /bin/ +COPY --from=ghcr.io/astral-sh/uv:latest@sha256:3472e43b4e738cf911c99d41bb34331280efad54c73b1def654a6227bb59b2b4 /uv /uvx /bin/ # Create non-root user first (matching original pwuser from Playwright image) RUN useradd -m -s /bin/bash pwuser From 38c5254800fb670f742bc8366a49f11681bf046e Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Mar 2026 17:25:20 +0100 Subject: [PATCH 531/565] docs(github): agent instructions for issues --- AGENTS.md | 3 ++- CONTRIBUTING.md | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 1349b03d..ccca48a7 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -25,6 +25,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co All scraping tools return: `{url, sections: {name: raw_text}}`. Optional additional keys: + - `references: {section_name: [{kind, url, text?, context?}]}` โ€” LinkedIn URLs are relative paths - `section_errors: {section_name: {error_type, error_message, issue_template_path, runtime, ...}}` - `unknown_sections: [name, ...]` @@ -79,7 +80,7 @@ Always read [`CONTRIBUTING.md`](CONTRIBUTING.md) before filing an issue or worki - Include the model used for code generation in PR descriptions (e.g. "Generated with Claude Opus 4.6") - Include a short prompt from the user messages that reproduces the PR diff. This tells the maintainer what was intended, which is often more useful than reviewing the full diff. - When implementing a new feature/fix: - 1. Check open issues. If no issue exists, create one following the issue template. + 1. Check open issues. 
If no issue exists, create one following the templates in `.github/ISSUE_TEMPLATE/`. Fill in every section. Use "N/A" if not applicable. 2. Branch from `main`: `feature/issue-number-short-description` 3. Implement and test 4. Update README.md and docs/docker-hub.md if relevant diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c6e737f5..cfed5832 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -122,7 +122,7 @@ When adding an entirely new MCP tool (e.g., `search_companies`): ## Workflow -1. [Open an issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) describing the feature or bug +1. [Open an issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) using the correct GitHub issue template. Fill in every section. Use "N/A" if not applicable. 2. Create a branch: `feature/-` or `fix/-` 3. Implement, test, and update docs (see checklists above) 4. Open a PR โ€” AI agents review first, then manual review From c0a92383b62b60a847a43e2ec442c7d360eb4333 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Mar 2026 17:32:47 +0100 Subject: [PATCH 532/565] docs(templates): update issue templates for clarity and conciseness --- .github/ISSUE_TEMPLATE/bug_report.md | 49 ++++++------------- .github/ISSUE_TEMPLATE/documentation_issue.md | 40 ++------------- .github/ISSUE_TEMPLATE/feature_request.md | 18 +++---- 3 files changed, 30 insertions(+), 77 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 0ba70198..7c31a00a 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,44 +1,27 @@ --- name: Bug Report -about: Create a report to help us improve the LinkedIn MCP server +about: Something is broken or not working as expected title: '[BUG] ' labels: ['bug'] assignees: '' --- -## Installation Method -- [ ] Docker (specify docker image version/tag): _._._ -- [ ] Claude Desktop DXT extension (specify docker image version/tag): _._._ -- [ ] Local 
Python setup - -## When does the error occur? -- [ ] At startup -- [ ] During tool call (specify which tool): - - [ ] get_person_profile - - [ ] get_company_profile - - [ ] get_job_details - - [ ] search_jobs - - [ ] close_session - -## MCP Client Configuration - -**Claude Desktop Config** (`/Users/[username]/Library/Application Support/Claude/claude_desktop_config.json`): -```json -{ - "mcpServers": { - "linkedin": { - // Your configuration here (remove sensitive credentials) - } - } -} -``` +## Setup + + + +## What Happened + + + +## Steps to Reproduce + + + +## Logs + + -## MCP Client Logs -**Claude Desktop Logs** (`/Users/[username]/Library/Logs/Claude/mcp-server-LinkedIn MCP Server.log`): ``` -Paste relevant log entries here ``` - -## Error Description -What went wrong and what did you expect to happen? diff --git a/.github/ISSUE_TEMPLATE/documentation_issue.md b/.github/ISSUE_TEMPLATE/documentation_issue.md index 816b05a3..5cdfce9a 100644 --- a/.github/ISSUE_TEMPLATE/documentation_issue.md +++ b/.github/ISSUE_TEMPLATE/documentation_issue.md @@ -1,50 +1,20 @@ --- name: Documentation Issue -about: Report problems with README, setup instructions, or other documentation +about: Report incorrect, missing, or confusing documentation title: '[DOCS] ' labels: ['documentation'] assignees: '' --- -## Documentation Problem -**What documentation issue did you find?** -- [ ] Incorrect/outdated setup instructions -- [ ] Missing information -- [ ] Unclear/confusing explanations -- [ ] Broken links -- [ ] Example code doesn't work -- [ ] Missing prerequisites -- [ ] Inconsistent information -- [ ] Typos/grammar issues -- [ ] Other: ___________ - ## Location -**Where is the documentation issue?** -- [ ] README.md -- [ ] Code comments -- [ ] Error messages -- [ ] CLI help text -- [ ] Other: ___________ -**Specific section/line:** -___________ + -## Current Documentation -**What does the documentation currently say?** -``` -Paste the current text or link to the specific section 
-``` +## Problem -## Problem Description -**What's wrong or confusing about it?** -A clear description of why this documentation is problematic. + ## Suggested Fix -**What should it say instead?** -``` -Suggested replacement text or improvements -``` -## Additional Context -Add any other context, screenshots, or examples that would help improve the documentation. + diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 31328c57..32f9933c 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,20 +1,20 @@ --- name: Feature Request -about: Suggest an idea for the LinkedIn MCP server +about: Suggest an improvement or new capability title: '[FEATURE] ' labels: ['enhancement'] assignees: '' --- -## Feature description -Describe what you want to happen. +## Feature Description -## Use case -Why this feature is useful. + -## Suggested implementation -If you have a specific idea for how to implement this feature, please describe it here. +## Use Case -## Additional context -Add any other details that would help. + + +## Suggested Approach + + From a284e8397f0bd06aa7a5f4a4ac764cc81f25081a Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Mar 2026 17:39:13 +0100 Subject: [PATCH 533/565] docs: reconcile optional section handling across templates and docs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Templates say "delete if not applicable" for optional sections โ€” align AGENTS.md and CONTRIBUTING.md to match instead of saying "N/A". --- AGENTS.md | 2 +- CONTRIBUTING.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index ccca48a7..39a4f0b4 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -80,7 +80,7 @@ Always read [`CONTRIBUTING.md`](CONTRIBUTING.md) before filing an issue or worki - Include the model used for code generation in PR descriptions (e.g. 
"Generated with Claude Opus 4.6") - Include a short prompt from the user messages that reproduces the PR diff. This tells the maintainer what was intended, which is often more useful than reviewing the full diff. - When implementing a new feature/fix: - 1. Check open issues. If no issue exists, create one following the templates in `.github/ISSUE_TEMPLATE/`. Fill in every section. Use "N/A" if not applicable. + 1. Check open issues. If no issue exists, create one following the templates in `.github/ISSUE_TEMPLATE/`. Fill in every section; delete optional sections if not applicable. 2. Branch from `main`: `feature/issue-number-short-description` 3. Implement and test 4. Update README.md and docs/docker-hub.md if relevant diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index cfed5832..94e61dc9 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -122,7 +122,7 @@ When adding an entirely new MCP tool (e.g., `search_companies`): ## Workflow -1. [Open an issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) using the correct GitHub issue template. Fill in every section. Use "N/A" if not applicable. +1. [Open an issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) using the correct GitHub issue template. Fill in every section; delete optional sections if not applicable. 2. Create a branch: `feature/-` or `fix/-` 3. Implement, test, and update docs (see checklists above) 4. 
Open a PR โ€” AI agents review first, then manual review From 2c9d72865567d3e289f54c477d99768acc337392 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Mar 2026 19:07:41 +0100 Subject: [PATCH 534/565] Convert issue templates to GitHub forms - Replace legacy markdown templates with YAML issue forms - Preserve bug, docs, and feature issue metadata and labels --- .github/ISSUE_TEMPLATE/bug_report.md | 27 -------------- .github/ISSUE_TEMPLATE/bug_report.yml | 35 +++++++++++++++++++ .github/ISSUE_TEMPLATE/documentation_issue.md | 20 ----------- .../ISSUE_TEMPLATE/documentation_issue.yml | 27 ++++++++++++++ .github/ISSUE_TEMPLATE/feature_request.md | 20 ----------- .github/ISSUE_TEMPLATE/feature_request.yml | 26 ++++++++++++++ 6 files changed, 88 insertions(+), 67 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/bug_report.yml delete mode 100644 .github/ISSUE_TEMPLATE/documentation_issue.md create mode 100644 .github/ISSUE_TEMPLATE/documentation_issue.yml delete mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.yml diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 7c31a00a..00000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,27 +0,0 @@ ---- -name: Bug Report -about: Something is broken or not working as expected -title: '[BUG] ' -labels: ['bug'] -assignees: '' - ---- - -## Setup - - - -## What Happened - - - -## Steps to Reproduce - - - -## Logs - - - -``` -``` diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 00000000..99fcf740 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,35 @@ +name: Bug Report +description: Something is broken or not working as expected +title: "[BUG] " +labels: ["bug"] +body: + - type: input + id: setup + attributes: + label: Setup + 
description: How you run the server, which MCP client, and what OS. + placeholder: "e.g. uvx on macOS, Claude Desktop" + validations: + required: true + - type: textarea + id: what-happened + attributes: + label: What Happened + description: What broke and what you expected instead. + validations: + required: true + - type: textarea + id: steps-to-reproduce + attributes: + label: Steps to Reproduce + description: Minimal steps to trigger the bug. Include the tool name and arguments if relevant. + validations: + required: true + - type: textarea + id: logs + attributes: + label: Logs + description: Paste relevant log output. Remove credentials. + render: shell + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/documentation_issue.md b/.github/ISSUE_TEMPLATE/documentation_issue.md deleted file mode 100644 index 5cdfce9a..00000000 --- a/.github/ISSUE_TEMPLATE/documentation_issue.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Documentation Issue -about: Report incorrect, missing, or confusing documentation -title: '[DOCS] ' -labels: ['documentation'] -assignees: '' - ---- - -## Location - - - -## Problem - - - -## Suggested Fix - - diff --git a/.github/ISSUE_TEMPLATE/documentation_issue.yml b/.github/ISSUE_TEMPLATE/documentation_issue.yml new file mode 100644 index 00000000..ed4e1a73 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/documentation_issue.yml @@ -0,0 +1,27 @@ +name: Documentation Issue +description: Report incorrect, missing, or confusing documentation +title: "[DOCS] " +labels: ["documentation"] +body: + - type: input + id: location + attributes: + label: Location + description: File path, section name, or URL. + placeholder: "e.g. README.md, Docker setup section" + validations: + required: true + - type: textarea + id: problem + attributes: + label: Problem + description: What's wrong or confusing. 
+ validations: + required: true + - type: textarea + id: suggested-fix + attributes: + label: Suggested Fix + description: What it should say instead. Leave blank if unsure. + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 32f9933c..00000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Feature Request -about: Suggest an improvement or new capability -title: '[FEATURE] ' -labels: ['enhancement'] -assignees: '' - ---- - -## Feature Description - - - -## Use Case - - - -## Suggested Approach - - diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 00000000..60b89eef --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,26 @@ +name: Feature Request +description: Suggest an improvement or new capability +title: "[FEATURE] " +labels: ["enhancement"] +body: + - type: textarea + id: feature-description + attributes: + label: Feature Description + description: What you want to happen. + validations: + required: true + - type: textarea + id: use-case + attributes: + label: Use Case + description: Why this is useful. + validations: + required: true + - type: textarea + id: suggested-approach + attributes: + label: Suggested Approach + description: How you'd implement it. Leave blank if unsure. 
+ validations: + required: false From dee1abdbcc4b598f7c6e1aaffdd99aa89436dd54 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Mar 2026 23:27:53 +0100 Subject: [PATCH 535/565] fix(rate-limit): ignore hidden recaptcha --- linkedin_mcp_server/core/utils.py | 35 ++++++++++++++++++++++++------- tests/test_core_utils.py | 33 +++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+), 7 deletions(-) diff --git a/linkedin_mcp_server/core/utils.py b/linkedin_mcp_server/core/utils.py index 7f3a3ebe..9dfcba7a 100644 --- a/linkedin_mcp_server/core/utils.py +++ b/linkedin_mcp_server/core/utils.py @@ -35,16 +35,37 @@ async def detect_rate_limit(page: Page) -> None: suggested_wait_time=30, ) - # Check for CAPTCHA + # Check for CAPTCHA. + # LinkedIn embeds hidden/invisible reCAPTCHA support iframes on normal pages, + # so only treat a CAPTCHA iframe as a challenge when it is actually visible. try: - captcha = await page.locator( + captcha_frames = page.locator( 'iframe[title*="captcha" i], iframe[src*="captcha" i]' - ).count() - if captcha > 0: - raise RateLimitError( - "CAPTCHA challenge detected. Manual intervention required.", - suggested_wait_time=30, + ) + captcha_count = await captcha_frames.count() + for index in range(captcha_count): + frame = captcha_frames.nth(index) + frame_info = await frame.evaluate( + """el => { + const style = window.getComputedStyle(el); + const rect = el.getBoundingClientRect(); + const src = el.getAttribute('src') || ''; + return { + src, + visible: + rect.width > 0 && + rect.height > 0 && + style.display !== 'none' && + style.visibility !== 'hidden' && + style.opacity !== '0', + }; + }""" ) + if frame_info["visible"] and "size=invisible" not in frame_info["src"]: + raise RateLimitError( + "CAPTCHA challenge detected. 
Manual intervention required.", + suggested_wait_time=30, + ) except RateLimitError: raise except PlaywrightTimeoutError: diff --git a/tests/test_core_utils.py b/tests/test_core_utils.py index be87f8ee..c74a595b 100644 --- a/tests/test_core_utils.py +++ b/tests/test_core_utils.py @@ -35,6 +35,14 @@ async def test_authwall_url_raises(self, mock_page): async def test_captcha_iframe_raises(self, mock_page): captcha_locator = MagicMock() captcha_locator.count = AsyncMock(return_value=1) + captcha_frame = MagicMock() + captcha_frame.evaluate = AsyncMock( + return_value={ + "src": "https://example.com/captcha", + "visible": True, + } + ) + captcha_locator.nth = MagicMock(return_value=captcha_frame) main_locator = MagicMock() main_locator.count = AsyncMock(return_value=0) @@ -48,6 +56,31 @@ def locator_side_effect(selector): with pytest.raises(RateLimitError, match="CAPTCHA"): await detect_rate_limit(mock_page) + async def test_hidden_invisible_recaptcha_does_not_raise(self, mock_page): + captcha_locator = MagicMock() + captcha_locator.count = AsyncMock(return_value=1) + captcha_frame = MagicMock() + captcha_frame.evaluate = AsyncMock( + return_value={ + "src": "https://www.google.com/recaptcha/enterprise/anchor?size=invisible", + "visible": False, + } + ) + captcha_locator.nth = MagicMock(return_value=captcha_frame) + + main_locator = MagicMock() + main_locator.count = AsyncMock(return_value=1) + + def locator_side_effect(selector): + if "captcha" in selector: + return captcha_locator + if selector == "main": + return main_locator + return MagicMock(count=AsyncMock(return_value=0)) + + mock_page.locator = MagicMock(side_effect=locator_side_effect) + await detect_rate_limit(mock_page) + async def test_normal_page_with_main_skips_body_heuristic(self, mock_page): """A normal page with
should NOT trigger body text checks.""" main_locator = MagicMock() From 8619232acd8fbafe053a04d0e31295a380a3cda1 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Mar 2026 23:36:16 +0100 Subject: [PATCH 536/565] fix(rate-limit): drop captcha iframe heuristic Refs #136 --- linkedin_mcp_server/core/utils.py | 41 +----------------- tests/test_core_utils.py | 71 +------------------------------ 2 files changed, 2 insertions(+), 110 deletions(-) diff --git a/linkedin_mcp_server/core/utils.py b/linkedin_mcp_server/core/utils.py index 9dfcba7a..786bd637 100644 --- a/linkedin_mcp_server/core/utils.py +++ b/linkedin_mcp_server/core/utils.py @@ -15,8 +15,7 @@ async def detect_rate_limit(page: Page) -> None: Checks (in order): 1. URL contains /checkpoint or /authwall (security challenge) - 2. Page contains CAPTCHA iframe (bot detection) - 3. Body text contains rate-limit phrases on error-shaped pages (throttling) + 2. Body text contains rate-limit phrases on error-shaped pages (throttling) The body-text heuristic only runs on pages without a ``
`` element and with short body text (<2000 chars), since real rate-limit pages are @@ -35,44 +34,6 @@ async def detect_rate_limit(page: Page) -> None: suggested_wait_time=30, ) - # Check for CAPTCHA. - # LinkedIn embeds hidden/invisible reCAPTCHA support iframes on normal pages, - # so only treat a CAPTCHA iframe as a challenge when it is actually visible. - try: - captcha_frames = page.locator( - 'iframe[title*="captcha" i], iframe[src*="captcha" i]' - ) - captcha_count = await captcha_frames.count() - for index in range(captcha_count): - frame = captcha_frames.nth(index) - frame_info = await frame.evaluate( - """el => { - const style = window.getComputedStyle(el); - const rect = el.getBoundingClientRect(); - const src = el.getAttribute('src') || ''; - return { - src, - visible: - rect.width > 0 && - rect.height > 0 && - style.display !== 'none' && - style.visibility !== 'hidden' && - style.opacity !== '0', - }; - }""" - ) - if frame_info["visible"] and "size=invisible" not in frame_info["src"]: - raise RateLimitError( - "CAPTCHA challenge detected. Manual intervention required.", - suggested_wait_time=30, - ) - except RateLimitError: - raise - except PlaywrightTimeoutError: - pass - except Exception as e: - logger.debug("Error checking for CAPTCHA: %s", e) - # Check for rate limit messages โ€” only on error-shaped pages. # Real rate-limit pages have no
element and short body text. # Normal LinkedIn pages (profiles, jobs) have
and long content diff --git a/tests/test_core_utils.py b/tests/test_core_utils.py index c74a595b..802df4be 100644 --- a/tests/test_core_utils.py +++ b/tests/test_core_utils.py @@ -32,63 +32,11 @@ async def test_authwall_url_raises(self, mock_page): with pytest.raises(RateLimitError, match="security checkpoint"): await detect_rate_limit(mock_page) - async def test_captcha_iframe_raises(self, mock_page): - captcha_locator = MagicMock() - captcha_locator.count = AsyncMock(return_value=1) - captcha_frame = MagicMock() - captcha_frame.evaluate = AsyncMock( - return_value={ - "src": "https://example.com/captcha", - "visible": True, - } - ) - captcha_locator.nth = MagicMock(return_value=captcha_frame) - - main_locator = MagicMock() - main_locator.count = AsyncMock(return_value=0) - - def locator_side_effect(selector): - if "captcha" in selector: - return captcha_locator - return main_locator - - mock_page.locator = MagicMock(side_effect=locator_side_effect) - with pytest.raises(RateLimitError, match="CAPTCHA"): - await detect_rate_limit(mock_page) - - async def test_hidden_invisible_recaptcha_does_not_raise(self, mock_page): - captcha_locator = MagicMock() - captcha_locator.count = AsyncMock(return_value=1) - captcha_frame = MagicMock() - captcha_frame.evaluate = AsyncMock( - return_value={ - "src": "https://www.google.com/recaptcha/enterprise/anchor?size=invisible", - "visible": False, - } - ) - captcha_locator.nth = MagicMock(return_value=captcha_frame) - - main_locator = MagicMock() - main_locator.count = AsyncMock(return_value=1) - - def locator_side_effect(selector): - if "captcha" in selector: - return captcha_locator - if selector == "main": - return main_locator - return MagicMock(count=AsyncMock(return_value=0)) - - mock_page.locator = MagicMock(side_effect=locator_side_effect) - await detect_rate_limit(mock_page) - async def test_normal_page_with_main_skips_body_heuristic(self, mock_page): """A normal page with
should NOT trigger body text checks.""" main_locator = MagicMock() main_locator.count = AsyncMock(return_value=1) - captcha_locator = MagicMock() - captcha_locator.count = AsyncMock(return_value=0) - body_locator = MagicMock() # Body contains a phrase that would false-positive body_locator.inner_text = AsyncMock( @@ -96,8 +44,6 @@ async def test_normal_page_with_main_skips_body_heuristic(self, mock_page): ) def locator_side_effect(selector): - if "captcha" in selector: - return captcha_locator if selector == "main": return main_locator if selector == "body": @@ -113,17 +59,12 @@ async def test_error_page_without_main_triggers_heuristic(self, mock_page): main_locator = MagicMock() main_locator.count = AsyncMock(return_value=0) - captcha_locator = MagicMock() - captcha_locator.count = AsyncMock(return_value=0) - body_locator = MagicMock() body_locator.inner_text = AsyncMock( return_value="Too many requests. Slow down." ) def locator_side_effect(selector): - if "captcha" in selector: - return captcha_locator if selector == "main": return main_locator if selector == "body": @@ -139,9 +80,6 @@ async def test_long_body_without_main_does_not_trigger(self, mock_page): main_locator = MagicMock() main_locator.count = AsyncMock(return_value=0) - captcha_locator = MagicMock() - captcha_locator.count = AsyncMock(return_value=0) - body_locator = MagicMock() # Long body with a matching phrase buried in content body_locator.inner_text = AsyncMock( @@ -149,8 +87,6 @@ async def test_long_body_without_main_does_not_trigger(self, mock_page): ) def locator_side_effect(selector): - if "captcha" in selector: - return captcha_locator if selector == "main": return main_locator if selector == "body": @@ -161,17 +97,12 @@ def locator_side_effect(selector): # Should NOT raise โ€” body is too long to be an error page await detect_rate_limit(mock_page) - async def test_normal_url_no_captcha_no_error_passes(self, mock_page): + async def test_normal_url_no_error_passes(self, mock_page): """A 
clean normal page passes all checks without raising.""" main_locator = MagicMock() main_locator.count = AsyncMock(return_value=1) - captcha_locator = MagicMock() - captcha_locator.count = AsyncMock(return_value=0) - def locator_side_effect(selector): - if "captcha" in selector: - return captcha_locator if selector == "main": return main_locator return MagicMock(count=AsyncMock(return_value=0)) From 9e02515f717ae5961e3dff1101a9c0f433dc413d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Fri, 20 Mar 2026 23:49:02 +0100 Subject: [PATCH 537/565] chore: bump version to 4.4.2 --- pyproject.toml | 2 +- uv.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2df7dfaf..02812b07 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "4.4.1" +version = "4.4.2" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 0c2018a2..b1e15cb8 100644 --- a/uv.lock +++ b/uv.lock @@ -951,7 +951,7 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "4.4.1" +version = "4.4.2" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From b4ec2a4fc7ace1e3259eec43e2348eb5a29b3378 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 20 Mar 2026 22:51:21 +0000 Subject: [PATCH 538/565] chore: update manifest.json and docker-compose.yml to v4.4.2 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 47e5778a..e1a7c331 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:4.4.1 + image: stickerdaniel/linkedin-mcp-server:4.4.2 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index 92b35b65..77a7d94c 100644 --- a/manifest.json +++ b/manifest.json @@ -2,9 +2,9 @@ "dxt_version": "0.1", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "4.4.1", + "version": "4.4.2", "description": "Connect Claude to LinkedIn for profiles, companies, job details, and people search", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n- **People Search**: Search for people by keywords and location\n- **Company Posts**: Get recent posts from a company's LinkedIn feed\n- **Person Posts**: Get recent activity and posts from a person's profile\n\n## First-Time Setup\n\n### 1. 
Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.4.1", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n- **People Search**: Search for people by keywords and location\n- **Company Posts**: Get recent posts from a company's LinkedIn feed\n- **Person Posts**: Get recent activity and posts from a person's profile\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.4.2", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -26,7 +26,7 @@ "run", "--rm", "-i", "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:4.4.1" + "stickerdaniel/linkedin-mcp-server:4.4.2" ] } }, From 658eb123ff90f9411eb4c7694c2a89e06095890f Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 21 Mar 2026 13:00:50 +0100 Subject: [PATCH 539/565] fix(diagnostics): align issue report with bug form --- linkedin_mcp_server/error_diagnostics.py | 92 +++++++++++++++--------- tests/test_error_diagnostics.py | 63 +++++++++++++++- 2 files changed, 118 insertions(+), 37 deletions(-) diff --git a/linkedin_mcp_server/error_diagnostics.py b/linkedin_mcp_server/error_diagnostics.py index 6c46b46f..0f79c24c 100644 --- a/linkedin_mcp_server/error_diagnostics.py +++ b/linkedin_mcp_server/error_diagnostics.py @@ -135,16 +135,45 @@ def 
_render_issue_template(payload: dict[str, Any]) -> str: has_existing_issues = bool(existing_issues) issue_search_skipped = bool(payload.get("issue_search_skipped")) installation_lines = _installation_method_lines(runtime) - tool_lines = _tool_lines(payload) + tool_name = _tool_name_for_context(payload) or "unknown" + setup_lines = [ + f"- Installation method: {_installation_method_summary(runtime)}", + "- MCP client: Local curl-based MCP HTTP client against the server's streamable-http transport", + f"- Operating system / runtime: {runtime['current_runtime_id']}", + ] + if runtime.get("trace_dir"): + setup_lines.append(f"- Trace artifacts directory: {runtime['trace_dir']}") + if runtime.get("log_path"): + setup_lines.append(f"- Server log path: {runtime['log_path']}") + + what_happened_lines = [ + f"- Suggested title: {payload['suggested_issue_title']}", + f"- Context: {payload['context']}", + f"- Tool: {tool_name}", + f"- Section: {payload.get('section_name') or 'n/a'}", + f"- Target URL: {payload.get('target_url') or 'n/a'}", + f"- Error: {payload['error_type']}: {payload['error_message']}", + "- Expected behavior: The MCP tool call should complete and return structured scraping output.", + ] + + reproduction_lines = [ + "1. Run a fresh local `uv run -m linkedin_mcp_server --login`.", + "2. Start the server again using the same installation method and debug env vars used for this run.", + f"3. Call `{tool_name}` again with the same target URL and section selection.", + ( + "4. If one of the listed open issues matches, post the gist as a comment there as additional information." + if has_existing_issues + else "4. If no existing issue matches, open a new GitHub bug report with the information above." 
+ ), + ] return ( "\n".join( [ "# LinkedIn MCP scrape failure", "", "## File This Issue", - f"- Suggested title: {payload['suggested_issue_title']}", "- Read this generated file before posting.", - "- Copy the sections below into the GitHub bug report template.", + "- Copy the `Setup`, `What Happened`, `Steps to Reproduce`, and `Logs` sections below into the matching GitHub bug report fields.", "- Attach this generated markdown file, the server log, and the trace artifacts directory.", ( "- Review the existing open issues below first. If one matches, post the gist as a comment there instead of opening a new issue." @@ -168,33 +197,26 @@ def _render_issue_template(payload: dict[str, Any]) -> str: ) ), "", - "## Installation Method", - *installation_lines, - "", - "## When does the error occur?", - "- [ ] At startup", - "- [x] During tool call (specify which tool):", - *tool_lines, + "## Setup", + *setup_lines, "", - "## MCP Client Configuration", + "## What Happened", + *what_happened_lines, "", - "**Client used for reproduction**:", - "```text", - "Local curl-based MCP HTTP client against the server's streamable-http transport", - "```", + "## Steps to Reproduce", + *reproduction_lines, "", - "## MCP Client Logs", + "## Logs", "```text", "See attached server log and trace artifacts.", "```", "", - "## Error Description", - f"Context: {payload['context']}", - f"Section: {payload.get('section_name') or 'n/a'}", - f"Target URL: {payload.get('target_url') or 'n/a'}", - f"Error: {payload['error_type']}: {payload['error_message']}", + "## Additional Diagnostics", "", - "## Runtime Diagnostics", + "### Installation Method Details", + *installation_lines, + "", + "### Runtime Diagnostics", f"- Hostname: {runtime['hostname']}", f"- Current runtime: {runtime['current_runtime_id']}", f"- Source profile: {runtime['source_profile_dir']}", @@ -205,7 +227,7 @@ def _render_issue_template(payload: dict[str, Any]) -> str: f"- Server log: {runtime['log_path'] or 'not enabled'}", f"- 
Suggested gist command: {runtime['suggested_gist_command'] or 'not available'}", "", - "## Session State", + "### Session State", "```json", json.dumps( { @@ -217,27 +239,17 @@ def _render_issue_template(payload: dict[str, Any]) -> str: ), "```", "", - "## Attachment Checklist", + "### Attachment Checklist", "- Read this generated markdown file and use it as the issue body/context.", "- Attach this generated markdown file itself.", "- Attach the server log if available.", "- Attach the trace screenshots/trace.jsonl if available.", "- Optional: run the suggested gist command below to upload the text artifacts as a single shareable bundle.", "", - "## Suggested Gist Command", + "### Suggested Gist Command", "```bash", runtime["suggested_gist_command"] or "# gist command unavailable", "```", - "", - "## Reproduction", - "1. Run a fresh local `uv run -m linkedin_mcp_server --login`.", - "2. Start the server again using the same installation method and debug env vars used for this run.", - "3. Re-run the failing MCP tool call.", - ( - "4. If one of the listed open issues matches, post the gist as a comment there as additional information." - if has_existing_issues - else "4. If no existing issue matches, open a new GitHub bug report with the information above." 
- ), ] ) + "\n" @@ -354,6 +366,16 @@ def _installation_method_lines(runtime: dict[str, Any]) -> list[str]: ] +def _installation_method_summary(runtime: dict[str, Any]) -> str: + current_runtime_id = str(runtime.get("current_runtime_id") or "") + if "container" in current_runtime_id: + return ( + "Docker using `stickerdaniel/linkedin-mcp-server:latest` with " + "`~/.linkedin-mcp` mounted into `/home/pwuser/.linkedin-mcp`" + ) + return "Local Python setup" + + def _tool_lines(payload: dict[str, Any]) -> list[str]: selected_tool = _tool_name_for_context(payload) tool_names = [ diff --git a/tests/test_error_diagnostics.py b/tests/test_error_diagnostics.py index 7213e1b3..5cb40630 100644 --- a/tests/test_error_diagnostics.py +++ b/tests/test_error_diagnostics.py @@ -8,6 +8,29 @@ ) +def _required_issue_form_labels() -> list[str]: + labels: list[str] = [] + current_label: str | None = None + in_body = False + lines = Path(".github/ISSUE_TEMPLATE/bug_report.yml").read_text().splitlines() + for line in lines: + stripped = line.strip() + if stripped == "body:": + in_body = True + continue + if not in_body: + continue + if stripped.startswith("- type:"): + current_label = None + continue + if stripped.startswith("label:"): + current_label = stripped.removeprefix("label:").strip().strip('"') + continue + if stripped == "required: true" and current_label: + labels.append(current_label) + return labels + + def test_build_issue_diagnostics_includes_existing_issues(monkeypatch, tmp_path): monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) monkeypatch.setattr( @@ -38,6 +61,10 @@ def test_build_issue_diagnostics_includes_existing_issues(monkeypatch, tmp_path) assert "## Existing Open Issues" in issue_body assert "#220" in issue_body assert "post the gist as a comment there" in issue_body + assert "## Setup" in issue_body + assert "## What Happened" in issue_body + assert "## Steps to Reproduce" in issue_body + assert "## Logs" in issue_body def 
test_format_tool_error_with_diagnostics_prefers_existing_issue_comment_flow(): @@ -136,6 +163,37 @@ def fail(*args, **kwargs): assert "search was skipped in async server context" in issue_body +def test_build_issue_diagnostics_covers_required_bug_report_fields( + monkeypatch, tmp_path +): + monkeypatch.setenv("USER_DATA_DIR", str(tmp_path / "profile")) + monkeypatch.setattr( + "linkedin_mcp_server.error_diagnostics._find_existing_issues", + lambda payload: [], + ) + + diagnostics = build_issue_diagnostics( + RuntimeError("boom"), + context="search_jobs", + target_url="https://www.linkedin.com/jobs/search/?keywords=python", + section_name="search_results", + ) + + issue_body = Path(diagnostics["issue_template_path"]).read_text() + + for label in _required_issue_form_labels(): + assert f"## {label}" in issue_body + + assert "- Installation method:" in issue_body + assert "- MCP client:" in issue_body + assert "- Error:" in issue_body + assert "- Expected behavior:" in issue_body + assert "1. Run a fresh local `uv run -m linkedin_mcp_server --login`." 
in issue_body + assert "Call `search_jobs` again" in issue_body + assert "## Additional Diagnostics" in issue_body + assert "### Session State" in issue_body + + def test_build_issue_diagnostics_marks_inferred_tool_and_container_runtime( monkeypatch, tmp_path ): @@ -157,9 +215,10 @@ def test_build_issue_diagnostics_marks_inferred_tool_and_container_runtime( ) issue_body = Path(diagnostics["issue_template_path"]).read_text() + assert "- Installation method: Docker using" in issue_body assert "`~/.linkedin-mcp` mounted into `/home/pwuser/.linkedin-mcp`" in issue_body assert "- [x] Docker" in issue_body - assert " - [x] search_jobs" in issue_body + assert "- Tool: search_jobs" in issue_body def test_build_issue_diagnostics_keeps_sensitive_runtime_details_out_of_mcp_payload( @@ -184,5 +243,5 @@ def test_build_issue_diagnostics_keeps_sensitive_runtime_details_out_of_mcp_payl assert "source_profile_dir" not in diagnostics["runtime"] assert diagnostics["issue_search_skipped"] is False issue_body = Path(diagnostics["issue_template_path"]).read_text() - assert "## Runtime Diagnostics" in issue_body + assert "### Runtime Diagnostics" in issue_body assert "Source profile:" in issue_body From 059a84c4c3eb71caeea156569fef9be9475e8e96 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 21 Mar 2026 13:16:30 +0100 Subject: [PATCH 540/565] fix(diagnostics): address review feedback --- linkedin_mcp_server/error_diagnostics.py | 21 ++------------------- tests/test_error_diagnostics.py | 5 ++++- 2 files changed, 6 insertions(+), 20 deletions(-) diff --git a/linkedin_mcp_server/error_diagnostics.py b/linkedin_mcp_server/error_diagnostics.py index 0f79c24c..6e8d95bf 100644 --- a/linkedin_mcp_server/error_diagnostics.py +++ b/linkedin_mcp_server/error_diagnostics.py @@ -360,7 +360,7 @@ def _installation_method_lines(runtime: dict[str, Any]) -> list[str]: current_runtime_id = str(runtime.get("current_runtime_id") or "") docker_checked = "x" if "container" in current_runtime_id else " 
" return [ - f"- [{docker_checked}] Docker (specify docker image version/tag): `stickerdaniel/linkedin-mcp-server:latest` with `~/.linkedin-mcp` mounted into `/home/pwuser/.linkedin-mcp`", + f"- [{docker_checked}] Docker (specify docker image version/tag): `stickerdaniel/linkedin-mcp-server:` with `~/.linkedin-mcp` mounted into `/home/pwuser/.linkedin-mcp`", "- [ ] Claude Desktop DXT extension (specify docker image version/tag): _._._", "- [ ] Local Python setup", ] @@ -370,29 +370,12 @@ def _installation_method_summary(runtime: dict[str, Any]) -> str: current_runtime_id = str(runtime.get("current_runtime_id") or "") if "container" in current_runtime_id: return ( - "Docker using `stickerdaniel/linkedin-mcp-server:latest` with " + "Docker using `stickerdaniel/linkedin-mcp-server:` with " "`~/.linkedin-mcp` mounted into `/home/pwuser/.linkedin-mcp`" ) return "Local Python setup" -def _tool_lines(payload: dict[str, Any]) -> list[str]: - selected_tool = _tool_name_for_context(payload) - tool_names = [ - "get_person_profile", - "get_company_profile", - "get_company_posts", - "get_job_details", - "search_jobs", - "search_people", - "close_session", - ] - return [ - f" - [{'x' if tool_name == selected_tool else ' '}] {tool_name}" - for tool_name in tool_names - ] - - def _tool_name_for_context(payload: dict[str, Any]) -> str | None: context = str(payload.get("context") or "") if context in { diff --git a/tests/test_error_diagnostics.py b/tests/test_error_diagnostics.py index 5cb40630..b176d95a 100644 --- a/tests/test_error_diagnostics.py +++ b/tests/test_error_diagnostics.py @@ -12,7 +12,10 @@ def _required_issue_form_labels() -> list[str]: labels: list[str] = [] current_label: str | None = None in_body = False - lines = Path(".github/ISSUE_TEMPLATE/bug_report.yml").read_text().splitlines() + issue_form_path = ( + Path(__file__).resolve().parents[1] / ".github/ISSUE_TEMPLATE/bug_report.yml" + ) + lines = issue_form_path.read_text().splitlines() for line in lines: stripped 
= line.strip() if stripped == "body:": From 38069de1fe57affe06ea1e6dee82c7f736497387 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 21 Mar 2026 15:52:08 +0100 Subject: [PATCH 541/565] chore: update AI agent models to gpt-5.4 --- .opencode/agents/code-reviewer.md | 4 ++-- .opencode/agents/code-simplifier.md | 4 ++-- .opencode/agents/comment-analyzer.md | 4 ++-- .opencode/agents/pr-test-analyzer.md | 4 ++-- .opencode/agents/silent-failure-hunter.md | 4 ++-- .opencode/agents/type-design-analyzer.md | 4 ++-- btca.config.jsonc | 4 ++-- 7 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.opencode/agents/code-reviewer.md b/.opencode/agents/code-reviewer.md index 3ab541a3..689c501a 100644 --- a/.opencode/agents/code-reviewer.md +++ b/.opencode/agents/code-reviewer.md @@ -2,9 +2,9 @@ name: code-reviewer mode: subagent # https://models.dev/ -model: 'openai/gpt-5.3-codex' +model: 'openai/gpt-5.4' variant: 'high' -# model: 'github-copilot/gpt-5.3-codex' +# model: 'github-copilot/gpt-5.4' color: '#22c55e' description: | Use this agent when you need to review code for adherence to project guidelines, style guides, and best practices. This agent should be used proactively after writing or modifying code, especially before committing changes or creating pull requests. It will check for style violations, potential issues, and ensure code follows the established patterns in CLAUDE.md. Also the agent needs to know which files to focus on for the review. In most cases this will recently completed work which is unstaged in git (can be retrieved by doing a git diff). However there can be cases where this is different, make sure to specify this as the agent input when calling the agent. 
diff --git a/.opencode/agents/code-simplifier.md b/.opencode/agents/code-simplifier.md index 325929c4..9f06909f 100644 --- a/.opencode/agents/code-simplifier.md +++ b/.opencode/agents/code-simplifier.md @@ -2,9 +2,9 @@ name: code-simplifier mode: subagent # https://models.dev/ -model: 'openai/gpt-5.3-codex' +model: 'openai/gpt-5.4' variant: 'high' -# model: 'github-copilot/gpt-5.3-codex' +# model: 'github-copilot/gpt-5.4' color: '#3b82f6' description: | Use this agent when code has been written or modified and needs to be simplified for clarity, consistency, and maintainability while preserving all functionality. This agent should be triggered automatically after completing a coding task or writing a logical chunk of code. It simplifies code by following project best practices while retaining all functionality. The agent focuses only on recently modified code unless instructed otherwise. diff --git a/.opencode/agents/comment-analyzer.md b/.opencode/agents/comment-analyzer.md index d55b0b2c..bfbb84ed 100644 --- a/.opencode/agents/comment-analyzer.md +++ b/.opencode/agents/comment-analyzer.md @@ -2,9 +2,9 @@ name: comment-analyzer mode: subagent # https://models.dev/ -model: 'openai/gpt-5.3-codex' +model: 'openai/gpt-5.4' variant: 'high' -# model: 'github-copilot/gpt-5.3-codex' +# model: 'github-copilot/gpt-5.4' color: '#10b981' description: | Use this agent when you need to analyze code comments for accuracy, completeness, and long-term maintainability. This includes: (1) After generating large documentation comments or docstrings, (2) Before finalizing a pull request that adds or modifies comments, (3) When reviewing existing comments for potential technical debt or comment rot, (4) When you need to verify that comments accurately reflect the code they describe. 
diff --git a/.opencode/agents/pr-test-analyzer.md b/.opencode/agents/pr-test-analyzer.md index 76a8c726..645b8c05 100644 --- a/.opencode/agents/pr-test-analyzer.md +++ b/.opencode/agents/pr-test-analyzer.md @@ -2,9 +2,9 @@ name: pr-test-analyzer mode: subagent # https://models.dev/ -model: 'openai/gpt-5.3-codex' +model: 'openai/gpt-5.4' variant: 'high' -# model: 'github-copilot/gpt-5.3-codex' +# model: 'github-copilot/gpt-5.4' color: '#06b6d4' description: | Use this agent when you need to review a pull request for test coverage quality and completeness. This agent should be invoked after a PR is created or updated to ensure tests adequately cover new functionality and edge cases. Examples: diff --git a/.opencode/agents/silent-failure-hunter.md b/.opencode/agents/silent-failure-hunter.md index 7fafe9e9..3627b284 100644 --- a/.opencode/agents/silent-failure-hunter.md +++ b/.opencode/agents/silent-failure-hunter.md @@ -2,9 +2,9 @@ name: silent-failure-hunter mode: subagent # https://models.dev/ -model: 'openai/gpt-5.3-codex' +model: 'openai/gpt-5.4' variant: 'high' -# model: 'github-copilot/gpt-5.3-codex' +# model: 'github-copilot/gpt-5.4' color: '#eab308' description: | Use this agent when reviewing code changes in a pull request to identify silent failures, inadequate error handling, and inappropriate fallback behavior. This agent should be invoked proactively after completing a logical chunk of work that involves error handling, catch blocks, fallback logic, or any code that could potentially suppress errors. 
Examples: diff --git a/.opencode/agents/type-design-analyzer.md b/.opencode/agents/type-design-analyzer.md index 746fef41..79b81dfa 100644 --- a/.opencode/agents/type-design-analyzer.md +++ b/.opencode/agents/type-design-analyzer.md @@ -2,9 +2,9 @@ name: type-design-analyzer mode: subagent # https://models.dev/ -model: 'openai/gpt-5.3-codex' +model: 'openai/gpt-5.4' variant: 'high' -# model: 'github-copilot/gpt-5.3-codex' +# model: 'github-copilot/gpt-5.4' color: '#ec4899' description: | Use this agent when you need expert analysis of type design in your codebase. Specifically use it: (1) when introducing a new type to ensure it follows best practices for encapsulation and invariant expression, (2) during pull request creation to review all types being added, (3) when refactoring existing types to improve their design quality. The agent will provide both qualitative feedback and quantitative ratings on encapsulation, invariant expression, usefulness, and enforcement. diff --git a/btca.config.jsonc b/btca.config.jsonc index 269e4202..7a871b65 100644 --- a/btca.config.jsonc +++ b/btca.config.jsonc @@ -73,6 +73,6 @@ "specialNotes": "Framework for managing pre-commit hooks." 
} ], - "model": "claude-haiku-4.5", - "provider": "github-copilot" + "model": "gpt-5.4-mini", + "provider": "openai" } From 7cc5fee15a2e1dae370a50e96a0adec1b6e151f6 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 21 Mar 2026 16:59:26 +0100 Subject: [PATCH 542/565] feat(mcpb): add one-click bootstrap --- .github/workflows/release.yml | 10 +- .mcpbignore | 34 ++ README.md | 69 ++-- RELEASE_NOTES_TEMPLATE.md | 14 +- assets/icons/linkedin.png | Bin 0 -> 15086 bytes docs/docker-hub.md | 2 +- linkedin_mcp_server/__init__.py | 2 +- linkedin_mcp_server/bootstrap.py | 396 +++++++++++++++++++++++ linkedin_mcp_server/cli_main.py | 92 +----- linkedin_mcp_server/dependencies.py | 39 ++- linkedin_mcp_server/error_diagnostics.py | 6 +- linkedin_mcp_server/error_handler.py | 37 +++ linkedin_mcp_server/exceptions.py | 28 ++ linkedin_mcp_server/server.py | 19 +- linkedin_mcp_server/tools/company.py | 17 +- linkedin_mcp_server/tools/job.py | 16 +- linkedin_mcp_server/tools/person.py | 17 +- manifest.json | 44 ++- tests/conftest.py | 3 + tests/test_bootstrap.py | 151 +++++++++ tests/test_cli_main.py | 17 +- tests/test_error_diagnostics.py | 31 ++ tests/test_tools.py | 4 + 23 files changed, 842 insertions(+), 206 deletions(-) create mode 100644 .mcpbignore create mode 100644 assets/icons/linkedin.png create mode 100644 linkedin_mcp_server/bootstrap.py create mode 100644 tests/test_bootstrap.py diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 5042d964..01db9960 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -74,7 +74,6 @@ jobs: run: | set -e sed -i 's/"version": ".*"/"version": "'$VERSION'"/' manifest.json - sed -i 's/stickerdaniel\/linkedin-mcp-server:[^"]*/stickerdaniel\/linkedin-mcp-server:'$VERSION'/' manifest.json sed -i 's/stickerdaniel\/linkedin-mcp-server:[^ ]*/stickerdaniel\/linkedin-mcp-server:'$VERSION'/' docker-compose.yml echo "โœ… Updated manifest.json and docker-compose.yml to version $VERSION" @@ 
-171,10 +170,11 @@ jobs: - name: Optimize uv cache for CI run: uv cache prune --ci - - name: Build DXT extension + - name: Validate and build MCP bundle run: | - bunx @anthropic-ai/dxt pack - mv linkedin-mcp-server.dxt linkedin-mcp-server-v$VERSION.dxt + bunx @anthropic-ai/mcpb validate + bunx @anthropic-ai/mcpb pack + mv linkedin-mcp-server.mcpb linkedin-mcp-server-v$VERSION.mcpb - name: Generate release notes run: | @@ -188,7 +188,7 @@ jobs: with: tag_name: v${{ env.VERSION }} files: | - *.dxt + *.mcpb generate_release_notes: true draft: false prerelease: false diff --git a/.mcpbignore b/.mcpbignore new file mode 100644 index 00000000..7b370236 --- /dev/null +++ b/.mcpbignore @@ -0,0 +1,34 @@ +.git +.github +.claude +.cursor +.gemini +.mcp.json +.opencode +.vscode +.venv +.venv.* +.pytest_cache +.ruff_cache +.coverage +.coverage.* +.debug +__pycache__ +build +dist +htmlcov +docs +scripts +tests +AGENTS.md +CLAUDE.md +CONTRIBUTING.md +docker-compose.yml +Dockerfile +RELEASE_NOTES_TEMPLATE.md +btca.config.jsonc +pytest.ini +renovate.json +*.mcpb +*.dxt +.DS_Store diff --git a/README.md b/README.md index 766224ab..a6cbff53 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ Through this LinkedIn MCP server, AI assistants like Claude can connect to your 
[![uvx](https://img.shields.io/badge/uvx-Quick_Install-de5fe9?style=for-the-badge&logo=data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNDEiIGhlaWdodD0iNDEiIHZpZXdCb3g9IjAgMCA0MSA0MSIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZD0iTS01LjI4NjE5ZS0wNiAwLjE2ODYyOUwwLjA4NDMwOTggMjAuMTY4NUwwLjE1MTc2MiAzNi4xNjgzQzAuMTYxMDc1IDM4LjM3NzQgMS45NTk0NyA0MC4xNjA3IDQuMTY4NTkgNDAuMTUxNEwyMC4xNjg0IDQwLjA4NEwzMC4xNjg0IDQwLjA0MThMMzEuMTg1MiA0MC4wMzc1QzMzLjM4NzcgNDAuMDI4MiAzNS4xNjgzIDM4LjIwMjYgMzUuMTY4MyAzNlYzNkwzNy4wMDAzIDM2TDM3LjAwMDMgMzkuOTk5Mkw0MC4xNjgzIDM5Ljk5OTZMMzkuOTk5NiAtOS45NDY1M2UtMDdMMjEuNTk5OCAwLjA3NzU2ODlMMjEuNjc3NCAxNi4wMTg1TDIxLjY3NzQgMjUuOTk5OEwyMC4wNzc0IDI1Ljk5OThMMTguMzk5OCAyNS45OTk4TDE4LjQ3NzQgMTYuMDMyTDE4LjM5OTggMC4wOTEwNTkzTC01LjI4NjE5ZS0wNiAwLjE2ODYyOVoiIGZpbGw9IiNERTVGRTkiLz4KPC9zdmc+Cg==)](#-uvx-setup-recommended---universal) [![Docker](https://img.shields.io/badge/Docker-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup) -[![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_DXT-d97757?style=for-the-badge&logo=anthropic)](#-claude-desktop-dxt-extension) +[![Install MCP Bundle](https://img.shields.io/badge/Claude_Desktop_MCPB-d97757?style=for-the-badge&logo=anthropic)](#-claude-desktop-mcp-bundle-formerly-dxt) [![Development](https://img.shields.io/badge/Development-Local-ffdc53?style=for-the-badge&logo=python&logoColor=ffdc53)](#-local-setup-develop--contribute) @@ -60,19 +60,11 @@ When one section fails but the overall tool call still completes, responses may ## ๐Ÿš€ uvx Setup (Recommended - Universal) -**Prerequisites:** [Install uv](https://docs.astral.sh/uv/getting-started/installation/) and run `uvx patchright install chromium` to set up the browser. +**Prerequisites:** [Install uv](https://docs.astral.sh/uv/getting-started/installation/). 
### Installation -**Step 1: Create a session (first time only)** - -```bash -uvx linkedin-scraper-mcp --login -``` - -This opens a browser for you to log in manually (5 minute timeout for 2FA, captcha, etc.). The browser profile is saved to `~/.linkedin-mcp/profile/`. - -**Step 2: Client Configuration:** +**Client Configuration** ```json { @@ -85,8 +77,10 @@ This opens a browser for you to log in manually (5 minute timeout for 2FA, captc } ``` +The server starts quickly, prepares the shared Patchright Chromium browser cache in the background under `~/.linkedin-mcp/patchright-browsers`, and opens a LinkedIn login browser window on the first tool call that needs authentication. + > [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --login` again +> Early tool calls may return a setup/authentication-in-progress error until browser setup or login finishes. If you prefer to create a session explicitly, run `uvx linkedin-scraper-mcp --login`. ### uvx Setup Help @@ -117,9 +111,6 @@ This opens a browser for you to log in manually (5 minute timeout for 2FA, captc **Basic Usage Examples:** ```bash -# Create a session interactively -uvx linkedin-scraper-mcp --login - # Run with debug logging uvx linkedin-scraper-mcp --log-level DEBUG ``` @@ -158,6 +149,7 @@ parallel. Use `--log-level DEBUG` to see scraper lock wait/acquire/release logs. **Session issues:** - Browser profile is stored at `~/.linkedin-mcp/profile/` +- Managed browser downloads are cached at `~/.linkedin-mcp/patchright-browsers/` - Make sure you have only one active LinkedIn session at a time **Login issues:** @@ -301,40 +293,29 @@ Runtime server logs are emitted by FastMCP/Uvicorn.

-## ๐Ÿ“ฆ Claude Desktop (DXT Extension) +## ๐Ÿ“ฆ Claude Desktop MCP Bundle (formerly DXT) -**Prerequisites:** [Claude Desktop](https://claude.ai/download) and [Docker](https://www.docker.com/get-started/) installed & running +**Prerequisites:** [Claude Desktop](https://claude.ai/download). **One-click installation** for Claude Desktop users: -1. Download the [DXT extension](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest) -2. Double-click to install into Claude Desktop -3. Create a session: `uvx linkedin-scraper-mcp --login` +1. Download the latest `.mcpb` artifact from [releases](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest) +2. Double-click to install it into Claude Desktop +3. Restart Claude Desktop +4. Call any LinkedIn tool -> [!NOTE] -> Sessions may expire over time. If you encounter authentication issues, run `uvx linkedin-scraper-mcp --login` again. +On startup, the MCP Bundle starts preparing the shared Patchright Chromium browser cache in the background. If you call a tool too early, Claude will surface a setup-in-progress error. On the first tool call that needs authentication, the server opens a LinkedIn login browser window and asks you to retry after sign-in. -### DXT Extension Setup Help +### MCP Bundle Setup Help
โ— Troubleshooting -**First-time setup timeout:** +**First-time setup behavior:** -- Claude Desktop has a ~60 second connection timeout -- If the Docker image isn't cached, the pull may exceed this timeout -- **Fix:** Pre-pull the image before first use: - - ```bash - docker pull stickerdaniel/linkedin-mcp-server:2.3.0 - ``` - -- Then restart Claude Desktop - -**Docker issues:** - -- Make sure [Docker](https://www.docker.com/get-started/) is installed -- Check if Docker is running: `docker ps` +- Claude Desktop starts the bundle immediately; browser setup continues in the background +- If the Patchright Chromium browser is still downloading, retry the tool after a short wait +- Managed browser downloads are shared under `~/.linkedin-mcp/patchright-browsers/` **Login issues:** @@ -373,19 +354,15 @@ curl -LsSf https://astral.sh/uv/install.sh | sh uv sync uv sync --group dev -# 4. Install Patchright browser -uv run patchright install chromium - -# 5. Install pre-commit hooks +# 4. Install pre-commit hooks uv run pre-commit install -# 6. Create a session (first time only) -uv run -m linkedin_mcp_server --login - -# 7. Start the server +# 5. Start the server uv run -m linkedin_mcp_server ``` +The local server uses the same managed-runtime flow as MCPB and `uvx`: it prepares the Patchright Chromium browser cache in the background and opens LinkedIn login on the first auth-requiring tool call. You can still run `uv run -m linkedin_mcp_server --login` when you want to create the session explicitly. + ### Local Setup Help
diff --git a/RELEASE_NOTES_TEMPLATE.md b/RELEASE_NOTES_TEMPLATE.md index 9b240fb1..8fa3317d 100644 --- a/RELEASE_NOTES_TEMPLATE.md +++ b/RELEASE_NOTES_TEMPLATE.md @@ -11,14 +11,10 @@ To pull this specific version, run: docker pull stickerdaniel/linkedin-mcp-server:${VERSION} ``` -## ๐Ÿ“ฆ Update DXT Extension Installation +## ๐Ÿ“ฆ Update MCP Bundle Installation **For Claude Desktop users:** -1. Download the `.dxt` file below -2. Pre-pull the Docker image to avoid timeout issues: - ```bash - docker pull stickerdaniel/linkedin-mcp-server:${VERSION} - ``` -3. Double-click the `.dxt` file to install in Claude Desktop -4. Restart Claude Desktop +1. Download the `.mcpb` file below +2. Double-click the `.mcpb` file to install in Claude Desktop +3. Restart Claude Desktop -> **Note:** The pre-pull step is important because Claude Desktop has a ~60 second connection timeout. Without pre-pulling, the initial image download may exceed this limit. +> **Note:** MCP Bundles (MCPB) are the renamed successor to DXT/Desktop Extensions. 
diff --git a/assets/icons/linkedin.png b/assets/icons/linkedin.png new file mode 100644 index 0000000000000000000000000000000000000000..dc258c51be6b476290f74c3c37e08dfe71da45e9 GIT binary patch literal 15086 zcmeIZXHZjN`!{$JLKkUDN1~|oCcQ^RKzfl50)iANBE1Pg6h(^k-V~7DI|xJ(q)2a4 z1f)vG&;^qBB>23$|NXQxJF~Mh`+NvVIOp8=eU;zU6RM@5L_x+x1^@uXePsn50DyuY zp#TXH__pUg@)vxAxala}0ZRK=R>3a{j}7nJsH+3q;Aav5^4cDN;cfw6OyCOu2(lmm z0r(EV{VxmppQ}(Hi{StGjJuKisOA*_NEzN&xc$HzviXxVjpcDV_RmT|Mv*j2DChVK zk{2W-jrm!e$!ZsfD#^I7qv-8x2{?Yx7GL8!xF$G(`g+)z88GpY!jDLS1yFaq)CttF^S-AG3#CRuG ze*I&sHmlP&?vjgxlPr$mUgcv?G_%!i^O)tI7RB||SlDgQKwwk|AgG>jvRdcHW3?_$ zy|_NM#MbS@nilg^^z)9xr>qY#ALTomhmFs=7*clJmOX^g>FFE}{nEGJX7Witx2HYm z<_DJm*HM0jWMO90SEOLnZ3*zM`Ws~YvnqFx;Hd>rk6PbI_lvO|VMJ?SFHJ;$Dq11N zZw!GIk}5Ogr|}&{AN1(2|UipcB@5@iFAZO>5=C<(5X>)Tn zCKwydxbsoreEXOi79*?+9cgk>17|0k1Ywr|bn`kL_fkn*L_?MQTd67h=5VTDWEvSK z_iUOG({D_MlUK$j(Q)qjY+m@G(?v|JYqioj=?{ZKtt#>iRuW63`)h5BfGBgZJ)g~j zu!fnZJ!*dEJ2J7uucC%0n-n4k=_fX>x&xehrL9^HXSY-P?887paU1qhM_KJDk@bvW zO@1`rcga7C-DdrZoLwCBYWoKzoaW#3vE&ZGcVzW*M{fX>kXSufFFy zQQ0-4zJ7zL`)J!|tw@h5>WTvHysT2Zd7}c0)nCG&q?cd(C+8UOOmjx&5iK@3fiumw zv&mYB5iOrJTUvLTSWbg~8%E-{MBE}=v^g_#YdmWzJX8u|8>hSgn337YmD|f$|Fr0k<7_|MBS^JD7*A^>Za z@N6eD=a>08zox08ULS9oCp8?-7mS{Y0=%a2x*h)3YbpWf#|9wcu*DG6J8qB9yWh@M z0$0;&4nxt&*t2QQSnhL_AuI}!X}{#N+$N@JZzjsJ0Wwy7$)=sH=Q<3Z%RuH~eT!#3 zY3Gc@x1zTKvaR^n)P9uHy!$_diI zST1b-W9h3(JS_w@JhM9vo90hnJKw&u7I{$1x?A^?AnP&&K2=6sh(CGf8>EoUd{UV- zeVMGt_?V}c;@U^GbDD1nXz?df{*lpSOZFiJ{;d*)So*QeSVGya)f@NrT=dI?;X_K^ z=p8mFV%eMwz@x(YK0=Pdy6PK|06$b@|TELJNff4@u6MZ>lFC*^;9vCMeUd;Uslk9|;W1 z7eo_a@;|X-L&q?e+aokfw2SDwX4Ow6t$Ll!UNPo%F$PsxmsKsf1}#DY@Ty`;4+pC4 zLh91z#h?57FYdNqei~4GukD9(9c%MlrR8vpK`0P*ozNxtx2M!8&P>F#Rj)60dHjCYrT*I1zQpal*ay57Kor7A0QEF}pYc{> zig(Se?(M0Vk*_}$XY@`gKO5B8Z}ZR?K4fd46;r^emU0dN$s!%Tw*7Z|_xWx{bdaRD zh^5!F?K@t4ti~HYYi?++7qatPX&5{K#_TAM8KS0>4pV)m{dX#>pIbjzg%CF;(w|kZ zFJ9Q$QSP!Gx&FR_4 zn1n6O|0%t4%46C_AiLfXk5z2AzT@)U-~BZX6$95n+!YTjbKKbfE>m;*we{VM4 
z&tKHpez16pV-^by9QZ1ftoYM#vig@=O{uL+w}F4~BR;b+{2gd4@z)gLQ{*iFnTtQITJ5KKh8phoy26IM?JD z(72SRUC!R)h@0MbZZKL-NlfT(Hgc%B<@QQf5Zot<1_DKnDkM*-GG`tYj^1Q+Hf5PY zQ*#Acg^+Tx;uNnKfS?{d`Cjky&VQ!WvEqJh>cf^fuBB8>8|I)j&|(HzgQVQ)4{KJEBq|7yyRHB(SbU4y0^<<5vmk>N|cvRng|g~WR^wg0u9Vk{hT9HKCr zq#CSW#e!es7+hm=S(A=UaD2pBF{}`}fEoAZN9bME#~A|k%b@zqB{2VJiS8;pF8TGS z$=CaF|6P^T%t>dZb0lSo3 zKXpuS$7zZJEx6H&jr%wEFeAEmh;h3H;f##mgg7mM7vc^n=|hCtsx;Jq?F^FyRf_+b zdQ-Yei@yxoSPLf3<4XvQbXh}E$f*bdo;OO)Jiup$_a=s*ia^Vy+%&M4=ZhsDeT30BC0rdB;iZHZnqrj99ect?LjOMre7jcga6bXGiv$`MHIs5<3S^G^ryqp9U$ z#R!8Im%_2P+eRehiqasTvw}c{XR3E{Y7ll1vGh=elr&e}5zA!+7e^w3!KX!$OFn>Y z{;MKa(UgkD5Jbyi2)%0T7E`Zb1Rm=+$DnT4?6tDx_iVvfJ#UR7d{shK1G`HRHkxdR z%br9)@l(B+9R7RvYRces<++W|{66oS#qC6U@PJGa0BN`6-!@%YOr&6?|G=gGk-F1s zF@d|}FvaF_7#=&u*TH*81y)Msui(wn-ovrdxZ|CXd#Cs`Xuuad#1F|RTj-^P5`~hhNr#8lO-5srD}LiwcxLnK zMI1bI1we*H(SZi~Os%)OuJ@?$%!YCD!=Vyk$)x^H1NK6%WbQR2Vh!hTKC1B(rWqSb z?PZXp+B}MVF?rUb^ZQpaXTmj{e#YUvjYis^5&9{=aW>XxZnom<#8(F?mJ-P zX;?N>64!8?tXcd>;HqfOlc;1Pp-puUhulbVDqt7(F^whg1k;dF|W3aDMk-=>tLYLT7I zXwvU$n*Zyof*DI!C8F%B^Q-GrHcvhsymbe^Fe@kl>9-@tb!lYV{7HTLX10_qIen!x zO+HIS(QXaXy)@nz)i&Ua40z?$19d_#;|))ZRJX&SbH9Me;lUf(>mI`C+h&KBxua1R zT>5h^#ymTZS?C5{&FrH)Jp-6-Sj08tR6lEbJ+gK$J7Q8v_iy|P>m{kFGA(pXr}_k& z^le;>XO|Dy0!-aJv#;{@j$bAFVH+K~1~!Pc3oU7p{~W?7kHGo7y2R&k%rWy;ri%NNHy{ zJ1<<8Iyf;p7_0mDflpmv%BQK>+?I7zo<*MfM^;#%gJ(jd^jXZGACoHhFr8@!Se!gJO@Mi>wOF##|%be!ckMtZgzs zFI-HM*-8bi(4sBrmL=;XwFJoD#mwC0()H2mhoBFG(BkHU%qubj^q?}#ApvpUTzi@t z-yj*IIYQTTjGYe(vBXG#{`fE;I@1aJ0F%LMw}$R|e0pB)L5V?}kLKe+4aFh<|8E&2 z=pX-|q{3Nz!J_ySnJN{jZDYz))5TD8-(IvdZmhfWX*`?LqB-BwYPHv=L;R|f5reGg z!>`-M>(dI-7=j==oPr_y_^AeB;0P zyy7ZUn&kJLZC{^=-C$6-)+Q(h-Gsv}uP*5WbM6J~(9w*5>N}_7l17`)%8#u38lSzp zV>@-bW$bm2eF*y^tc}_=@|VlwJ;<#CCtFq;VMC%AchJlX*}nO66^UqrE&l8ecAaaT z8OC_Dm*m#0t&?Zi|L9|y_>(-|+gayzC3kY~nQQIbJ`GW1EWApS-Ru7BDwx3L1YRrv z?Zh3jPLIFcjQu8y&FDIRQO{oG+0-i}4fbm(9()b=`^Xj}V^1zDcP)j{-ng`p+IWq@ zTG9#YYz z8&&_48sPnygWkGb+T>LGb?9=>t}c^`lfF=j{FPl`ZF|9b3f* 
zSFs*HuWcWeaTcKs6i-%W>}x94BNAuo4Q3qt@=gUNn{JSN8ryd*cnUgV)E+-XwRfSt z#aSbp@b^Z2z18Y~rBg;iYT?k|1uXW_Y?@}S$6hakSBuvfHz`1#T2WSh5=DT04l=I# z!L+Z>ld{#~JLA0>fttQq3sY3p7D z_9^?BtbBAryu;b)ODcxdOs_JLlWODowb|XKZI8F;5&0fsGMm>w;%;)`cz_=0&PZ>`6kd5PnyMKwv}M zE{Q@I>BY+FqA-?ItMs-COvdb`la(|pf4BTJwPWKYM$(EcWFO5{1fcv?L_&(Be?_D? zTX$-{G9#J*OZ@gQ!Q5yqbN4wTd07>adgl(}knOrfy4e_p2j;Wy#+L0kF!Q^10%{7i ztMyJeSn)G1i`Lt2JMI>s|1gN@ned3+g|Gq>D_zSVsIRD`<>Op{{ui{_bD>i$}(v7n-imUNUIi8#WAzj{kWWKl1Zi zmr*MA$wtO+SzOYTma zgaLIciEC8KpO9%{4^3ERy-#<#GhWlk8xk?7Sa@gInoS6>@g%w>4Qo9ru~z`>BK*V- zx^EyIzq_&O>1Xg8fEADHHS=+{>%T+PX*mTeKdl5bPzjnqXU<4juG|HaeL#Q!j5I=# zpQJxkkJKHw*Z2{ z1}*0UzcL;AqvRr42g06R=-UO8rreFDN`2p<^&7zT=OIHfa{Q1Sp93Z_PoEWR`#j>1 zgQ=E4N+SccM4l1DOt4~bWh?H}=f_?OFyuy`Y&`pbdyrG!S(E&aJ=UtW1oAi{-z$7Y z=ulTl$Ib#Y}dg}S&Jv(rER?kc$%_;7J^LU13Nede|u_f0yJ*A^P0 zYaUKONCmJYDX1Xet5DZEEals?;O1Kz$MKoGvyNu0TUORw>Bhj3m%IEI)lBPi~R|FcQ;j^bI5F*EbR(EaQ&bH^74rk z3-g6TKtk+2-4a99ovkBmNIRWKZ$8rhDM~LsRYBM|K(*&4P#u?<&JMO4Ovjs>@-u%e z1Z=50wQID-;DIySk7v8v2!%;Vo07D&JPXX5N?K|h=a1%Xk^&VJ4<`Mnzmn^K~ z1V*^p3W^B1*|eJLN|d#_s6K0H^!ziM=u6H+iHi=|n0Y&&P_8f_ zDyhK51)c_RanY$0D5&;7aAN0a+MWvF3cRY7hZ?7sS>V}!CFN(i%Y@5q5yho4N8MD0rE8g;%>7MdbPE|Vnxb@^B=3~79) z7+)8#3q=eLC+f>Rnv6-ewa5ee3~IM-;L?|ZOE?o3elk_S;zvfL4k&-Vp6mH$njmVj zz-o@<`aO$jae9u&yrmJrz}#f`&4Mdi5al69K{}WYmtgIu6HlM9ul>4`M=vQ;fT$hP zN8Y$#bBwV1slN5kEb4t)fa$$TCip)9T0wfLdGKkE5mLvIb!hM<7@*NZ@u6uY;a`n> ze{qyotZe6AL?jT^BB|Q{rh!yY)~#KcTDczSuevi%BlmTL;vEpx$fY`n1h3tTZP%h! 
zu0`#9%p(F*-aw55&S6n0D57yWY!FA*#3j%n7KdX3#KNAcjlz%}^+WhZ65>1b@fN`w zy*Pl1ha(-IIEkoOC}0Ch_L?iS&umM~p@=b?Ps#hPv=+iO6ZFt6a-5B2BR&m1c_o&2 z2a0e@t7@QkhJIiEq$>)gxFpzNI)&O29uj7H6C_Ov3~dEMFL^aDY%#=D_n=o%Xk% zgZ4rDbHL*TEwBc&1|usl@6jho%B*=*#=jX0%ndo}kb|s`qK0g)XWHb9gv8$m>=^i1 zd7Pn(_dw=R&~Qe^IXh{MC$HA>3CHVjf>EZy-1l{x#od`J3x&I{8qi0)*>35Q{o8^q z;{`Gc4%tfG#uR?t0)IcAE-F9UBi-L8?6;=pC5$%*p^mp3dKny!gE(yEMK0nHdzVFlTO`t65qUd*P`X(7CgyD+K~aVz*It_qb1ZQRwD& zT<~s1JX0;}M%|e$yF-jHc|0wlQqRSA&ckcE4Kw?PoaSem_MblK;8Nkv=fj76P({Di zNgpoNFJP_Yi^?Ea!ZGHK+BV}MKAAp7Z=Hxpj(AnO3-)iCc09{ZW2u*zjc(H7GYK@Z z_wTWdmno7Xwd(o8;JVOOS5#Zd8#fun#8HYA&9Rlj^+6v|bqW{-s##79)EL*Cm8ESx zPc$9Z{Q5cYi9HRB>{jXF%9O3lkI^Pf?2})GUn8Ead7B+K*U$I9<9aOCq@!824hKX` z($2tiKq9D$_^?laxlu)<5o>BMD?hDR;&cP@nEse;G*ICC^t$-1g9T&h6vvHb*H{}d z?ub?uDB|?v-WvewLDt#yJNH%Ow5)5dS?PY>j?#LY^~m}Sn7;zX>9m)W>-t-VPV{{W zC_+JP3-X5K8|b43fFQ3kZ~cHoGvdWJ$B{&PGb8+&O_n^$a<#D+8t%K)7Z7xI z_ASN&UR}wF;SZRfCZ|E(jK|J^_0|svrR}+%U185=b<8)JePV0@EZ!HrIkR7Q81gfZ zf922{=0EE~1iy9z0>8$R3ex8fHR~DktmE1aTr%7J{AWKUEhDFNVYhHss>a17aWT9* zIy$bIn57_R^)c90)abiN4YuMDD?=U}c0X$s`QxR}E*fd*u$d$!pU7oWB}tv^W95dP z9o}_i&oU3xoLCP6qM9Z19>HkX2WqVCYV-C-U0ryu+h|3wywTrnwAtD>kC?u<{C7** zOO0_~d3i1Jy#?OqfL*==D61-w`7fWf*RH-Yx|*)hs9j1k)$w#IfeyML=%froedJ|a z;S<56Kf*V6>}nIq2Kz`I)pX{T;Z@_`61XhlkZztM`~3d+(0hV(f$IB&-AWKrfVvob z4g9#CgWT>)HVdHoaUVf%v+>u%XC-rjAj|)j1^w~OFY(vf0+l>{eSQe+)L^pD^c`=5 zz32f6NFN~5C)O_8)Ap<{(F?UPzYw(kM~w8wF;j{_`W;cYREm9 zs%YPZ^7x5$DMh~R$My=m2d(V;B)d!Z!AGKhm`>+sJ#3Na@j4q%)75M?nA{0Prf7{a zC2Mz73lxSSbl8VG=$08PjHeG)v5$WTux1 zE<;HHq2QsPmp~?}eeT>UJ5KZoTV#KFVru|5UCKy2IdS2$7fnaXT=-qEnoz}S2*z!W zZ7IGWPqfOiXr~;qvF)LEn9N@4=j98JZH)c?DKRY%LAkn;0tpr(byRZ~kc0`T>g1~9 zEykset&_cL^CgAoKb`(9J6?YdQqpI_moFeU9ccl_c00C*U@-BkgvH7JQ|*n>4Sb zndd52Ql6~_R(r!+28>T%!I0jvs}#6&YB4(yU3#$BC4XF3i=5_p788?t)1fY>JrbI6 zAih~2-dY3}UJFmkYp+1~AtTv|f1RXz-|yzUF)9$qX&~H4kXLT4ZD?}0dWx2q>veuG z^og4U=&kCT$*6~c(8$zzrm;Hw@wBV#Dxq4c0jb8RA=wcOP!-B00miHysf@vczArF! 
zTTZA|zlzst7u`e7=DS_uqmI8E4`(CPR-nASIF7 zYfwy`4@c4(d3PEl|Ds5*@k8t3ta8qq;5om-l3SczGbFD*&(`REv%kf<^6AggeMWWT zj$d5ZiRy4su7&Hs^~X0&S%?z>)3nhjQTtM>*KsMOM*QOei|#!}bi9Ha?{*#|KFbW3 zUIo;8zI-vnC4}TwYf?t_+#Iu~;v60e8Ux50+eqaBmm#6J1N4~t1rfaxei!H2eL{iYB*l~@3Btjr97qi(u9n>A4lS~nCb0UJcdbaY_R{Q*SNPf6)# z!sbn|S`te`hh;h*yMG5s$N)h>!#*Z}xokPiExX}hWVTWJK6#PgvW3S`N*`S$JickH z+l39;{v9lrl-q&boU5?#o4`wp!Z8Jh^!4eo1>_Sa!@4SD8g%Xgs*g8F6>RW{6z7{D z{eZQbYL>*V8@I|-e$~^8TRTl2s!nIc$Y)h7QVL13|A&x793f1Xm--oFf)i$%6@~R* zQ}2vmEK>$;-Xh1cbushf|1*Tmb<-Wzm*Y%l*PP3zfS^3}7q0+NBq0-?5-{h#i}D5| zUIj}7zZ8IKZ0*=HL>TBdd3p2-pn`_pq{cP-srj2zeT->=`4qYA!-%4y;ytUlUsr&z zuA6|N#o#4yz>vIC9H0RTXVSI zMfwC|+ir0a%5&v0ve&cEtSJSJ zve|*_4sr|>&{aCYwR@}laLMe%y2><2dB(-NK)E10?i?dJ$W(yuv(C&Xu!Z77HV__X z5S{ePA$M~0%V#qh;jBv+6#7!A0N&+Lz#{0Lt}3XPhA(oPd<+^;P1vNo^78c`&O%$X#2Nq8g=J{k6sDVcrt*$2gY;K7fUbfZ#moLk|Cq$8{-@6Oq z;igs}Xj?_INY>RR*nuDQjK^?p@G@xL3heCoP&WXFN|C2?T+I$C-!i{etUM@<`ugmK zry5%QK`5&Ulvoi^p34!u4jbaM2>(TdO5*7AUI5+fMBEkzoB}v1B5ZBvL&*Zq!frOo zZ~DNHNw%FwDS(Al_YHuV?;cP-|DhrtNCb-Ob!L--T8n=wfxH0E%mv01zJV zXqV|D@GVj*m%B({u0uwL@O%q>@Ms`Y@M#fzE(Ba9!Y7C9e2Zf6X#XV;$EW{F@V_bw zVh$Wx`7Z`>F5|yO=Kp;wikpUgmL1n&U>VM&H#t3hBr91-oNZ%Q^6tKaDB3*yb^I;L zT#wVYw-0tI29apsp8Wfn2)HU0(Dvxuk(h!#h5xZ31P+9>|5GDg&{z3*7Jyj}xJ3{Efg*qlVgG!U z5$LPX8#>JAz6v)`2Qbk6^Hr6guUeYO6+7Po3i>LDAn!kC{226jJyohs=Uc!)H~L=! 
z|8GdpyCYvakv!)gOR0W5L@@|&*q4YowI;Qm>jMJyIHcb#wO9WhfcImQC6+Z|$Y0TaL}h4_=>LU6&g2iS zKunnq%ZeUIQUfEGcQf=SPk(Gy7M%Z%Ca9SF_jXadEG;E4O$U5!Gw{7PW|YtNA7q4c z%Lf(*$^qey{`1BS!aJJ$^cw&4&sqZjT^&qd{{5BrjJ)a=@``);A6VG;SIUXKr)M5zc$qh4L2Lr4f}{a=_IJRW35m zi+XVN-|tu8OHlL=Y=3@>+qaJi70-bv(HqYX4dS%DG@QO}|06w0AF{hXzf95N#d7vF z8|h`5WL|kd(d}?jFou5o`4qnqXc4#x7du)j#g?qX2;2f|tRT|qkbDtNzD&O=mSbX1 z$M3WMS&lnMK+|id?17*YkYCK{%#9737V`F5@;VDX> zUi^ufVDItUDifYEz{8d6 z+NVk-eTMJw_6~rS(C$PWTNgY%NNfv7tyu*e&%6&?iVfN(9~N@@r{-Cq(3XaBiGj1w za=&UaAYNm)Cqynk4nvtiO_h-3c}~BGhQ)jpW-`rSBXw6Vwb_susj>q8ZVZz-ua4OKOaKULZpVs3eJd`*;_9@@mM+y38Xf6gQ6~0P2$@cO&%M+T@}tVNgjf8 None: + """Reset bootstrap singleton state for test isolation.""" + global _state, _lock + for task in (_state.setup_task, _state.login_task): + if task is not None and not task.done(): + task.cancel() + _state = BootstrapState() + _lock = asyncio.Lock() + os.environ.pop("PLAYWRIGHT_BROWSERS_PATH", None) + + +def get_runtime_policy() -> RuntimePolicy: + """Return the active bootstrap runtime policy.""" + if _state.runtime_policy is not None: + return _state.runtime_policy + return ( + RuntimePolicy.DOCKER + if get_runtime_id().endswith("-container") + else RuntimePolicy.MANAGED + ) + + +def browsers_path() -> Path: + """Return the shared user-level Patchright browser cache path.""" + return auth_root_dir(get_profile_dir()) / _BROWSER_DIR + + +def install_metadata_path() -> Path: + """Return the browser install metadata path.""" + return auth_root_dir(get_profile_dir()) / _BROWSER_INSTALL_METADATA + + +def configure_browser_environment() -> Path: + """Ensure the shared browser cache path is configured.""" + browser_dir = browsers_path() + os.environ.setdefault("PLAYWRIGHT_BROWSERS_PATH", str(browser_dir)) + return browser_dir + + +def initialize_bootstrap(runtime_policy: RuntimePolicy | str | None = None) -> None: + """Initialize bootstrap state and configure the shared browser cache.""" + if _state.initialized: + return + 
configure_browser_environment() + _state.runtime_policy = RuntimePolicy(runtime_policy or get_runtime_policy()) + _state.initialized = True + + +def get_bootstrap_state() -> BootstrapState: + """Return current bootstrap state.""" + return _state + + +async def start_background_browser_setup_if_needed() -> None: + """Start shared background browser setup for managed runtimes if needed.""" + initialize_bootstrap() + if get_runtime_policy() != RuntimePolicy.MANAGED: + return + + async with _lock: + if _browser_setup_ready(): + _state.setup_state = SetupState.READY + _state.setup_completed_at = _state.setup_completed_at or utcnow_iso() + return + if _state.setup_task is not None and not _state.setup_task.done(): + return + _start_browser_setup_task_locked() + + +def browser_setup_ready() -> bool: + metadata_path = install_metadata_path() + configured_browsers_path = Path( + os.environ.get("PLAYWRIGHT_BROWSERS_PATH", str(browsers_path())) + ) + if not metadata_path.exists() or not configured_browsers_path.exists(): + return False + if not any(configured_browsers_path.iterdir()): + return False + try: + payload = json.loads(metadata_path.read_text()) + except (OSError, json.JSONDecodeError): + return False + return ( + isinstance(payload, dict) + and payload.get("browser_name") == "chromium" + and payload.get("installer_name") == "patchright" + ) + + +def _browser_setup_ready() -> bool: + """Compatibility wrapper for tests and internal callers.""" + return browser_setup_ready() + + +def _start_browser_setup_task_locked() -> None: + _state.setup_state = SetupState.RUNNING + _state.setup_started_at = utcnow_iso() + _state.last_error = None + _state.setup_completed_at = None + _state.setup_task = asyncio.create_task(_run_browser_setup(), name="browser-setup") + + +async def _run_browser_setup() -> None: + browser_dir = configure_browser_environment() + metadata_path = install_metadata_path() + browser_dir.mkdir(parents=True, exist_ok=True) + + proc = await 
asyncio.create_subprocess_exec( + sys.executable, + "-m", + "patchright", + "install", + "chromium", + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + stdout, stderr = await proc.communicate() + if proc.returncode != 0: + output = "\n".join( + text for text in (stderr.decode().strip(), stdout.decode().strip()) if text + ) + raise BrowserSetupFailedError( + output or "Patchright Chromium browser setup failed." + ) + + metadata = { + "version": 1, + "runtime_id": get_runtime_id(), + "installed_at": utcnow_iso(), + "browsers_path": str(browser_dir), + "browser_name": "chromium", + "installer_name": "patchright", + } + metadata_path.write_text(json.dumps(metadata, indent=2, sort_keys=True) + "\n") + + +def _safe_task_done(task: asyncio.Task[None] | None) -> bool: + return task is not None and task.done() + + +async def _refresh_background_task_state() -> None: + if _safe_task_done(_state.setup_task): + task = _state.setup_task + assert task is not None + _state.setup_task = None + try: + task.result() + except asyncio.CancelledError: + _state.setup_state = SetupState.FAILED + _state.last_error = "Browser setup task was cancelled" + logger.warning("Patchright Chromium browser setup task cancelled") + except Exception as exc: + _state.setup_state = SetupState.FAILED + _state.last_error = str(exc) + logger.warning("Patchright Chromium browser setup failed: %s", exc) + else: + _state.setup_state = SetupState.READY + _state.setup_completed_at = utcnow_iso() + + if _safe_task_done(_state.login_task): + task = _state.login_task + assert task is not None + _state.login_task = None + try: + task.result() + except asyncio.CancelledError: + _state.auth_state = AuthState.FAILED + _state.last_error = "LinkedIn login bootstrap task was cancelled" + logger.warning("LinkedIn login bootstrap task cancelled") + except Exception as exc: + _state.auth_state = AuthState.FAILED + _state.last_error = str(exc) + logger.warning("LinkedIn login bootstrap failed: %s", 
exc) + else: + _state.auth_state = AuthState.READY + _state.auth_completed_at = utcnow_iso() + + +async def ensure_tool_ready_or_raise( + tool_name: str, ctx: Context | None = None +) -> None: + """Gate scrape/search tools on browser setup and authentication readiness.""" + initialize_bootstrap() + await _refresh_background_task_state() + + if get_runtime_policy() == RuntimePolicy.DOCKER: + _raise_if_docker_auth_missing() + return + + if _browser_setup_ready(): + _state.setup_state = SetupState.READY + else: + if _state.setup_state in {SetupState.IDLE, SetupState.FAILED} and ( + _state.setup_task is None or _state.setup_task.done() + ): + await start_background_browser_setup_if_needed() + if ctx is not None: + await ctx.report_progress( + progress=5, + total=100, + message=f"{tool_name}: Patchright Chromium browser setup still in progress", + ) + raise BrowserSetupInProgressError( + "LinkedIn setup is not complete yet. The Patchright Chromium browser is still downloading in the background. Retry this tool in a few minutes." + ) + + if _auth_ready(): + _state.auth_state = AuthState.READY + return + + await _start_login_if_needed(ctx) + + +def _raise_if_docker_auth_missing() -> None: + if _auth_ready(): + return + raise DockerHostLoginRequiredError( + "No valid LinkedIn session is available in Docker. Run --login on the host machine to create a session, then retry this tool." 
+ ) + + +def _auth_ready() -> bool: + profile_dir = get_profile_dir() + return ( + profile_exists(profile_dir) + and portable_cookie_path(profile_dir).exists() + and source_state_path(profile_dir).exists() + and _has_source_state() + ) + + +def _has_source_state() -> bool: + try: + get_authentication_source() + except Exception: + return False + return True + + +async def _start_login_if_needed(ctx: Context | None = None) -> None: + async with _lock: + await _refresh_background_task_state() + + if _auth_ready(): + _state.auth_state = AuthState.READY + return + + if _state.login_task is not None and not _state.login_task.done(): + if ctx is not None: + await ctx.report_progress( + progress=25, + total=100, + message="LinkedIn login already in progress", + ) + raise AuthenticationInProgressError( + "No valid LinkedIn session is available yet. LinkedIn login is already in progress in a browser window. Complete login there, then retry this tool." + ) + + _move_invalid_auth_state_aside() + _state.auth_state = AuthState.STARTING + _state.auth_started_at = utcnow_iso() + _state.last_error = None + _state.auth_completed_at = None + _state.login_task = asyncio.create_task( + _run_login_flow(), name="linkedin-login" + ) + + if ctx is not None: + await ctx.report_progress( + progress=25, + total=100, + message="LinkedIn login browser opened", + ) + raise AuthenticationStartedError( + "No valid LinkedIn session was found. A login browser window has been opened. Sign in with your LinkedIn credentials there, then retry this tool." 
+ ) + + +async def start_login_if_needed(ctx: Context | None = None) -> None: + """Public wrapper for starting the shared login workflow.""" + await _start_login_if_needed(ctx) + + +def _move_invalid_auth_state_aside() -> None: + profile_dir = get_profile_dir() + targets = [ + profile_dir, + portable_cookie_path(profile_dir), + source_state_path(profile_dir), + runtime_profiles_root(profile_dir), + ] + existing = [target for target in targets if target.exists()] + if not existing: + return + if _auth_ready(): + return + + backup_dir = ( + auth_root_dir(profile_dir) + / f"{_INVALID_STATE_PREFIX}{utcnow_iso().replace(':', '-')}" + ) + backup_dir.mkdir(parents=True, exist_ok=True) + for target in existing: + shutil.move(str(target), str(backup_dir / target.name)) + + +async def _run_login_flow() -> None: + _state.auth_state = AuthState.IN_PROGRESS + success = await interactive_login(get_profile_dir(), warm_up=True) + if not success: + raise AuthenticationBootstrapFailedError( + "LinkedIn login was not completed. Retry the tool call to reopen the browser and continue setup." + ) diff --git a/linkedin_mcp_server/cli_main.py b/linkedin_mcp_server/cli_main.py index f25a7d22..70d6207e 100644 --- a/linkedin_mcp_server/cli_main.py +++ b/linkedin_mcp_server/cli_main.py @@ -1,10 +1,4 @@ -""" -LinkedIn MCP Server - Main CLI application entry point. - -Implements a simplified two-phase startup: -1. Authentication Check - Verify browser profile is available -2. 
Server Runtime - MCP server startup with transport selection -""" +"""LinkedIn MCP Server main CLI application entry point.""" import asyncio import logging @@ -13,12 +7,9 @@ import inquirer -from linkedin_mcp_server.core import AuthenticationError, RateLimitError - -from linkedin_mcp_server.authentication import ( - clear_auth_state, - get_authentication_source, -) +from linkedin_mcp_server.bootstrap import configure_browser_environment +from linkedin_mcp_server.core import AuthenticationError +from linkedin_mcp_server.authentication import clear_auth_state from linkedin_mcp_server.config import get_config from linkedin_mcp_server.drivers.browser import ( experimental_persist_derived_runtime, @@ -29,7 +20,6 @@ set_headless, ) from linkedin_mcp_server.debug_trace import should_keep_traces -from linkedin_mcp_server.exceptions import CredentialsNotFoundError from linkedin_mcp_server.logging_config import configure_logging, teardown_trace_logging from linkedin_mcp_server.session_state import ( get_runtime_id, @@ -41,7 +31,7 @@ source_state_path, ) from linkedin_mcp_server.server import create_mcp_server -from linkedin_mcp_server.setup import run_interactive_setup, run_profile_creation +from linkedin_mcp_server.setup import run_profile_creation logger = logging.getLogger(__name__) @@ -238,43 +228,6 @@ async def check_session() -> bool: sys.exit(1) -def ensure_authentication_ready() -> None: - """ - Phase 1: Ensure authentication is ready. - - Checks for existing browser profile. - If not found, runs interactive setup in interactive mode. - - Raises: - CredentialsNotFoundError: If authentication setup fails - """ - config = get_config() - - # Check for existing profile - try: - get_authentication_source() - return - - except CredentialsNotFoundError: - pass - - # No authentication found - try interactive setup if possible - if not config.is_interactive: - raise CredentialsNotFoundError( - "No LinkedIn profile found.\n" - "Options:\n" - " 1. 
Run with --login to create a profile\n" - " 2. Run with --no-headless to login interactively" - ) - - # Run interactive setup - logger.info("No authentication found, starting interactive setup...") - success = run_interactive_setup() - - if not success: - raise CredentialsNotFoundError("Interactive setup was cancelled or failed") - - def get_version() -> str: """Get version from installed metadata with a source fallback.""" try: @@ -322,6 +275,8 @@ def main() -> None: logger.info(f"LinkedIn MCP Server v{version}") try: + configure_browser_environment() + # Set headless mode from config set_headless(config.browser.headless) @@ -339,38 +294,7 @@ def main() -> None: logger.debug(f"Server configuration: {config}") - # Phase 1: Ensure Authentication is Ready - try: - ensure_authentication_ready() - if config.is_interactive: - print("โœ… Authentication ready") - logger.info("Authentication ready") - - except CredentialsNotFoundError as e: - logger.error(f"Authentication setup failed: {e}") - if config.is_interactive: - print("\nโŒ Authentication required") - print(str(e)) - sys.exit(1) - - except KeyboardInterrupt: - if config.is_interactive: - print("\n\n๐Ÿ‘‹ Setup cancelled by user") - sys.exit(0) - - except (AuthenticationError, RateLimitError) as e: - logger.error(f"LinkedIn error during setup: {e}") - if config.is_interactive: - print(f"\nโŒ {str(e)}") - sys.exit(1) - - except Exception as e: - logger.exception(f"Unexpected error during authentication setup: {e}") - if config.is_interactive: - print(f"\nโŒ Setup failed: {e}") - sys.exit(1) - - # Phase 2: Server Runtime + # Phase 1: Server Runtime try: transport = config.server.transport diff --git a/linkedin_mcp_server/dependencies.py b/linkedin_mcp_server/dependencies.py index d6c0bda4..121ad7fd 100644 --- a/linkedin_mcp_server/dependencies.py +++ b/linkedin_mcp_server/dependencies.py @@ -1,22 +1,47 @@ -"""Dependency injection factories for MCP tools.""" +"""Helpers used by MCP tools after bootstrap gating.""" 
+from fastmcp import Context + +from linkedin_mcp_server.bootstrap import ensure_tool_ready_or_raise +from linkedin_mcp_server.core.exceptions import NetworkError from linkedin_mcp_server.drivers.browser import ( ensure_authenticated, get_or_create_browser, ) from linkedin_mcp_server.error_handler import raise_tool_error +from linkedin_mcp_server.exceptions import LinuxBrowserDependencyError from linkedin_mcp_server.scraping import LinkedInExtractor -async def get_extractor() -> LinkedInExtractor: - """Acquire the singleton browser, authenticate, and return a ready extractor. +def _is_linux_browser_dependency_error(error: Exception) -> bool: + message = str(error).lower() + markers = ( + "host system is missing dependencies", + "install-deps", + "shared libraries", + "libnss3", + "libatk", + ) + return any(marker in message for marker in markers) + - Known LinkedIn exceptions are converted to structured ToolError responses - via raise_tool_error(); unexpected exceptions propagate as-is. - """ +async def get_ready_extractor( + ctx: Context | None, + *, + tool_name: str, +) -> LinkedInExtractor: + """Run bootstrap gating, then acquire an authenticated extractor.""" try: + await ensure_tool_ready_or_raise(tool_name, ctx) browser = await get_or_create_browser() await ensure_authenticated() return LinkedInExtractor(browser.page) except Exception as e: - raise_tool_error(e, "get_extractor") # NoReturn + if isinstance(e, NetworkError) and _is_linux_browser_dependency_error(e): + raise_tool_error( + LinuxBrowserDependencyError( + "Chromium could not start because required system libraries are missing on this Linux host. Install the needed browser dependencies or use the Docker setup instead." 
+ ), + tool_name, + ) + raise_tool_error(e, tool_name) # NoReturn diff --git a/linkedin_mcp_server/error_diagnostics.py b/linkedin_mcp_server/error_diagnostics.py index 6e8d95bf..e7fd0592 100644 --- a/linkedin_mcp_server/error_diagnostics.py +++ b/linkedin_mcp_server/error_diagnostics.py @@ -359,10 +359,10 @@ def _inside_running_event_loop() -> bool: def _installation_method_lines(runtime: dict[str, Any]) -> list[str]: current_runtime_id = str(runtime.get("current_runtime_id") or "") docker_checked = "x" if "container" in current_runtime_id else " " + managed_checked = " " if "container" in current_runtime_id else "x" return [ f"- [{docker_checked}] Docker (specify docker image version/tag): `stickerdaniel/linkedin-mcp-server:` with `~/.linkedin-mcp` mounted into `/home/pwuser/.linkedin-mcp`", - "- [ ] Claude Desktop DXT extension (specify docker image version/tag): _._._", - "- [ ] Local Python setup", + f"- [{managed_checked}] Managed runtime (Claude Desktop MCP Bundle, `uvx`, or local `uv run` setup)", ] @@ -373,7 +373,7 @@ def _installation_method_summary(runtime: dict[str, Any]) -> str: "Docker using `stickerdaniel/linkedin-mcp-server:` with " "`~/.linkedin-mcp` mounted into `/home/pwuser/.linkedin-mcp`" ) - return "Local Python setup" + return "Managed runtime (Claude Desktop MCP Bundle, `uvx`, or local `uv run` setup)" def _tool_name_for_context(payload: dict[str, Any]) -> str | None: diff --git a/linkedin_mcp_server/error_handler.py b/linkedin_mcp_server/error_handler.py index c245ecbf..00f49235 100644 --- a/linkedin_mcp_server/error_handler.py +++ b/linkedin_mcp_server/error_handler.py @@ -22,7 +22,14 @@ ) from linkedin_mcp_server.exceptions import ( + AuthenticationBootstrapFailedError, + AuthenticationInProgressError, + AuthenticationStartedError, + BrowserSetupFailedError, + BrowserSetupInProgressError, CredentialsNotFoundError, + DockerHostLoginRequiredError, + LinuxBrowserDependencyError, LinkedInMCPError, SessionExpiredError, ) @@ -76,6 +83,36 @@ def 
raise_tool_error(exception: Exception, context: str = "") -> NoReturn: context=context, ) + elif isinstance(exception, BrowserSetupInProgressError): + logger.info("Browser setup in progress%s: %s", ctx, exception) + raise ToolError(str(exception)) from exception + + elif isinstance(exception, BrowserSetupFailedError): + logger.warning("Browser setup failed%s: %s", ctx, exception) + raise ToolError( + "LinkedIn browser setup was not ready. A fresh setup attempt has started in the background. Retry this tool in a few minutes." + ) from exception + + elif isinstance(exception, AuthenticationStartedError): + logger.info("Authentication started%s: %s", ctx, exception) + raise ToolError(str(exception)) from exception + + elif isinstance(exception, AuthenticationInProgressError): + logger.info("Authentication in progress%s: %s", ctx, exception) + raise ToolError(str(exception)) from exception + + elif isinstance(exception, AuthenticationBootstrapFailedError): + logger.warning("Authentication bootstrap failed%s: %s", ctx, exception) + raise ToolError(str(exception)) from exception + + elif isinstance(exception, DockerHostLoginRequiredError): + logger.warning("Docker host login required%s: %s", ctx, exception) + raise ToolError(str(exception)) from exception + + elif isinstance(exception, LinuxBrowserDependencyError): + logger.warning("Linux browser dependency missing%s: %s", ctx, exception) + raise ToolError(str(exception)) from exception + elif isinstance(exception, SessionExpiredError): logger.warning("Session expired%s: %s", ctx, exception) _raise_tool_error_with_diagnostics( diff --git a/linkedin_mcp_server/exceptions.py b/linkedin_mcp_server/exceptions.py index b06b49d8..81f0d14e 100644 --- a/linkedin_mcp_server/exceptions.py +++ b/linkedin_mcp_server/exceptions.py @@ -29,3 +29,31 @@ def __init__(self, message: str | None = None): " Run with --login to create a new session" ) super().__init__(message or default_msg) + + +class 
BrowserSetupInProgressError(LinkedInMCPError): + """Patchright Chromium browser setup is still running.""" + + +class BrowserSetupFailedError(LinkedInMCPError): + """Patchright Chromium browser setup failed.""" + + +class AuthenticationStartedError(LinkedInMCPError): + """Interactive LinkedIn login has been started.""" + + +class AuthenticationInProgressError(LinkedInMCPError): + """Interactive LinkedIn login is already running.""" + + +class AuthenticationBootstrapFailedError(LinkedInMCPError): + """Interactive LinkedIn login could not be completed.""" + + +class DockerHostLoginRequiredError(LinkedInMCPError): + """Docker runtime requires host-side login creation.""" + + +class LinuxBrowserDependencyError(LinkedInMCPError): + """Linux host dependencies required for Chromium are missing.""" diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index 11025d2a..bd6a6e4e 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -11,8 +11,12 @@ from fastmcp import FastMCP from fastmcp.server.lifespan import lifespan +from linkedin_mcp_server.bootstrap import ( + get_runtime_policy, + initialize_bootstrap, + start_background_browser_setup_if_needed, +) from linkedin_mcp_server.constants import TOOL_TIMEOUT_SECONDS -from linkedin_mcp_server.authentication import get_authentication_source from linkedin_mcp_server.drivers.browser import close_browser from linkedin_mcp_server.error_handler import raise_tool_error from linkedin_mcp_server.sequential_tool_middleware import ( @@ -32,25 +36,20 @@ async def browser_lifespan(app: FastMCP) -> AsyncIterator[dict[str, Any]]: Derived runtime durability must not depend on this hook. Docker runtime sessions are checkpoint-committed when they are created. 
""" + del app logger.info("LinkedIn MCP Server starting...") + initialize_bootstrap(get_runtime_policy()) + await start_background_browser_setup_if_needed() yield {} logger.info("LinkedIn MCP Server shutting down...") await close_browser() -@lifespan -async def auth_lifespan(app: FastMCP) -> AsyncIterator[dict[str, Any]]: - """Validate authentication profile exists at startup.""" - logger.info("Validating LinkedIn authentication...") - get_authentication_source() - yield {} - - def create_mcp_server() -> FastMCP: """Create and configure the MCP server with all LinkedIn tools.""" mcp = FastMCP( "linkedin_scraper", - lifespan=auth_lifespan | browser_lifespan, + lifespan=browser_lifespan, mask_error_details=True, ) mcp.add_middleware(SequentialToolExecutionMiddleware()) diff --git a/linkedin_mcp_server/tools/company.py b/linkedin_mcp_server/tools/company.py index 681b8ec5..def024c3 100644 --- a/linkedin_mcp_server/tools/company.py +++ b/linkedin_mcp_server/tools/company.py @@ -9,12 +9,11 @@ from typing import Any from fastmcp import Context, FastMCP -from fastmcp.dependencies import Depends from linkedin_mcp_server.constants import TOOL_TIMEOUT_SECONDS -from linkedin_mcp_server.dependencies import get_extractor +from linkedin_mcp_server.dependencies import get_ready_extractor from linkedin_mcp_server.error_handler import raise_tool_error -from linkedin_mcp_server.scraping import LinkedInExtractor, parse_company_sections +from linkedin_mcp_server.scraping import parse_company_sections from linkedin_mcp_server.scraping.extractor import _RATE_LIMITED_MSG from linkedin_mcp_server.scraping.link_metadata import Reference @@ -29,12 +28,13 @@ def register_company_tools(mcp: FastMCP) -> None: title="Get Company Profile", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"company", "scraping"}, + exclude_args=["extractor"], ) async def get_company_profile( company_name: str, ctx: Context, sections: str | None = None, - extractor: LinkedInExtractor = 
Depends(get_extractor), + extractor: Any | None = None, ) -> dict[str, Any]: """ Get a specific company's LinkedIn profile. @@ -54,6 +54,9 @@ async def get_company_profile( The LLM should parse the raw text in each section. """ try: + extractor = extractor or await get_ready_extractor( + ctx, tool_name="get_company_profile" + ) requested, unknown = parse_company_sections(sections) logger.info( @@ -83,11 +86,12 @@ async def get_company_profile( title="Get Company Posts", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"company", "scraping"}, + exclude_args=["extractor"], ) async def get_company_posts( company_name: str, ctx: Context, - extractor: LinkedInExtractor = Depends(get_extractor), + extractor: Any | None = None, ) -> dict[str, Any]: """ Get recent posts from a company's LinkedIn feed. @@ -101,6 +105,9 @@ async def get_company_posts( The LLM should parse the raw text to extract individual posts. """ try: + extractor = extractor or await get_ready_extractor( + ctx, tool_name="get_company_posts" + ) logger.info("Scraping company posts: %s", company_name) await ctx.report_progress( diff --git a/linkedin_mcp_server/tools/job.py b/linkedin_mcp_server/tools/job.py index dcf365a1..1d4a9500 100644 --- a/linkedin_mcp_server/tools/job.py +++ b/linkedin_mcp_server/tools/job.py @@ -8,13 +8,11 @@ from typing import Annotated, Any from fastmcp import Context, FastMCP -from fastmcp.dependencies import Depends from pydantic import Field from linkedin_mcp_server.constants import TOOL_TIMEOUT_SECONDS -from linkedin_mcp_server.dependencies import get_extractor +from linkedin_mcp_server.dependencies import get_ready_extractor from linkedin_mcp_server.error_handler import raise_tool_error -from linkedin_mcp_server.scraping import LinkedInExtractor logger = logging.getLogger(__name__) @@ -27,11 +25,12 @@ def register_job_tools(mcp: FastMCP) -> None: title="Get Job Details", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"job", "scraping"}, + 
exclude_args=["extractor"], ) async def get_job_details( job_id: str, ctx: Context, - extractor: LinkedInExtractor = Depends(get_extractor), + extractor: Any | None = None, ) -> dict[str, Any]: """ Get job details for a specific job posting on LinkedIn. @@ -45,6 +44,9 @@ async def get_job_details( The LLM should parse the raw text to extract job details. """ try: + extractor = extractor or await get_ready_extractor( + ctx, tool_name="get_job_details" + ) logger.info("Scraping job: %s", job_id) await ctx.report_progress( @@ -65,6 +67,7 @@ async def get_job_details( title="Search Jobs", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"job", "search"}, + exclude_args=["extractor"], ) async def search_jobs( keywords: str, @@ -77,7 +80,7 @@ async def search_jobs( work_type: str | None = None, easy_apply: bool = False, sort_by: str | None = None, - extractor: LinkedInExtractor = Depends(get_extractor), + extractor: Any | None = None, ) -> dict[str, Any]: """ Search for jobs on LinkedIn. @@ -101,6 +104,9 @@ async def search_jobs( numeric job ID strings usable with get_job_details), and optional references. 
""" try: + extractor = extractor or await get_ready_extractor( + ctx, tool_name="search_jobs" + ) logger.info( "Searching jobs: keywords='%s', location='%s', max_pages=%d", keywords, diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index afa058bb..79053c24 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -9,12 +9,11 @@ from typing import Any from fastmcp import Context, FastMCP -from fastmcp.dependencies import Depends from linkedin_mcp_server.constants import TOOL_TIMEOUT_SECONDS -from linkedin_mcp_server.dependencies import get_extractor +from linkedin_mcp_server.dependencies import get_ready_extractor from linkedin_mcp_server.error_handler import raise_tool_error -from linkedin_mcp_server.scraping import LinkedInExtractor, parse_person_sections +from linkedin_mcp_server.scraping import parse_person_sections logger = logging.getLogger(__name__) @@ -27,12 +26,13 @@ def register_person_tools(mcp: FastMCP) -> None: title="Get Person Profile", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"person", "scraping"}, + exclude_args=["extractor"], ) async def get_person_profile( linkedin_username: str, ctx: Context, sections: str | None = None, - extractor: LinkedInExtractor = Depends(get_extractor), + extractor: Any | None = None, ) -> dict[str, Any]: """ Get a specific person's LinkedIn profile. @@ -53,6 +53,9 @@ async def get_person_profile( The LLM should parse the raw text in each section. 
""" try: + extractor = extractor or await get_ready_extractor( + ctx, tool_name="get_person_profile" + ) requested, unknown = parse_person_sections(sections) logger.info( @@ -82,12 +85,13 @@ async def get_person_profile( title="Search People", annotations={"readOnlyHint": True, "openWorldHint": True}, tags={"person", "search"}, + exclude_args=["extractor"], ) async def search_people( keywords: str, ctx: Context, location: str | None = None, - extractor: LinkedInExtractor = Depends(get_extractor), + extractor: Any | None = None, ) -> dict[str, Any]: """ Search for people on LinkedIn. @@ -102,6 +106,9 @@ async def search_people( The LLM should parse the raw text to extract individual people and their profiles. """ try: + extractor = extractor or await get_ready_extractor( + ctx, tool_name="search_people" + ) logger.info( "Searching people: keywords='%s', location='%s'", keywords, diff --git a/manifest.json b/manifest.json index 77a7d94c..3d1a8f8f 100644 --- a/manifest.json +++ b/manifest.json @@ -1,10 +1,10 @@ { - "dxt_version": "0.1", + "manifest_version": "0.4", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", "version": "4.4.2", "description": "Connect Claude to LinkedIn for profiles, companies, job details, and people search", - "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account. 
Access profiles, companies, and job postings through a Docker container on your machine.\n\n## Features\n- **Profile Access**: Get detailed LinkedIn profile information including work history, education, and skills\n- **Company Profiles**: Extract comprehensive company information and details\n- **Job Details**: Retrieve job posting information from LinkedIn URLs\n- **Job Search**: Search for jobs with keywords and location filters\n- **People Search**: Search for people by keywords and location\n- **Company Posts**: Get recent posts from a company's LinkedIn feed\n- **Person Posts**: Get recent activity and posts from a person's profile\n\n## First-Time Setup\n\n### 1. Pre-pull Docker Image (Required)\nRun this command first to avoid connection timeouts:\n```\ndocker pull stickerdaniel/linkedin-mcp-server:4.4.2", + "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account with an MCP Bundle (MCPB, formerly DXT). The bundle starts quickly, downloads the Patchright Chromium browser in the background when needed, and opens LinkedIn login on the first auth-requiring tool call.\n\n## First-time managed runtime flow\n\n1. Install the `.mcpb` bundle in Claude Desktop.\n2. Start Claude Desktop; the MCP server starts and begins preparing the Patchright Chromium browser cache under `~/.linkedin-mcp/patchright-browsers`.\n3. If you call a tool before setup finishes, the tool returns a setup-in-progress error.\n4. On the first tool call that needs authentication, a browser window opens so you can sign into LinkedIn.\n5. 
Retry the tool after login completes.\n\nDocker remains available as a separate runtime path, but it still requires host-side `--login`.", "author": { "name": "Daniel Sticker", "email": "daniel@sticker.name", @@ -14,19 +14,33 @@ "documentation": "https://github.com/stickerdaniel/linkedin-mcp-server#readme", "support": "https://github.com/stickerdaniel/linkedin-mcp-server/issues", "license": "MIT", - "keywords": ["linkedin", "scraping", "mcp", "profiles", "companies", "jobs", "people", "search", "posts"], - "icon": "assets/icons/linkedin.svg", - "screenshots": ["assets/screenshots/screenshot.png"], + "keywords": [ + "linkedin", + "scraping", + "mcp", + "mcpb", + "profiles", + "companies", + "jobs", + "people", + "search", + "posts" + ], + "icon": "assets/icons/linkedin.png", + "screenshots": [ + "assets/screenshots/screenshot.png" + ], "server": { - "type": "binary", - "entry_point": "docker", + "type": "uv", + "entry_point": "linkedin_mcp_server/__main__.py", "mcp_config": { - "command": "docker", + "command": "uv", "args": [ - "run", "--rm", "-i", - "-v", "${HOME}/.linkedin-mcp:/home/pwuser/.linkedin-mcp", - "-e", "LOG_LEVEL=DEBUG", - "stickerdaniel/linkedin-mcp-server:4.4.2" + "run", + "--project", + "${__dirname}", + "-m", + "linkedin_mcp_server" ] } }, @@ -63,6 +77,10 @@ "user_config": {}, "compatibility": { "claude_desktop": ">=0.10.0", - "platforms": ["darwin", "linux", "win32"] + "platforms": [ + "darwin", + "linux", + "win32" + ] } } diff --git a/tests/conftest.py b/tests/conftest.py index a3845335..801cc186 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,12 +4,15 @@ @pytest.fixture(autouse=True) def reset_singletons(): """Reset global state for test isolation.""" + from linkedin_mcp_server.bootstrap import reset_bootstrap_for_testing from linkedin_mcp_server.config import reset_config from linkedin_mcp_server.drivers.browser import reset_browser_for_testing + reset_bootstrap_for_testing() reset_browser_for_testing() reset_config() yield + 
reset_bootstrap_for_testing() reset_browser_for_testing() reset_config() diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py new file mode 100644 index 00000000..b37c6fb7 --- /dev/null +++ b/tests/test_bootstrap.py @@ -0,0 +1,151 @@ +import asyncio +import os +from unittest.mock import MagicMock + +import pytest + +from linkedin_mcp_server.bootstrap import ( + AuthState, + ensure_tool_ready_or_raise, + get_bootstrap_state, + get_runtime_policy, + initialize_bootstrap, + install_metadata_path, + browsers_path, + reset_bootstrap_for_testing, + SetupState, + start_background_browser_setup_if_needed, +) +from linkedin_mcp_server.exceptions import ( + AuthenticationInProgressError, + AuthenticationStartedError, + BrowserSetupInProgressError, + DockerHostLoginRequiredError, +) + + +class TestBootstrap: + async def test_managed_startup_starts_background_setup(self, monkeypatch): + async def fake_setup() -> None: + return None + + monkeypatch.setattr( + "linkedin_mcp_server.bootstrap.browser_setup_ready", lambda: False + ) + monkeypatch.setattr( + "linkedin_mcp_server.bootstrap._run_browser_setup", fake_setup + ) + + initialize_bootstrap("managed") + await start_background_browser_setup_if_needed() + + state = get_bootstrap_state() + assert state.setup_state is SetupState.RUNNING + assert state.setup_task is not None + await state.setup_task + + async def test_setup_in_progress_raises(self): + initialize_bootstrap("managed") + state = get_bootstrap_state() + state.setup_state = SetupState.RUNNING + state.setup_task = MagicMock(done=lambda: False) + + with pytest.raises(BrowserSetupInProgressError): + await ensure_tool_ready_or_raise("search_jobs") + + async def test_missing_auth_starts_login(self, monkeypatch): + async def fake_start_login(ctx=None) -> None: + raise AuthenticationStartedError( + "No valid LinkedIn session was found. A login browser window has been opened. Sign in with your LinkedIn credentials there, then retry this tool." 
+ ) + + monkeypatch.setattr( + "linkedin_mcp_server.bootstrap.browser_setup_ready", lambda: True + ) + monkeypatch.setattr("linkedin_mcp_server.bootstrap._auth_ready", lambda: False) + monkeypatch.setattr( + "linkedin_mcp_server.bootstrap._start_login_if_needed", fake_start_login + ) + + initialize_bootstrap("managed") + + with pytest.raises(AuthenticationStartedError): + await ensure_tool_ready_or_raise("get_person_profile") + + async def test_login_in_progress_reuses_existing_session(self, monkeypatch): + monkeypatch.setattr( + "linkedin_mcp_server.bootstrap.browser_setup_ready", lambda: True + ) + monkeypatch.setattr("linkedin_mcp_server.bootstrap._auth_ready", lambda: False) + + initialize_bootstrap("managed") + state = get_bootstrap_state() + state.auth_state = AuthState.IN_PROGRESS + state.login_task = MagicMock(done=lambda: False) + + with pytest.raises(AuthenticationInProgressError): + await ensure_tool_ready_or_raise("get_person_profile") + + async def test_docker_requires_host_login(self, monkeypatch): + monkeypatch.setattr("linkedin_mcp_server.bootstrap._auth_ready", lambda: False) + initialize_bootstrap("docker") + with pytest.raises(DockerHostLoginRequiredError): + await ensure_tool_ready_or_raise("search_jobs") + + def test_reset_bootstrap_clears_state(self): + initialize_bootstrap("managed") + reset_bootstrap_for_testing() + state = get_bootstrap_state() + assert state.runtime_policy is None + assert state.initialized is False + assert "PLAYWRIGHT_BROWSERS_PATH" not in os.environ + + def test_reset_bootstrap_clears_browser_env_var(self): + os.environ["PLAYWRIGHT_BROWSERS_PATH"] = "/tmp/stale-browser-cache" + + reset_bootstrap_for_testing() + + assert "PLAYWRIGHT_BROWSERS_PATH" not in os.environ + + def test_reset_bootstrap_cancels_running_tasks(self): + setup_task = MagicMock() + setup_task.done.return_value = False + login_task = MagicMock() + login_task.done.return_value = False + + initialize_bootstrap("managed") + state = get_bootstrap_state() + 
state.setup_task = setup_task + state.login_task = login_task + + reset_bootstrap_for_testing() + + setup_task.cancel.assert_called_once_with() + login_task.cancel.assert_called_once_with() + + async def test_cancelled_setup_task_retries_cleanly(self): + initialize_bootstrap("managed") + state = get_bootstrap_state() + task = asyncio.create_task(asyncio.sleep(10), name="browser-setup") + task.cancel() + with pytest.raises(asyncio.CancelledError): + await task + state.setup_task = task + + with pytest.raises(BrowserSetupInProgressError): + await ensure_tool_ready_or_raise("search_jobs") + + assert state.setup_state is SetupState.RUNNING + assert state.setup_task is not None + + def test_managed_browser_path_defaults_under_auth_root(self, isolate_profile_dir): + path = browsers_path() + assert path == isolate_profile_dir.parent / "patchright-browsers" + + def test_install_metadata_path_defaults_under_auth_root(self, isolate_profile_dir): + path = install_metadata_path() + assert path == isolate_profile_dir.parent / "browser-install.json" + + def test_runtime_policy_uses_initialized_value(self): + initialize_bootstrap("managed") + assert get_runtime_policy() == "managed" diff --git a/tests/test_cli_main.py b/tests/test_cli_main.py index e428ff7a..c713e739 100644 --- a/tests/test_cli_main.py +++ b/tests/test_cli_main.py @@ -9,7 +9,6 @@ import linkedin_mcp_server.cli_main as cli_main from linkedin_mcp_server.config.schema import AppConfig -from linkedin_mcp_server.exceptions import CredentialsNotFoundError def _make_config( @@ -33,9 +32,6 @@ def _patch_main_dependencies( "linkedin_mcp_server.cli_main.configure_logging", lambda **_kwargs: None ) monkeypatch.setattr("linkedin_mcp_server.cli_main.get_version", lambda: "4.0.0") - monkeypatch.setattr( - "linkedin_mcp_server.cli_main.ensure_authentication_ready", lambda: None - ) monkeypatch.setattr("linkedin_mcp_server.cli_main.set_headless", lambda _x: None) @@ -149,22 +145,19 @@ def fake_version(package_name: str) -> str: 
assert calls == ["linkedin-scraper-mcp"] -def test_main_non_interactive_auth_failure_has_no_stdout( +def test_main_non_interactive_no_auth_still_starts_server( monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str] ) -> None: config = _make_config( is_interactive=False, transport="stdio", transport_explicitly_set=False ) _patch_main_dependencies(monkeypatch, config) - monkeypatch.setattr( - "linkedin_mcp_server.cli_main.ensure_authentication_ready", - lambda: (_ for _ in ()).throw(CredentialsNotFoundError("missing profile")), - ) + mcp = MagicMock() + monkeypatch.setattr("linkedin_mcp_server.cli_main.create_mcp_server", lambda: mcp) - with pytest.raises(SystemExit) as exit_info: - cli_main.main() + cli_main.main() - assert exit_info.value.code == 1 + mcp.run.assert_called_once_with(transport="stdio") captured = capsys.readouterr() assert captured.out == "" diff --git a/tests/test_error_diagnostics.py b/tests/test_error_diagnostics.py index b176d95a..6f15f8bc 100644 --- a/tests/test_error_diagnostics.py +++ b/tests/test_error_diagnostics.py @@ -3,6 +3,8 @@ import pytest from linkedin_mcp_server.error_diagnostics import ( + _installation_method_lines, + _installation_method_summary, build_issue_diagnostics, format_tool_error_with_diagnostics, ) @@ -224,6 +226,35 @@ def test_build_issue_diagnostics_marks_inferred_tool_and_container_runtime( assert "- Tool: search_jobs" in issue_body +def test_installation_method_lines_marks_managed_runtime() -> None: + lines = _installation_method_lines( + { + "current_runtime_id": "macos-arm64-host", + } + ) + + assert lines[0].startswith("- [ ] Docker") + assert ( + lines[1] + == "- [x] Managed runtime (Claude Desktop MCP Bundle, `uvx`, or local `uv run` setup)" + ) + + +def test_installation_method_summary_returns_managed_runtime_for_non_container() -> ( + None +): + summary = _installation_method_summary( + { + "current_runtime_id": "macos-arm64-host", + } + ) + + assert ( + summary + == "Managed runtime (Claude 
Desktop MCP Bundle, `uvx`, or local `uv run` setup)" + ) + + def test_build_issue_diagnostics_keeps_sensitive_runtime_details_out_of_mcp_payload( monkeypatch, tmp_path ): diff --git a/tests/test_tools.py b/tests/test_tools.py index 857c9a67..8adde605 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -131,6 +131,10 @@ async def test_get_person_profile_auth_error(self, monkeypatch): mock_browser = MagicMock() mock_browser.page = MagicMock() + monkeypatch.setattr( + "linkedin_mcp_server.dependencies.ensure_tool_ready_or_raise", + AsyncMock(return_value=None), + ) monkeypatch.setattr( "linkedin_mcp_server.dependencies.get_or_create_browser", AsyncMock(return_value=mock_browser), From 7d2a5f5ab50f91c1aa1ff38fdde0187c22c28f7d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 21 Mar 2026 18:23:34 +0100 Subject: [PATCH 543/565] chore: bump version to 4.5.0 --- README.md | 2 +- docs/docker-hub.md | 2 +- pyproject.toml | 2 +- uv.lock | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index a6cbff53..bfdcd2c9 100644 --- a/README.md +++ b/README.md @@ -175,7 +175,7 @@ parallel. Use `--log-level DEBUG` to see scraper lock wait/acquire/release logs. ## ๐Ÿณ Docker Setup -**Prerequisites:** Make sure you have [Docker](https://www.docker.com/get-started/) installed and running. +**Prerequisites:** Make sure you have [Docker](https://www.docker.com/get-started/) installed and running, and [uv](https://docs.astral.sh/uv/getting-started/installation/) installed on the host for the one-time `--login` step. ### Authentication diff --git a/docs/docker-hub.md b/docs/docker-hub.md index 4a0a5f8a..5acb00f8 100644 --- a/docs/docker-hub.md +++ b/docs/docker-hub.md @@ -15,7 +15,7 @@ A Model Context Protocol (MCP) server that connects AI assistants to LinkedIn. A ## Quick Start -Create a browser profile locally, then mount it into Docker. 
Docker already includes its own Chromium runtime, so the managed Patchright Chromium browser download used by MCPB/`uvx` is not needed here. +Create a browser profile locally, then mount it into Docker. You still need [uv](https://docs.astral.sh/uv/getting-started/installation/) installed on the host for the one-time `uvx linkedin-scraper-mcp --login` step. Docker already includes its own Chromium runtime, so the managed Patchright Chromium browser download used by MCPB/`uvx` is not needed here. **Step 1: Create profile on the host (one-time setup)** diff --git a/pyproject.toml b/pyproject.toml index 02812b07..f555dfcc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "4.4.2" +version = "4.5.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index b1e15cb8..97620528 100644 --- a/uv.lock +++ b/uv.lock @@ -951,7 +951,7 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "4.4.2" +version = "4.5.0" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From 9e059f18bf6d015cb9e37f8f0d98a9fe0f1513e9 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 21 Mar 2026 17:25:44 +0000 Subject: [PATCH 544/565] chore: update manifest.json and docker-compose.yml to v4.5.0 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index e1a7c331..7aaf242c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:4.4.2 + image: stickerdaniel/linkedin-mcp-server:4.5.0 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index 3d1a8f8f..3d13d707 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "manifest_version": "0.4", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "4.4.2", + "version": "4.5.0", "description": "Connect Claude to LinkedIn for profiles, companies, job details, and people search", "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account with an MCP Bundle (MCPB, formerly DXT). The bundle starts quickly, downloads the Patchright Chromium browser in the background when needed, and opens LinkedIn login on the first auth-requiring tool call.\n\n## First-time managed runtime flow\n\n1. Install the `.mcpb` bundle in Claude Desktop.\n2. Start Claude Desktop; the MCP server starts and begins preparing the Patchright Chromium browser cache under `~/.linkedin-mcp/patchright-browsers`.\n3. If you call a tool before setup finishes, the tool returns a setup-in-progress error.\n4. On the first tool call that needs authentication, a browser window opens so you can sign into LinkedIn.\n5. 
Retry the tool after login completes.\n\nDocker remains available as a separate runtime path, but it still requires host-side `--login`.", "author": { From 57f279406e249c8d61ea36b753a29f44eeaee1b9 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 21 Mar 2026 18:35:11 +0100 Subject: [PATCH 545/565] fix(ci): repair release workflow --- .github/workflows/release.yml | 4 ++-- pyproject.toml | 2 +- uv.lock | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 01db9960..8725401c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -172,8 +172,8 @@ jobs: - name: Validate and build MCP bundle run: | - bunx @anthropic-ai/mcpb validate - bunx @anthropic-ai/mcpb pack + bunx @anthropic-ai/mcpb validate manifest.json + bunx @anthropic-ai/mcpb pack . mv linkedin-mcp-server.mcpb linkedin-mcp-server-v$VERSION.mcpb - name: Generate release notes diff --git a/pyproject.toml b/pyproject.toml index f555dfcc..44f28c84 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "4.5.0" +version = "4.5.1" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 97620528..ec5cccb6 100644 --- a/uv.lock +++ b/uv.lock @@ -951,7 +951,7 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "4.5.0" +version = "4.5.1" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From 38e61c43eaab67d806a28a10b7057904d1e72f61 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 21 Mar 2026 17:38:58 +0000 Subject: [PATCH 546/565] chore: update manifest.json and docker-compose.yml to v4.5.1 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 7aaf242c..f5814f62 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:4.5.0 + image: stickerdaniel/linkedin-mcp-server:4.5.1 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index 3d13d707..2de6e8c8 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "manifest_version": "0.4", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "4.5.0", + "version": "4.5.1", "description": "Connect Claude to LinkedIn for profiles, companies, job details, and people search", "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account with an MCP Bundle (MCPB, formerly DXT). The bundle starts quickly, downloads the Patchright Chromium browser in the background when needed, and opens LinkedIn login on the first auth-requiring tool call.\n\n## First-time managed runtime flow\n\n1. Install the `.mcpb` bundle in Claude Desktop.\n2. Start Claude Desktop; the MCP server starts and begins preparing the Patchright Chromium browser cache under `~/.linkedin-mcp/patchright-browsers`.\n3. If you call a tool before setup finishes, the tool returns a setup-in-progress error.\n4. On the first tool call that needs authentication, a browser window opens so you can sign into LinkedIn.\n5. 
Retry the tool after login completes.\n\nDocker remains available as a separate runtime path, but it still requires host-side `--login`.", "author": { From 06b8884d6b177318f5ad07c2f9edf8b56a8f2cf8 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 21 Mar 2026 18:48:39 +0100 Subject: [PATCH 547/565] docs: trim readme install docs --- README.md | 88 ++++++++++++++++++++++++++----------------------------- 1 file changed, 42 insertions(+), 46 deletions(-) diff --git a/README.md b/README.md index bfdcd2c9..dd3b2b67 100644 --- a/README.md +++ b/README.md @@ -12,8 +12,8 @@ Through this LinkedIn MCP server, AI assistants like Claude can connect to your ## Installation Methods [![uvx](https://img.shields.io/badge/uvx-Quick_Install-de5fe9?style=for-the-badge&logo=data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNDEiIGhlaWdodD0iNDEiIHZpZXdCb3g9IjAgMCA0MSA0MSIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZD0iTS01LjI4NjE5ZS0wNiAwLjE2ODYyOUwwLjA4NDMwOTggMjAuMTY4NUwwLjE1MTc2MiAzNi4xNjgzQzAuMTYxMDc1IDM4LjM3NzQgMS45NTk0NyA0MC4xNjA3IDQuMTY4NTkgNDAuMTUxNEwyMC4xNjg0IDQwLjA4NEwzMC4xNjg0IDQwLjA0MThMMzEuMTg1MiA0MC4wMzc1QzMzLjM4NzcgNDAuMDI4MiAzNS4xNjgzIDM4LjIwMjYgMzUuMTY4MyAzNlYzNkwzNy4wMDAzIDM2TDM3LjAwMDMgMzkuOTk5Mkw0MC4xNjgzIDM5Ljk5OTZMMzkuOTk5NiAtOS45NDY1M2UtMDdMMjEuNTk5OCAwLjA3NzU2ODlMMjEuNjc3NCAxNi4wMTg1TDIxLjY3NzQgMjUuOTk5OEwyMC4wNzc0IDI1Ljk5OThMMTguMzk5OCAyNS45OTk4TDE4LjQ3NzQgMTYuMDMyTDE4LjM5OTggMC4wOTEwNTkzTC01LjI4NjE5ZS0wNiAwLjE2ODYyOVoiIGZpbGw9IiNERTVGRTkiLz4KPC9zdmc+Cg==)](#-uvx-setup-recommended---universal) -[![Docker](https://img.shields.io/badge/Docker-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup) [![Install MCP Bundle](https://img.shields.io/badge/Claude_Desktop_MCPB-d97757?style=for-the-badge&logo=anthropic)](#-claude-desktop-mcp-bundle-formerly-dxt) +[![Docker](https://img.shields.io/badge/Docker-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup) 
[![Development](https://img.shields.io/badge/Development-Local-ffdc53?style=for-the-badge&logo=python&logoColor=ffdc53)](#-local-setup-develop--contribute) @@ -48,10 +48,6 @@ What has Anthropic been posting about recently? https://www.linkedin.com/company | `get_job_details` | Get detailed information about a specific job posting | Working | | `close_session` | Close browser session and clean up resources | Working | -Tool responses keep readable `sections` text and may also include a compact `references` map keyed by section. Each reference includes a typed target, a relative LinkedIn path (or absolute external URL), and a short label/context when available. - -When one section fails but the overall tool call still completes, responses may also include `section_errors`. Each entry contains structured diagnostics for that section, including the error type/message, a compact runtime summary, trace/log locations, matching-open-issue hints when available, and the path to a generated issue-ready markdown report with the full session details. - > [!IMPORTANT] > **Breaking change:** LinkedIn recently made some changes to prevent scraping. The newest version uses [Patchright](https://github.com/Kaliiiiiiiiii-Vinyzu/patchright-python) with persistent browser profiles instead of Playwright with session files. Old `session.json` files and `LINKEDIN_COOKIE` env vars are no longer supported. Run `--login` again to create a new profile + cookie file that can be mounted in docker. 02/2026 @@ -173,6 +169,47 @@ parallel. Use `--log-level DEBUG` to see scraper lock wait/acquire/release logs.

+## ๐Ÿ“ฆ Claude Desktop MCP Bundle (formerly DXT) + +**Prerequisites:** [Claude Desktop](https://claude.ai/download). + +**One-click installation** for Claude Desktop users: + +1. Download the latest `.mcpb` artifact from [releases](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest) +2. Double-click to install it into Claude Desktop +3. Restart Claude Desktop +4. Call any LinkedIn tool + +On startup, the MCP Bundle starts preparing the shared Patchright Chromium browser cache in the background. If you call a tool too early, Claude will surface a setup-in-progress error. On the first tool call that needs authentication, the server opens a LinkedIn login browser window and asks you to retry after sign-in. + +### MCP Bundle Setup Help + +
+โ— Troubleshooting + +**First-time setup behavior:** + +- Claude Desktop starts the bundle immediately; browser setup continues in the background +- If the Patchright Chromium browser is still downloading, retry the tool after a short wait +- Managed browser downloads are shared under `~/.linkedin-mcp/patchright-browsers/` + +**Login issues:** + +- Make sure you have only one active LinkedIn session at a time +- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--login` +- You might get a captcha challenge if you logged in frequently. Run `uvx linkedin-scraper-mcp --login` which opens a browser where you can solve captchas manually. See the [uvx setup](#-uvx-setup-recommended---universal) for prerequisites. + +**Timeout issues:** + +- If pages fail to load or elements aren't found, try increasing the timeout: `--timeout 10000` +- Users on slow connections may need higher values (e.g., 15000-30000ms) +- Can also set via environment variable: `TIMEOUT=10000` + +
+ +
+
+ ## ๐Ÿณ Docker Setup **Prerequisites:** Make sure you have [Docker](https://www.docker.com/get-started/) installed and running, and [uv](https://docs.astral.sh/uv/getting-started/installation/) installed on the host for the one-time `--login` step. @@ -293,47 +330,6 @@ Runtime server logs are emitted by FastMCP/Uvicorn.

-## ๐Ÿ“ฆ Claude Desktop MCP Bundle (formerly DXT) - -**Prerequisites:** [Claude Desktop](https://claude.ai/download). - -**One-click installation** for Claude Desktop users: - -1. Download the latest `.mcpb` artifact from [releases](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest) -2. Double-click to install it into Claude Desktop -3. Restart Claude Desktop -4. Call any LinkedIn tool - -On startup, the MCP Bundle starts preparing the shared Patchright Chromium browser cache in the background. If you call a tool too early, Claude will surface a setup-in-progress error. On the first tool call that needs authentication, the server opens a LinkedIn login browser window and asks you to retry after sign-in. - -### MCP Bundle Setup Help - -
-โ— Troubleshooting - -**First-time setup behavior:** - -- Claude Desktop starts the bundle immediately; browser setup continues in the background -- If the Patchright Chromium browser is still downloading, retry the tool after a short wait -- Managed browser downloads are shared under `~/.linkedin-mcp/patchright-browsers/` - -**Login issues:** - -- Make sure you have only one active LinkedIn session at a time -- LinkedIn may require a login confirmation in the LinkedIn mobile app for `--login` -- You might get a captcha challenge if you logged in frequently. Run `uvx linkedin-scraper-mcp --login` which opens a browser where you can solve captchas manually. See the [uvx setup](#-uvx-setup-recommended---universal) for prerequisites. - -**Timeout issues:** - -- If pages fail to load or elements aren't found, try increasing the timeout: `--timeout 10000` -- Users on slow connections may need higher values (e.g., 15000-30000ms) -- Can also set via environment variable: `TIMEOUT=10000` - -
- -
-
- ## ๐Ÿ Local Setup (Develop & Contribute) Contributions are welcome! See [CONTRIBUTING.md](CONTRIBUTING.md) for architecture guidelines and checklists. Please [open an issue](https://github.com/stickerdaniel/linkedin-mcp-server/issues) first to discuss the feature or bug fix before submitting a PR. From 0222513ee99a28bddcf8dd5a1d241101116163a2 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 21 Mar 2026 19:12:31 +0100 Subject: [PATCH 548/565] fix(mcpb): pin supported python --- .python-version | 2 +- pyproject.toml | 4 +- uv.lock | 378 +----------------------------------------------- 3 files changed, 5 insertions(+), 379 deletions(-) diff --git a/.python-version b/.python-version index 6324d401..24ee5b1b 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.14 +3.13 diff --git a/pyproject.toml b/pyproject.toml index 44f28c84..8d9eac55 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,9 @@ [project] name = "linkedin-scraper-mcp" -version = "4.5.1" +version = "4.5.2" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." 
readme = "README.md" -requires-python = ">=3.12" +requires-python = ">=3.12,<3.14" authors = [ { name = "Daniel Sticker", email = "daniel@sticker.name" } ] diff --git a/uv.lock b/uv.lock index ec5cccb6..0527d6bf 100644 --- a/uv.lock +++ b/uv.lock @@ -1,6 +1,6 @@ version = 1 revision = 3 -requires-python = ">=3.12" +requires-python = ">=3.12, <3.14" [[package]] name = "aiofile" @@ -72,40 +72,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e8/35/4a3daeb8b9fab49240d21c04d50732313295e4bd813a465d840236dd0ce1/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591", size = 1721120, upload-time = "2026-01-03T17:31:12.575Z" }, { url = "https://files.pythonhosted.org/packages/bc/9f/d643bb3c5fb99547323e635e251c609fbbc660d983144cfebec529e09264/aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf", size = 427383, upload-time = "2026-01-03T17:31:14.382Z" }, { url = "https://files.pythonhosted.org/packages/4e/f1/ab0395f8a79933577cdd996dd2f9aa6014af9535f65dddcf88204682fe62/aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e", size = 453899, upload-time = "2026-01-03T17:31:15.958Z" }, - { url = "https://files.pythonhosted.org/packages/99/36/5b6514a9f5d66f4e2597e40dea2e3db271e023eb7a5d22defe96ba560996/aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808", size = 737238, upload-time = "2026-01-03T17:31:17.909Z" }, - { url = "https://files.pythonhosted.org/packages/f7/49/459327f0d5bcd8c6c9ca69e60fdeebc3622861e696490d8674a6d0cb90a6/aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415", size = 492292, upload-time = "2026-01-03T17:31:19.919Z" }, - { url = 
"https://files.pythonhosted.org/packages/e8/0b/b97660c5fd05d3495b4eb27f2d0ef18dc1dc4eff7511a9bf371397ff0264/aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f", size = 493021, upload-time = "2026-01-03T17:31:21.636Z" }, - { url = "https://files.pythonhosted.org/packages/54/d4/438efabdf74e30aeceb890c3290bbaa449780583b1270b00661126b8aae4/aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6", size = 1717263, upload-time = "2026-01-03T17:31:23.296Z" }, - { url = "https://files.pythonhosted.org/packages/71/f2/7bddc7fd612367d1459c5bcf598a9e8f7092d6580d98de0e057eb42697ad/aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687", size = 1669107, upload-time = "2026-01-03T17:31:25.334Z" }, - { url = "https://files.pythonhosted.org/packages/00/5a/1aeaecca40e22560f97610a329e0e5efef5e0b5afdf9f857f0d93839ab2e/aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26", size = 1760196, upload-time = "2026-01-03T17:31:27.394Z" }, - { url = "https://files.pythonhosted.org/packages/f8/f8/0ff6992bea7bd560fc510ea1c815f87eedd745fe035589c71ce05612a19a/aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a", size = 1843591, upload-time = "2026-01-03T17:31:29.238Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d1/e30e537a15f53485b61f5be525f2157da719819e8377298502aebac45536/aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1", size = 1720277, upload-time = "2026-01-03T17:31:31.053Z" }, - { url = "https://files.pythonhosted.org/packages/84/45/23f4c451d8192f553d38d838831ebbc156907ea6e05557f39563101b7717/aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25", size = 1548575, upload-time = "2026-01-03T17:31:32.87Z" }, - { url = "https://files.pythonhosted.org/packages/6a/ed/0a42b127a43712eda7807e7892c083eadfaf8429ca8fb619662a530a3aab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603", size = 1679455, upload-time = "2026-01-03T17:31:34.76Z" }, - { url = "https://files.pythonhosted.org/packages/2e/b5/c05f0c2b4b4fe2c9d55e73b6d3ed4fd6c9dc2684b1d81cbdf77e7fad9adb/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a", size = 1687417, upload-time = "2026-01-03T17:31:36.699Z" }, - { url = "https://files.pythonhosted.org/packages/c9/6b/915bc5dad66aef602b9e459b5a973529304d4e89ca86999d9d75d80cbd0b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926", size = 1729968, upload-time = "2026-01-03T17:31:38.622Z" }, - { url = "https://files.pythonhosted.org/packages/11/3b/e84581290a9520024a08640b63d07673057aec5ca548177a82026187ba73/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba", size = 1545690, upload-time = "2026-01-03T17:31:40.57Z" }, - { url = "https://files.pythonhosted.org/packages/f5/04/0c3655a566c43fd647c81b895dfe361b9f9ad6d58c19309d45cff52d6c3b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c", size = 1746390, 
upload-time = "2026-01-03T17:31:42.857Z" }, - { url = "https://files.pythonhosted.org/packages/1f/53/71165b26978f719c3419381514c9690bd5980e764a09440a10bb816ea4ab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43", size = 1702188, upload-time = "2026-01-03T17:31:44.984Z" }, - { url = "https://files.pythonhosted.org/packages/29/a7/cbe6c9e8e136314fa1980da388a59d2f35f35395948a08b6747baebb6aa6/aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1", size = 433126, upload-time = "2026-01-03T17:31:47.463Z" }, - { url = "https://files.pythonhosted.org/packages/de/56/982704adea7d3b16614fc5936014e9af85c0e34b58f9046655817f04306e/aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984", size = 459128, upload-time = "2026-01-03T17:31:49.2Z" }, - { url = "https://files.pythonhosted.org/packages/6c/2a/3c79b638a9c3d4658d345339d22070241ea341ed4e07b5ac60fb0f418003/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c", size = 769512, upload-time = "2026-01-03T17:31:51.134Z" }, - { url = "https://files.pythonhosted.org/packages/29/b9/3e5014d46c0ab0db8707e0ac2711ed28c4da0218c358a4e7c17bae0d8722/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592", size = 506444, upload-time = "2026-01-03T17:31:52.85Z" }, - { url = "https://files.pythonhosted.org/packages/90/03/c1d4ef9a054e151cd7839cdc497f2638f00b93cbe8043983986630d7a80c/aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f", size = 510798, upload-time = "2026-01-03T17:31:54.91Z" }, - { url = 
"https://files.pythonhosted.org/packages/ea/76/8c1e5abbfe8e127c893fe7ead569148a4d5a799f7cf958d8c09f3eedf097/aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29", size = 1868835, upload-time = "2026-01-03T17:31:56.733Z" }, - { url = "https://files.pythonhosted.org/packages/8e/ac/984c5a6f74c363b01ff97adc96a3976d9c98940b8969a1881575b279ac5d/aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc", size = 1720486, upload-time = "2026-01-03T17:31:58.65Z" }, - { url = "https://files.pythonhosted.org/packages/b2/9a/b7039c5f099c4eb632138728828b33428585031a1e658d693d41d07d89d1/aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2", size = 1847951, upload-time = "2026-01-03T17:32:00.989Z" }, - { url = "https://files.pythonhosted.org/packages/3c/02/3bec2b9a1ba3c19ff89a43a19324202b8eb187ca1e928d8bdac9bbdddebd/aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587", size = 1941001, upload-time = "2026-01-03T17:32:03.122Z" }, - { url = "https://files.pythonhosted.org/packages/37/df/d879401cedeef27ac4717f6426c8c36c3091c6e9f08a9178cc87549c537f/aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8", size = 1797246, upload-time = "2026-01-03T17:32:05.255Z" }, - { url = "https://files.pythonhosted.org/packages/8d/15/be122de1f67e6953add23335c8ece6d314ab67c8bebb3f181063010795a7/aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632", size = 1627131, upload-time = "2026-01-03T17:32:07.607Z" }, - { url = "https://files.pythonhosted.org/packages/12/12/70eedcac9134cfa3219ab7af31ea56bc877395b1ac30d65b1bc4b27d0438/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64", size = 1795196, upload-time = "2026-01-03T17:32:09.59Z" }, - { url = "https://files.pythonhosted.org/packages/32/11/b30e1b1cd1f3054af86ebe60df96989c6a414dd87e27ad16950eee420bea/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0", size = 1782841, upload-time = "2026-01-03T17:32:11.445Z" }, - { url = "https://files.pythonhosted.org/packages/88/0d/d98a9367b38912384a17e287850f5695c528cff0f14f791ce8ee2e4f7796/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56", size = 1795193, upload-time = "2026-01-03T17:32:13.705Z" }, - { url = "https://files.pythonhosted.org/packages/43/a5/a2dfd1f5ff5581632c7f6a30e1744deda03808974f94f6534241ef60c751/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72", size = 1621979, upload-time = "2026-01-03T17:32:15.965Z" }, - { url = "https://files.pythonhosted.org/packages/fa/f0/12973c382ae7c1cccbc4417e129c5bf54c374dfb85af70893646e1f0e749/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df", size = 1822193, upload-time = "2026-01-03T17:32:18.219Z" }, - { url = "https://files.pythonhosted.org/packages/3c/5f/24155e30ba7f8c96918af1350eb0663e2430aad9e001c0489d89cd708ab1/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa", size = 1769801, upload-time = 
"2026-01-03T17:32:20.25Z" }, - { url = "https://files.pythonhosted.org/packages/eb/f8/7314031ff5c10e6ece114da79b338ec17eeff3a079e53151f7e9f43c4723/aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767", size = 466523, upload-time = "2026-01-03T17:32:22.215Z" }, - { url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" }, ] [[package]] @@ -218,10 +184,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a3/9f/f21af50e72117eb528c422d4276cbac11fb941b1b812b182e0a9c70d19c5/caio-0.9.25-cp313-cp313-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0998210a4d5cd5cb565b32ccfe4e53d67303f868a76f212e002a8554692870e6", size = 81900, upload-time = "2025-12-26T15:22:21.919Z" }, { url = "https://files.pythonhosted.org/packages/9c/12/c39ae2a4037cb10ad5eb3578eb4d5f8c1a2575c62bba675f3406b7ef0824/caio-0.9.25-cp313-cp313-manylinux_2_34_aarch64.whl", hash = "sha256:1a177d4777141b96f175fe2c37a3d96dec7911ed9ad5f02bac38aaa1c936611f", size = 81523, upload-time = "2026-03-04T22:08:25.187Z" }, { url = "https://files.pythonhosted.org/packages/22/59/f8f2e950eb4f1a5a3883e198dca514b9d475415cb6cd7b78b9213a0dd45a/caio-0.9.25-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:9ed3cfb28c0e99fec5e208c934e5c157d0866aa9c32aa4dc5e9b6034af6286b7", size = 80243, upload-time = "2026-03-04T22:08:26.449Z" }, - { url = "https://files.pythonhosted.org/packages/69/ca/a08fdc7efdcc24e6a6131a93c85be1f204d41c58f474c42b0670af8c016b/caio-0.9.25-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fab6078b9348e883c80a5e14b382e6ad6aabbc4429ca034e76e730cf464269db", size = 36978, upload-time = "2025-12-26T15:21:41.055Z" }, - { url = 
"https://files.pythonhosted.org/packages/5e/6c/d4d24f65e690213c097174d26eda6831f45f4734d9d036d81790a27e7b78/caio-0.9.25-cp314-cp314-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:44a6b58e52d488c75cfaa5ecaa404b2b41cc965e6c417e03251e868ecd5b6d77", size = 81832, upload-time = "2025-12-26T15:22:22.757Z" }, - { url = "https://files.pythonhosted.org/packages/87/a4/e534cf7d2d0e8d880e25dd61e8d921ffcfe15bd696734589826f5a2df727/caio-0.9.25-cp314-cp314-manylinux_2_34_aarch64.whl", hash = "sha256:628a630eb7fb22381dd8e3c8ab7f59e854b9c806639811fc3f4310c6bd711d79", size = 81565, upload-time = "2026-03-04T22:08:27.483Z" }, - { url = "https://files.pythonhosted.org/packages/3f/ed/bf81aeac1d290017e5e5ac3e880fd56ee15e50a6d0353986799d1bc5cfd5/caio-0.9.25-cp314-cp314-manylinux_2_34_x86_64.whl", hash = "sha256:0ba16aa605ccb174665357fc729cf500679c2d94d5f1458a6f0d5ca48f2060a7", size = 80071, upload-time = "2026-03-04T22:08:28.751Z" }, { url = "https://files.pythonhosted.org/packages/86/93/1f76c8d1bafe3b0614e06b2195784a3765bbf7b0a067661af9e2dd47fc33/caio-0.9.25-py3-none-any.whl", hash = "sha256:06c0bb02d6b929119b1cfbe1ca403c768b2013a369e2db46bfa2a5761cf82e40", size = 19087, upload-time = "2025-12-26T15:22:00.221Z" }, ] @@ -267,28 +229,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, { url = 
"https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, - { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, - { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, - { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, - { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, - { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, - { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, - { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, - { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, - { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, - { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, - { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", 
hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, - { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, - { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, - { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, - { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, - { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, - { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, - { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, - { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, - { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, - { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, ] [[package]] @@ -372,36 +312,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/22/63/01ff182fc95f260b539590fb12c11ad3e21332c15f9799cb5e2386f71d9f/coverage-7.13.4-cp313-cp313t-win32.whl", hash = "sha256:2fa8d5f8de70688a28240de9e139fa16b153cc3cbb01c5f16d88d6505ebdadf9", size = 222688, upload-time = "2026-02-09T12:58:02.736Z" }, { url = "https://files.pythonhosted.org/packages/a9/43/89de4ef5d3cd53b886afa114065f7e9d3707bdb3e5efae13535b46ae483d/coverage-7.13.4-cp313-cp313t-win_amd64.whl", hash = "sha256:9351229c8c8407645840edcc277f4a2d44814d1bc34a2128c11c2a031d45a5dd", size = 223746, upload-time = "2026-02-09T12:58:05.362Z" }, { url = 
"https://files.pythonhosted.org/packages/35/39/7cf0aa9a10d470a5309b38b289b9bb07ddeac5d61af9b664fe9775a4cb3e/coverage-7.13.4-cp313-cp313t-win_arm64.whl", hash = "sha256:30b8d0512f2dc8c8747557e8fb459d6176a2c9e5731e2b74d311c03b78451997", size = 222003, upload-time = "2026-02-09T12:58:06.952Z" }, - { url = "https://files.pythonhosted.org/packages/92/11/a9cf762bb83386467737d32187756a42094927150c3e107df4cb078e8590/coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601", size = 219522, upload-time = "2026-02-09T12:58:08.623Z" }, - { url = "https://files.pythonhosted.org/packages/d3/28/56e6d892b7b052236d67c95f1936b6a7cf7c3e2634bf27610b8cbd7f9c60/coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689", size = 219855, upload-time = "2026-02-09T12:58:10.176Z" }, - { url = "https://files.pythonhosted.org/packages/e5/69/233459ee9eb0c0d10fcc2fe425a029b3fa5ce0f040c966ebce851d030c70/coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c", size = 250887, upload-time = "2026-02-09T12:58:12.503Z" }, - { url = "https://files.pythonhosted.org/packages/06/90/2cdab0974b9b5bbc1623f7876b73603aecac11b8d95b85b5b86b32de5eab/coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129", size = 253396, upload-time = "2026-02-09T12:58:14.615Z" }, - { url = "https://files.pythonhosted.org/packages/ac/15/ea4da0f85bf7d7b27635039e649e99deb8173fe551096ea15017f7053537/coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552", size = 254745, upload-time = "2026-02-09T12:58:16.162Z" }, - { url = 
"https://files.pythonhosted.org/packages/99/11/bb356e86920c655ca4d61daee4e2bbc7258f0a37de0be32d233b561134ff/coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a", size = 257055, upload-time = "2026-02-09T12:58:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/c9/0f/9ae1f8cb17029e09da06ca4e28c9e1d5c1c0a511c7074592e37e0836c915/coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356", size = 250911, upload-time = "2026-02-09T12:58:19.495Z" }, - { url = "https://files.pythonhosted.org/packages/89/3a/adfb68558fa815cbc29747b553bc833d2150228f251b127f1ce97e48547c/coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71", size = 252754, upload-time = "2026-02-09T12:58:21.064Z" }, - { url = "https://files.pythonhosted.org/packages/32/b1/540d0c27c4e748bd3cd0bd001076ee416eda993c2bae47a73b7cc9357931/coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5", size = 250720, upload-time = "2026-02-09T12:58:22.622Z" }, - { url = "https://files.pythonhosted.org/packages/c7/95/383609462b3ffb1fe133014a7c84fc0dd01ed55ac6140fa1093b5af7ebb1/coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98", size = 254994, upload-time = "2026-02-09T12:58:24.548Z" }, - { url = "https://files.pythonhosted.org/packages/f7/ba/1761138e86c81680bfc3c49579d66312865457f9fe405b033184e5793cb3/coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5", size = 250531, upload-time = "2026-02-09T12:58:26.271Z" }, - { url = 
"https://files.pythonhosted.org/packages/f8/8e/05900df797a9c11837ab59c4d6fe94094e029582aab75c3309a93e6fb4e3/coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0", size = 252189, upload-time = "2026-02-09T12:58:27.807Z" }, - { url = "https://files.pythonhosted.org/packages/00/bd/29c9f2db9ea4ed2738b8a9508c35626eb205d51af4ab7bf56a21a2e49926/coverage-7.13.4-cp314-cp314-win32.whl", hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb", size = 222258, upload-time = "2026-02-09T12:58:29.441Z" }, - { url = "https://files.pythonhosted.org/packages/a7/4d/1f8e723f6829977410efeb88f73673d794075091c8c7c18848d273dc9d73/coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505", size = 223073, upload-time = "2026-02-09T12:58:31.026Z" }, - { url = "https://files.pythonhosted.org/packages/51/5b/84100025be913b44e082ea32abcf1afbf4e872f5120b7a1cab1d331b1e13/coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2", size = 221638, upload-time = "2026-02-09T12:58:32.599Z" }, - { url = "https://files.pythonhosted.org/packages/a7/e4/c884a405d6ead1370433dad1e3720216b4f9fd8ef5b64bfd984a2a60a11a/coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056", size = 220246, upload-time = "2026-02-09T12:58:34.181Z" }, - { url = "https://files.pythonhosted.org/packages/81/5c/4d7ed8b23b233b0fffbc9dfec53c232be2e695468523242ea9fd30f97ad2/coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc", size = 220514, upload-time = "2026-02-09T12:58:35.704Z" }, - { url = 
"https://files.pythonhosted.org/packages/2f/6f/3284d4203fd2f28edd73034968398cd2d4cb04ab192abc8cff007ea35679/coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9", size = 261877, upload-time = "2026-02-09T12:58:37.864Z" }, - { url = "https://files.pythonhosted.org/packages/09/aa/b672a647bbe1556a85337dc95bfd40d146e9965ead9cc2fe81bde1e5cbce/coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf", size = 264004, upload-time = "2026-02-09T12:58:39.492Z" }, - { url = "https://files.pythonhosted.org/packages/79/a1/aa384dbe9181f98bba87dd23dda436f0c6cf2e148aecbb4e50fc51c1a656/coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55", size = 266408, upload-time = "2026-02-09T12:58:41.852Z" }, - { url = "https://files.pythonhosted.org/packages/53/5e/5150bf17b4019bc600799f376bb9606941e55bd5a775dc1e096b6ffea952/coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72", size = 267544, upload-time = "2026-02-09T12:58:44.093Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/f1de5c675987a4a7a672250d2c5c9d73d289dbf13410f00ed7181d8017dd/coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a", size = 260980, upload-time = "2026-02-09T12:58:45.721Z" }, - { url = "https://files.pythonhosted.org/packages/b3/e3/fe758d01850aa172419a6743fe76ba8b92c29d181d4f676ffe2dae2ba631/coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6", size = 263871, upload-time = "2026-02-09T12:58:47.334Z" }, - { url = "https://files.pythonhosted.org/packages/b6/76/b829869d464115e22499541def9796b25312b8cf235d3bb00b39f1675395/coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3", size = 261472, upload-time = "2026-02-09T12:58:48.995Z" }, - { url = "https://files.pythonhosted.org/packages/14/9e/caedb1679e73e2f6ad240173f55218488bfe043e38da577c4ec977489915/coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750", size = 265210, upload-time = "2026-02-09T12:58:51.178Z" }, - { url = "https://files.pythonhosted.org/packages/3a/10/0dd02cb009b16ede425b49ec344aba13a6ae1dc39600840ea6abcb085ac4/coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39", size = 260319, upload-time = "2026-02-09T12:58:53.081Z" }, - { url = "https://files.pythonhosted.org/packages/92/8e/234d2c927af27c6d7a5ffad5bd2cf31634c46a477b4c7adfbfa66baf7ebb/coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0", size = 262638, upload-time = "2026-02-09T12:58:55.258Z" }, - { url = "https://files.pythonhosted.org/packages/2f/64/e5547c8ff6964e5965c35a480855911b61509cce544f4d442caa759a0702/coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea", size = 223040, upload-time = "2026-02-09T12:58:56.936Z" }, - { url = "https://files.pythonhosted.org/packages/c7/96/38086d58a181aac86d503dfa9c47eb20715a79c3e3acbdf786e92e5c09a8/coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932", size = 224148, upload-time = "2026-02-09T12:58:58.645Z" }, - 
{ url = "https://files.pythonhosted.org/packages/ce/72/8d10abd3740a0beb98c305e0c3faf454366221c0f37a8bcf8f60020bb65a/coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b", size = 222172, upload-time = "2026-02-09T12:59:00.396Z" }, { url = "https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" }, ] @@ -428,20 +338,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" }, { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" }, - { url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" }, - { url = 
"https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" }, - { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" }, - { url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" }, - { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" }, - { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" }, - { url = 
"https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" }, - { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" }, - { url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" }, - { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" }, - { url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" }, - { url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" }, - { url = 
"https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" }, { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" }, { url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, @@ -651,38 +547,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" }, { url = 
"https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, - { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, - { url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, - { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" }, - { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" }, - { url = 
"https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = "2025-10-06T05:37:14.577Z" }, - { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130, upload-time = "2025-10-06T05:37:15.781Z" }, - { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" }, - { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" }, - { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" }, - { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = 
"2025-10-06T05:37:20.969Z" }, - { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = "2025-10-06T05:37:22.252Z" }, - { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, - { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = "2025-10-06T05:37:25.581Z" }, - { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, - { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, - { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, - { url = 
"https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, - { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, - { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" }, - { url = "https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" }, - { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = "2025-10-06T05:37:37.663Z" }, - { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", 
size = 294289, upload-time = "2025-10-06T05:37:39.261Z" }, - { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" }, - { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" }, - { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" }, - { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" }, - { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" }, - { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, - 
{ url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" }, - { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" }, - { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, ] @@ -710,23 +574,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d9/c4/2570fc07f34a39f2caf0bf9f24b0a1a0a47bc2e8e465b2c2424821389dfc/greenlet-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1a9172f5bf6bd88e6ba5a84e0a68afeac9dc7b6b412b245dd64f52d83c81e55b", size = 1640455, upload-time = "2026-02-20T20:21:10.261Z" }, { url = "https://files.pythonhosted.org/packages/91/39/5ef5aa23bc545aa0d31e1b9b55822b32c8da93ba657295840b6b34124009/greenlet-3.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:a7945dd0eab63ded0a48e4dcade82939783c172290a7903ebde9e184333ca124", size = 230961, upload-time = "2026-02-20T20:16:58.461Z" }, { url = 
"https://files.pythonhosted.org/packages/62/6b/a89f8456dcb06becff288f563618e9f20deed8dd29beea14f9a168aef64b/greenlet-3.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:394ead29063ee3515b4e775216cb756b2e3b4a7e55ae8fd884f17fa579e6b327", size = 230221, upload-time = "2026-02-20T20:17:37.152Z" }, - { url = "https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab", size = 279650, upload-time = "2026-02-20T20:18:00.783Z" }, - { url = "https://files.pythonhosted.org/packages/d1/c0/45f93f348fa49abf32ac8439938726c480bd96b2a3c6f4d949ec0124b69f/greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082", size = 650295, upload-time = "2026-02-20T20:47:34.036Z" }, - { url = "https://files.pythonhosted.org/packages/b3/de/dd7589b3f2b8372069ab3e4763ea5329940fc7ad9dcd3e272a37516d7c9b/greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9", size = 662163, upload-time = "2026-02-20T20:56:01.295Z" }, - { url = "https://files.pythonhosted.org/packages/cd/ac/85804f74f1ccea31ba518dcc8ee6f14c79f73fe36fa1beba38930806df09/greenlet-3.3.2-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e3cb43ce200f59483eb82949bf1835a99cf43d7571e900d7c8d5c62cdf25d2f9", size = 675371, upload-time = "2026-02-20T21:02:49.664Z" }, - { url = "https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506", size = 664160, upload-time = "2026-02-20T20:21:04.015Z" }, - { url = 
"https://files.pythonhosted.org/packages/48/cf/56832f0c8255d27f6c35d41b5ec91168d74ec721d85f01a12131eec6b93c/greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce", size = 1619181, upload-time = "2026-02-20T20:49:36.052Z" }, - { url = "https://files.pythonhosted.org/packages/0a/23/b90b60a4aabb4cec0796e55f25ffbfb579a907c3898cd2905c8918acaa16/greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5", size = 1687713, upload-time = "2026-02-20T20:21:11.684Z" }, - { url = "https://files.pythonhosted.org/packages/f3/ca/2101ca3d9223a1dc125140dbc063644dca76df6ff356531eb27bc267b446/greenlet-3.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:8c4dd0f3997cf2512f7601563cc90dfb8957c0cff1e3a1b23991d4ea1776c492", size = 232034, upload-time = "2026-02-20T20:20:08.186Z" }, - { url = "https://files.pythonhosted.org/packages/f6/4a/ecf894e962a59dea60f04877eea0fd5724618da89f1867b28ee8b91e811f/greenlet-3.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:cd6f9e2bbd46321ba3bbb4c8a15794d32960e3b0ae2cc4d49a1a53d314805d71", size = 231437, upload-time = "2026-02-20T20:18:59.722Z" }, - { url = "https://files.pythonhosted.org/packages/98/6d/8f2ef704e614bcf58ed43cfb8d87afa1c285e98194ab2cfad351bf04f81e/greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54", size = 286617, upload-time = "2026-02-20T20:19:29.856Z" }, - { url = "https://files.pythonhosted.org/packages/5e/0d/93894161d307c6ea237a43988f27eba0947b360b99ac5239ad3fe09f0b47/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4", size = 655189, upload-time = "2026-02-20T20:47:35.742Z" }, - { url = 
"https://files.pythonhosted.org/packages/f5/2c/d2d506ebd8abcb57386ec4f7ba20f4030cbe56eae541bc6fd6ef399c0b41/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff", size = 658225, upload-time = "2026-02-20T20:56:02.527Z" }, - { url = "https://files.pythonhosted.org/packages/d1/67/8197b7e7e602150938049d8e7f30de1660cfb87e4c8ee349b42b67bdb2e1/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:59b3e2c40f6706b05a9cd299c836c6aa2378cabe25d021acd80f13abf81181cf", size = 666581, upload-time = "2026-02-20T21:02:51.526Z" }, - { url = "https://files.pythonhosted.org/packages/8e/30/3a09155fbf728673a1dea713572d2d31159f824a37c22da82127056c44e4/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4", size = 657907, upload-time = "2026-02-20T20:21:05.259Z" }, - { url = "https://files.pythonhosted.org/packages/f3/fd/d05a4b7acd0154ed758797f0a43b4c0962a843bedfe980115e842c5b2d08/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727", size = 1618857, upload-time = "2026-02-20T20:49:37.309Z" }, - { url = "https://files.pythonhosted.org/packages/6f/e1/50ee92a5db521de8f35075b5eff060dd43d39ebd46c2181a2042f7070385/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e", size = 1680010, upload-time = "2026-02-20T20:21:13.427Z" }, - { url = "https://files.pythonhosted.org/packages/29/4b/45d90626aef8e65336bed690106d1382f7a43665e2249017e9527df8823b/greenlet-3.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c04c5e06ec3e022cbfe2cd4a846e1d4e50087444f875ff6d2c2ad8445495cf1a", size = 237086, upload-time = "2026-02-20T20:20:45.786Z" }, ] [[package]] @@ -951,7 +798,7 @@ wheels = [ [[package]] name = 
"linkedin-scraper-mcp" -version = "4.5.1" +version = "4.5.2" source = { editable = "." } dependencies = [ { name = "fastmcp" }, @@ -1107,42 +954,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/1d/b31650eab6c5778aceed46ba735bd97f7c7d2f54b319fa916c0f96e7805b/multidict-6.7.1-cp313-cp313t-win32.whl", hash = "sha256:df9f19c28adcb40b6aae30bbaa1478c389efd50c28d541d76760199fc1037c32", size = 47770, upload-time = "2026-01-26T02:45:06.754Z" }, { url = "https://files.pythonhosted.org/packages/ac/5b/2d2d1d522e51285bd61b1e20df8f47ae1a9d80839db0b24ea783b3832832/multidict-6.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d54ecf9f301853f2c5e802da559604b3e95bb7a3b01a9c295c6ee591b9882de8", size = 53109, upload-time = "2026-01-26T02:45:08.044Z" }, { url = "https://files.pythonhosted.org/packages/3d/a3/cc409ba012c83ca024a308516703cf339bdc4b696195644a7215a5164a24/multidict-6.7.1-cp313-cp313t-win_arm64.whl", hash = "sha256:5a37ca18e360377cfda1d62f5f382ff41f2b8c4ccb329ed974cc2e1643440118", size = 45573, upload-time = "2026-01-26T02:45:09.349Z" }, - { url = "https://files.pythonhosted.org/packages/91/cc/db74228a8be41884a567e88a62fd589a913708fcf180d029898c17a9a371/multidict-6.7.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8f333ec9c5eb1b7105e3b84b53141e66ca05a19a605368c55450b6ba208cb9ee", size = 75190, upload-time = "2026-01-26T02:45:10.651Z" }, - { url = "https://files.pythonhosted.org/packages/d5/22/492f2246bb5b534abd44804292e81eeaf835388901f0c574bac4eeec73c5/multidict-6.7.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a407f13c188f804c759fc6a9f88286a565c242a76b27626594c133b82883b5c2", size = 44486, upload-time = "2026-01-26T02:45:11.938Z" }, - { url = "https://files.pythonhosted.org/packages/f1/4f/733c48f270565d78b4544f2baddc2fb2a245e5a8640254b12c36ac7ac68e/multidict-6.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0e161ddf326db5577c3a4cc2d8648f81456e8a20d40415541587a71620d7a7d1", size = 43219, upload-time = "2026-01-26T02:45:14.346Z" }, - 
{ url = "https://files.pythonhosted.org/packages/24/bb/2c0c2287963f4259c85e8bcbba9182ced8d7fca65c780c38e99e61629d11/multidict-6.7.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1e3a8bb24342a8201d178c3b4984c26ba81a577c80d4d525727427460a50c22d", size = 245132, upload-time = "2026-01-26T02:45:15.712Z" }, - { url = "https://files.pythonhosted.org/packages/a7/f9/44d4b3064c65079d2467888794dea218d1601898ac50222ab8a9a8094460/multidict-6.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97231140a50f5d447d3164f994b86a0bed7cd016e2682f8650d6a9158e14fd31", size = 252420, upload-time = "2026-01-26T02:45:17.293Z" }, - { url = "https://files.pythonhosted.org/packages/8b/13/78f7275e73fa17b24c9a51b0bd9d73ba64bb32d0ed51b02a746eb876abe7/multidict-6.7.1-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6b10359683bd8806a200fd2909e7c8ca3a7b24ec1d8132e483d58e791d881048", size = 233510, upload-time = "2026-01-26T02:45:19.356Z" }, - { url = "https://files.pythonhosted.org/packages/4b/25/8167187f62ae3cbd52da7893f58cb036b47ea3fb67138787c76800158982/multidict-6.7.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:283ddac99f7ac25a4acadbf004cb5ae34480bbeb063520f70ce397b281859362", size = 264094, upload-time = "2026-01-26T02:45:20.834Z" }, - { url = "https://files.pythonhosted.org/packages/a1/e7/69a3a83b7b030cf283fb06ce074a05a02322359783424d7edf0f15fe5022/multidict-6.7.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:538cec1e18c067d0e6103aa9a74f9e832904c957adc260e61cd9d8cf0c3b3d37", size = 260786, upload-time = "2026-01-26T02:45:22.818Z" }, - { url = "https://files.pythonhosted.org/packages/fe/3b/8ec5074bcfc450fe84273713b4b0a0dd47c0249358f5d82eb8104ffe2520/multidict-6.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:7eee46ccb30ff48a1e35bb818cc90846c6be2b68240e42a78599166722cea709", size = 248483, upload-time = "2026-01-26T02:45:24.368Z" }, - { url = "https://files.pythonhosted.org/packages/48/5a/d5a99e3acbca0e29c5d9cba8f92ceb15dce78bab963b308ae692981e3a5d/multidict-6.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa263a02f4f2dd2d11a7b1bb4362aa7cb1049f84a9235d31adf63f30143469a0", size = 248403, upload-time = "2026-01-26T02:45:25.982Z" }, - { url = "https://files.pythonhosted.org/packages/35/48/e58cd31f6c7d5102f2a4bf89f96b9cf7e00b6c6f3d04ecc44417c00a5a3c/multidict-6.7.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:2e1425e2f99ec5bd36c15a01b690a1a2456209c5deed58f95469ffb46039ccbb", size = 240315, upload-time = "2026-01-26T02:45:27.487Z" }, - { url = "https://files.pythonhosted.org/packages/94/33/1cd210229559cb90b6786c30676bb0c58249ff42f942765f88793b41fdce/multidict-6.7.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:497394b3239fc6f0e13a78a3e1b61296e72bf1c5f94b4c4eb80b265c37a131cd", size = 245528, upload-time = "2026-01-26T02:45:28.991Z" }, - { url = "https://files.pythonhosted.org/packages/64/f2/6e1107d226278c876c783056b7db43d800bb64c6131cec9c8dfb6903698e/multidict-6.7.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:233b398c29d3f1b9676b4b6f75c518a06fcb2ea0b925119fb2c1bc35c05e1601", size = 258784, upload-time = "2026-01-26T02:45:30.503Z" }, - { url = "https://files.pythonhosted.org/packages/4d/c1/11f664f14d525e4a1b5327a82d4de61a1db604ab34c6603bb3c2cc63ad34/multidict-6.7.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:93b1818e4a6e0930454f0f2af7dfce69307ca03cdcfb3739bf4d91241967b6c1", size = 251980, upload-time = "2026-01-26T02:45:32.603Z" }, - { url = "https://files.pythonhosted.org/packages/e1/9f/75a9ac888121d0c5bbd4ecf4eead45668b1766f6baabfb3b7f66a410e231/multidict-6.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f33dc2a3abe9249ea5d8360f969ec7f4142e7ac45ee7014d8f8d5acddf178b7b", size = 243602, upload-time = 
"2026-01-26T02:45:34.043Z" }, - { url = "https://files.pythonhosted.org/packages/9a/e7/50bf7b004cc8525d80dbbbedfdc7aed3e4c323810890be4413e589074032/multidict-6.7.1-cp314-cp314-win32.whl", hash = "sha256:3ab8b9d8b75aef9df299595d5388b14530839f6422333357af1339443cff777d", size = 40930, upload-time = "2026-01-26T02:45:36.278Z" }, - { url = "https://files.pythonhosted.org/packages/e0/bf/52f25716bbe93745595800f36fb17b73711f14da59ed0bb2eba141bc9f0f/multidict-6.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:5e01429a929600e7dab7b166062d9bb54a5eed752384c7384c968c2afab8f50f", size = 45074, upload-time = "2026-01-26T02:45:37.546Z" }, - { url = "https://files.pythonhosted.org/packages/97/ab/22803b03285fa3a525f48217963da3a65ae40f6a1b6f6cf2768879e208f9/multidict-6.7.1-cp314-cp314-win_arm64.whl", hash = "sha256:4885cb0e817aef5d00a2e8451d4665c1808378dc27c2705f1bf4ef8505c0d2e5", size = 42471, upload-time = "2026-01-26T02:45:38.889Z" }, - { url = "https://files.pythonhosted.org/packages/e0/6d/f9293baa6146ba9507e360ea0292b6422b016907c393e2f63fc40ab7b7b5/multidict-6.7.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0458c978acd8e6ea53c81eefaddbbee9c6c5e591f41b3f5e8e194780fe026581", size = 82401, upload-time = "2026-01-26T02:45:40.254Z" }, - { url = "https://files.pythonhosted.org/packages/7a/68/53b5494738d83558d87c3c71a486504d8373421c3e0dbb6d0db48ad42ee0/multidict-6.7.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c0abd12629b0af3cf590982c0b413b1e7395cd4ec026f30986818ab95bfaa94a", size = 48143, upload-time = "2026-01-26T02:45:41.635Z" }, - { url = "https://files.pythonhosted.org/packages/37/e8/5284c53310dcdc99ce5d66563f6e5773531a9b9fe9ec7a615e9bc306b05f/multidict-6.7.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:14525a5f61d7d0c94b368a42cff4c9a4e7ba2d52e2672a7b23d84dc86fb02b0c", size = 46507, upload-time = "2026-01-26T02:45:42.99Z" }, - { url = 
"https://files.pythonhosted.org/packages/e4/fc/6800d0e5b3875568b4083ecf5f310dcf91d86d52573160834fb4bfcf5e4f/multidict-6.7.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17307b22c217b4cf05033dabefe68255a534d637c6c9b0cc8382718f87be4262", size = 239358, upload-time = "2026-01-26T02:45:44.376Z" }, - { url = "https://files.pythonhosted.org/packages/41/75/4ad0973179361cdf3a113905e6e088173198349131be2b390f9fa4da5fc6/multidict-6.7.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a7e590ff876a3eaf1c02a4dfe0724b6e69a9e9de6d8f556816f29c496046e59", size = 246884, upload-time = "2026-01-26T02:45:47.167Z" }, - { url = "https://files.pythonhosted.org/packages/c3/9c/095bb28b5da139bd41fb9a5d5caff412584f377914bd8787c2aa98717130/multidict-6.7.1-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5fa6a95dfee63893d80a34758cd0e0c118a30b8dcb46372bf75106c591b77889", size = 225878, upload-time = "2026-01-26T02:45:48.698Z" }, - { url = "https://files.pythonhosted.org/packages/07/d0/c0a72000243756e8f5a277b6b514fa005f2c73d481b7d9e47cd4568aa2e4/multidict-6.7.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0543217a6a017692aa6ae5cc39adb75e587af0f3a82288b1492eb73dd6cc2a4", size = 253542, upload-time = "2026-01-26T02:45:50.164Z" }, - { url = "https://files.pythonhosted.org/packages/c0/6b/f69da15289e384ecf2a68837ec8b5ad8c33e973aa18b266f50fe55f24b8c/multidict-6.7.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f99fe611c312b3c1c0ace793f92464d8cd263cc3b26b5721950d977b006b6c4d", size = 252403, upload-time = "2026-01-26T02:45:51.779Z" }, - { url = "https://files.pythonhosted.org/packages/a2/76/b9669547afa5a1a25cd93eaca91c0da1c095b06b6d2d8ec25b713588d3a1/multidict-6.7.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:9004d8386d133b7e6135679424c91b0b854d2d164af6ea3f289f8f2761064609", size = 244889, upload-time = "2026-01-26T02:45:53.27Z" }, - { url = "https://files.pythonhosted.org/packages/7e/a9/a50d2669e506dad33cfc45b5d574a205587b7b8a5f426f2fbb2e90882588/multidict-6.7.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e628ef0e6859ffd8273c69412a2465c4be4a9517d07261b33334b5ec6f3c7489", size = 241982, upload-time = "2026-01-26T02:45:54.919Z" }, - { url = "https://files.pythonhosted.org/packages/c5/bb/1609558ad8b456b4827d3c5a5b775c93b87878fd3117ed3db3423dfbce1b/multidict-6.7.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:841189848ba629c3552035a6a7f5bf3b02eb304e9fea7492ca220a8eda6b0e5c", size = 232415, upload-time = "2026-01-26T02:45:56.981Z" }, - { url = "https://files.pythonhosted.org/packages/d8/59/6f61039d2aa9261871e03ab9dc058a550d240f25859b05b67fd70f80d4b3/multidict-6.7.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce1bbd7d780bb5a0da032e095c951f7014d6b0a205f8318308140f1a6aba159e", size = 240337, upload-time = "2026-01-26T02:45:58.698Z" }, - { url = "https://files.pythonhosted.org/packages/a1/29/fdc6a43c203890dc2ae9249971ecd0c41deaedfe00d25cb6564b2edd99eb/multidict-6.7.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b26684587228afed0d50cf804cc71062cc9c1cdf55051c4c6345d372947b268c", size = 248788, upload-time = "2026-01-26T02:46:00.862Z" }, - { url = "https://files.pythonhosted.org/packages/a9/14/a153a06101323e4cf086ecee3faadba52ff71633d471f9685c42e3736163/multidict-6.7.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9f9af11306994335398293f9958071019e3ab95e9a707dc1383a35613f6abcb9", size = 242842, upload-time = "2026-01-26T02:46:02.824Z" }, - { url = "https://files.pythonhosted.org/packages/41/5f/604ae839e64a4a6efc80db94465348d3b328ee955e37acb24badbcd24d83/multidict-6.7.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b4938326284c4f1224178a560987b6cf8b4d38458b113d9b8c1db1a836e640a2", size = 240237, upload-time = 
"2026-01-26T02:46:05.898Z" }, - { url = "https://files.pythonhosted.org/packages/5f/60/c3a5187bf66f6fb546ff4ab8fb5a077cbdd832d7b1908d4365c7f74a1917/multidict-6.7.1-cp314-cp314t-win32.whl", hash = "sha256:98655c737850c064a65e006a3df7c997cd3b220be4ec8fe26215760b9697d4d7", size = 48008, upload-time = "2026-01-26T02:46:07.468Z" }, - { url = "https://files.pythonhosted.org/packages/0c/f7/addf1087b860ac60e6f382240f64fb99f8bfb532bb06f7c542b83c29ca61/multidict-6.7.1-cp314-cp314t-win_amd64.whl", hash = "sha256:497bde6223c212ba11d462853cfa4f0ae6ef97465033e7dc9940cdb3ab5b48e5", size = 53542, upload-time = "2026-01-26T02:46:08.809Z" }, - { url = "https://files.pythonhosted.org/packages/4c/81/4629d0aa32302ef7b2ec65c75a728cc5ff4fa410c50096174c1632e70b3e/multidict-6.7.1-cp314-cp314t-win_arm64.whl", hash = "sha256:2bbd113e0d4af5db41d5ebfe9ccaff89de2120578164f86a5d17d5a576d1e5b2", size = 44719, upload-time = "2026-01-26T02:46:11.146Z" }, { url = "https://files.pythonhosted.org/packages/81/08/7036c080d7117f28a4af526d794aab6a84463126db031b007717c1a6676e/multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56", size = 12319, upload-time = "2026-01-26T02:46:44.004Z" }, ] @@ -1302,36 +1113,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, { url = 
"https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, - { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, - { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, - { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, - { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, - { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, - { url = 
"https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = "2025-10-08T19:47:59.317Z" }, - { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424, upload-time = "2025-10-08T19:48:00.67Z" }, - { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566, upload-time = "2025-10-08T19:48:02.604Z" }, - { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" }, - { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, - { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, - { url = 
"https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, - { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, - { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, - { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, - { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, - { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, - { url = 
"https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, - { url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, - { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, - { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" }, - { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" }, - { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", 
size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, - { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, - { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = "2025-10-08T19:48:28.65Z" }, - { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, - { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, - { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, - { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, - { url = 
"https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] @@ -1426,34 +1207,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, - { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, - { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, - { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, - { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, - { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, - { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = 
"2025-11-04T13:41:09.827Z" }, - { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, - { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, - { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, - { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, - { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, - { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, - { url = 
"https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, - { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, - { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, - { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, - { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, - { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, - { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, - { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, - { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, - { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, - { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, - { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, - { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, @@ -1616,9 +1369,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, - { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, - { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, - { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, ] [[package]] @@ -1656,24 +1406,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = 
"sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, - { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, - { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, - { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, - { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", 
size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, - { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, - { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, - { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, - { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, - { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, - { url = 
"https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, - { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, - { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, - { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, - { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, - { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, - { url 
= "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, - { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, ] [[package]] @@ -1775,35 +1507,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6d/61/21b8c41f68e60c8cc3b2e25644f0e3681926020f11d06ab0b78e3c6bbff1/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27", size = 555806, upload-time = "2025-11-30T20:23:22.488Z" }, { url = "https://files.pythonhosted.org/packages/f9/39/7e067bb06c31de48de3eb200f9fc7c58982a4d3db44b07e73963e10d3be9/rpds_py-0.30.0-cp313-cp313t-win32.whl", hash = "sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6", size = 211341, upload-time = "2025-11-30T20:23:24.449Z" }, { url = "https://files.pythonhosted.org/packages/0a/4d/222ef0b46443cf4cf46764d9c630f3fe4abaa7245be9417e56e9f52b8f65/rpds_py-0.30.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d", size = 225768, upload-time = "2025-11-30T20:23:25.908Z" }, - { url = "https://files.pythonhosted.org/packages/86/81/dad16382ebbd3d0e0328776d8fd7ca94220e4fa0798d1dc5e7da48cb3201/rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0", size = 362099, upload-time = "2025-11-30T20:23:27.316Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/60/19f7884db5d5603edf3c6bce35408f45ad3e97e10007df0e17dd57af18f8/rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be", size = 353192, upload-time = "2025-11-30T20:23:29.151Z" }, - { url = "https://files.pythonhosted.org/packages/bf/c4/76eb0e1e72d1a9c4703c69607cec123c29028bff28ce41588792417098ac/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f", size = 384080, upload-time = "2025-11-30T20:23:30.785Z" }, - { url = "https://files.pythonhosted.org/packages/72/87/87ea665e92f3298d1b26d78814721dc39ed8d2c74b86e83348d6b48a6f31/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f", size = 394841, upload-time = "2025-11-30T20:23:32.209Z" }, - { url = "https://files.pythonhosted.org/packages/77/ad/7783a89ca0587c15dcbf139b4a8364a872a25f861bdb88ed99f9b0dec985/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87", size = 516670, upload-time = "2025-11-30T20:23:33.742Z" }, - { url = "https://files.pythonhosted.org/packages/5b/3c/2882bdac942bd2172f3da574eab16f309ae10a3925644e969536553cb4ee/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18", size = 408005, upload-time = "2025-11-30T20:23:35.253Z" }, - { url = "https://files.pythonhosted.org/packages/ce/81/9a91c0111ce1758c92516a3e44776920b579d9a7c09b2b06b642d4de3f0f/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad", size = 382112, upload-time = "2025-11-30T20:23:36.842Z" }, - { url = 
"https://files.pythonhosted.org/packages/cf/8e/1da49d4a107027e5fbc64daeab96a0706361a2918da10cb41769244b805d/rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07", size = 399049, upload-time = "2025-11-30T20:23:38.343Z" }, - { url = "https://files.pythonhosted.org/packages/df/5a/7ee239b1aa48a127570ec03becbb29c9d5a9eb092febbd1699d567cae859/rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f", size = 415661, upload-time = "2025-11-30T20:23:40.263Z" }, - { url = "https://files.pythonhosted.org/packages/70/ea/caa143cf6b772f823bc7929a45da1fa83569ee49b11d18d0ada7f5ee6fd6/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65", size = 565606, upload-time = "2025-11-30T20:23:42.186Z" }, - { url = "https://files.pythonhosted.org/packages/64/91/ac20ba2d69303f961ad8cf55bf7dbdb4763f627291ba3d0d7d67333cced9/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f", size = 591126, upload-time = "2025-11-30T20:23:44.086Z" }, - { url = "https://files.pythonhosted.org/packages/21/20/7ff5f3c8b00c8a95f75985128c26ba44503fb35b8e0259d812766ea966c7/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53", size = 553371, upload-time = "2025-11-30T20:23:46.004Z" }, - { url = "https://files.pythonhosted.org/packages/72/c7/81dadd7b27c8ee391c132a6b192111ca58d866577ce2d9b0ca157552cce0/rpds_py-0.30.0-cp314-cp314-win32.whl", hash = "sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed", size = 215298, upload-time = "2025-11-30T20:23:47.696Z" }, - { url = 
"https://files.pythonhosted.org/packages/3e/d2/1aaac33287e8cfb07aab2e6b8ac1deca62f6f65411344f1433c55e6f3eb8/rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950", size = 228604, upload-time = "2025-11-30T20:23:49.501Z" }, - { url = "https://files.pythonhosted.org/packages/e8/95/ab005315818cc519ad074cb7784dae60d939163108bd2b394e60dc7b5461/rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6", size = 222391, upload-time = "2025-11-30T20:23:50.96Z" }, - { url = "https://files.pythonhosted.org/packages/9e/68/154fe0194d83b973cdedcdcc88947a2752411165930182ae41d983dcefa6/rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb", size = 364868, upload-time = "2025-11-30T20:23:52.494Z" }, - { url = "https://files.pythonhosted.org/packages/83/69/8bbc8b07ec854d92a8b75668c24d2abcb1719ebf890f5604c61c9369a16f/rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8", size = 353747, upload-time = "2025-11-30T20:23:54.036Z" }, - { url = "https://files.pythonhosted.org/packages/ab/00/ba2e50183dbd9abcce9497fa5149c62b4ff3e22d338a30d690f9af970561/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7", size = 383795, upload-time = "2025-11-30T20:23:55.556Z" }, - { url = "https://files.pythonhosted.org/packages/05/6f/86f0272b84926bcb0e4c972262f54223e8ecc556b3224d281e6598fc9268/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898", size = 393330, upload-time = "2025-11-30T20:23:57.033Z" }, - { url = 
"https://files.pythonhosted.org/packages/cb/e9/0e02bb2e6dc63d212641da45df2b0bf29699d01715913e0d0f017ee29438/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e", size = 518194, upload-time = "2025-11-30T20:23:58.637Z" }, - { url = "https://files.pythonhosted.org/packages/ee/ca/be7bca14cf21513bdf9c0606aba17d1f389ea2b6987035eb4f62bd923f25/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419", size = 408340, upload-time = "2025-11-30T20:24:00.2Z" }, - { url = "https://files.pythonhosted.org/packages/c2/c7/736e00ebf39ed81d75544c0da6ef7b0998f8201b369acf842f9a90dc8fce/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551", size = 383765, upload-time = "2025-11-30T20:24:01.759Z" }, - { url = "https://files.pythonhosted.org/packages/4a/3f/da50dfde9956aaf365c4adc9533b100008ed31aea635f2b8d7b627e25b49/rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8", size = 396834, upload-time = "2025-11-30T20:24:03.687Z" }, - { url = "https://files.pythonhosted.org/packages/4e/00/34bcc2565b6020eab2623349efbdec810676ad571995911f1abdae62a3a0/rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5", size = 415470, upload-time = "2025-11-30T20:24:05.232Z" }, - { url = "https://files.pythonhosted.org/packages/8c/28/882e72b5b3e6f718d5453bd4d0d9cf8df36fddeb4ddbbab17869d5868616/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404", size = 565630, upload-time = "2025-11-30T20:24:06.878Z" }, - { url = 
"https://files.pythonhosted.org/packages/3b/97/04a65539c17692de5b85c6e293520fd01317fd878ea1995f0367d4532fb1/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856", size = 591148, upload-time = "2025-11-30T20:24:08.445Z" }, - { url = "https://files.pythonhosted.org/packages/85/70/92482ccffb96f5441aab93e26c4d66489eb599efdcf96fad90c14bbfb976/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40", size = 556030, upload-time = "2025-11-30T20:24:10.956Z" }, - { url = "https://files.pythonhosted.org/packages/20/53/7c7e784abfa500a2b6b583b147ee4bb5a2b3747a9166bab52fec4b5b5e7d/rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0", size = 211570, upload-time = "2025-11-30T20:24:12.735Z" }, - { url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" }, ] [[package]] @@ -2009,29 +1712,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = "2025-10-14T15:05:23.348Z" }, { url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" }, { url = 
"https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" }, - { url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" }, - { url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" }, - { url = "https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" }, - { url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" }, - { url = "https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" }, - { url = 
"https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" }, - { url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" }, - { url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = "2025-10-14T15:05:34.189Z" }, - { url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" }, - { url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" }, - { url = "https://files.pythonhosted.org/packages/1d/ce/d8acdc8de545de995c339be67711e474c77d643555a9bb74a9334252bd55/watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b", size = 272078, upload-time = "2025-10-14T15:05:37.63Z" }, - { url = 
"https://files.pythonhosted.org/packages/c4/c9/a74487f72d0451524be827e8edec251da0cc1fcf111646a511ae752e1a3d/watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a", size = 287664, upload-time = "2025-10-14T15:05:38.95Z" }, - { url = "https://files.pythonhosted.org/packages/df/b8/8ac000702cdd496cdce998c6f4ee0ca1f15977bba51bdf07d872ebdfc34c/watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02", size = 277154, upload-time = "2025-10-14T15:05:39.954Z" }, - { url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" }, - { url = "https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" }, - { url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" }, - { url = "https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" }, - { url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" }, - { url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" }, - { url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" }, - { url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" }, - { url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" }, ] [[package]] @@ -2067,24 
+1747,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, - { url = "https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" }, - { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" }, - { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" }, - { url = 
"https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" }, - { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" }, - { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" }, - { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" }, - { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" }, - { url = 
"https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" }, - { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" }, - { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" }, - { url = "https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" }, - { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, upload-time = "2026-01-10T09:23:29.449Z" }, - { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" }, - { url = 
"https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, - { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, - { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, ] @@ -2162,42 +1824,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/80/25/a3892b46182c586c202629fc2159aa13975d3741d52ebd7347fd501d48d5/yarl-1.23.0-cp313-cp313t-win32.whl", hash = "sha256:93a784271881035ab4406a172edb0faecb6e7d00f4b53dc2f55919d6c9688595", size = 88313, upload-time = "2026-03-01T22:06:37.39Z" }, { url = "https://files.pythonhosted.org/packages/43/68/8c5b36aa5178900b37387937bc2c2fe0e9505537f713495472dcf6f6fccc/yarl-1.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dd00607bffbf30250fe108065f07453ec124dbf223420f57f5e749b04295e090", size = 94932, upload-time = "2026-03-01T22:06:39.579Z" }, { url = "https://files.pythonhosted.org/packages/c6/cc/d79ba8292f51f81f4dc533a8ccfb9fc6992cabf0998ed3245de7589dc07c/yarl-1.23.0-cp313-cp313t-win_arm64.whl", hash = 
"sha256:ac09d42f48f80c9ee1635b2fcaa819496a44502737660d3c0f2ade7526d29144", size = 84786, upload-time = "2026-03-01T22:06:41.988Z" }, - { url = "https://files.pythonhosted.org/packages/90/98/b85a038d65d1b92c3903ab89444f48d3cee490a883477b716d7a24b1a78c/yarl-1.23.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:21d1b7305a71a15b4794b5ff22e8eef96ff4a6d7f9657155e5aa419444b28912", size = 124455, upload-time = "2026-03-01T22:06:43.615Z" }, - { url = "https://files.pythonhosted.org/packages/39/54/bc2b45559f86543d163b6e294417a107bb87557609007c007ad889afec18/yarl-1.23.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:85610b4f27f69984932a7abbe52703688de3724d9f72bceb1cca667deff27474", size = 86752, upload-time = "2026-03-01T22:06:45.425Z" }, - { url = "https://files.pythonhosted.org/packages/24/f9/e8242b68362bffe6fb536c8db5076861466fc780f0f1b479fc4ffbebb128/yarl-1.23.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23f371bd662cf44a7630d4d113101eafc0cfa7518a2760d20760b26021454719", size = 86291, upload-time = "2026-03-01T22:06:46.974Z" }, - { url = "https://files.pythonhosted.org/packages/ea/d8/d1cb2378c81dd729e98c716582b1ccb08357e8488e4c24714658cc6630e8/yarl-1.23.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a80f77dc1acaaa61f0934176fccca7096d9b1ff08c8ba9cddf5ae034a24319", size = 99026, upload-time = "2026-03-01T22:06:48.459Z" }, - { url = "https://files.pythonhosted.org/packages/0a/ff/7196790538f31debe3341283b5b0707e7feb947620fc5e8236ef28d44f72/yarl-1.23.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:bd654fad46d8d9e823afbb4f87c79160b5a374ed1ff5bde24e542e6ba8f41434", size = 92355, upload-time = "2026-03-01T22:06:50.306Z" }, - { url = "https://files.pythonhosted.org/packages/c1/56/25d58c3eddde825890a5fe6aa1866228377354a3c39262235234ab5f616b/yarl-1.23.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:682bae25f0a0dd23a056739f23a134db9f52a63e2afd6bfb37ddc76292bbd723", size = 106417, upload-time = "2026-03-01T22:06:52.1Z" }, - { url = "https://files.pythonhosted.org/packages/51/8a/882c0e7bc8277eb895b31bce0138f51a1ba551fc2e1ec6753ffc1e7c1377/yarl-1.23.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a82836cab5f197a0514235aaf7ffccdc886ccdaa2324bc0aafdd4ae898103039", size = 106422, upload-time = "2026-03-01T22:06:54.424Z" }, - { url = "https://files.pythonhosted.org/packages/42/2b/fef67d616931055bf3d6764885990a3ac647d68734a2d6a9e1d13de437a2/yarl-1.23.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c57676bdedc94cd3bc37724cf6f8cd2779f02f6aba48de45feca073e714fe52", size = 101915, upload-time = "2026-03-01T22:06:55.895Z" }, - { url = "https://files.pythonhosted.org/packages/18/6a/530e16aebce27c5937920f3431c628a29a4b6b430fab3fd1c117b26ff3f6/yarl-1.23.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c7f8dc16c498ff06497c015642333219871effba93e4a2e8604a06264aca5c5c", size = 100690, upload-time = "2026-03-01T22:06:58.21Z" }, - { url = "https://files.pythonhosted.org/packages/88/08/93749219179a45e27b036e03260fda05190b911de8e18225c294ac95bbc9/yarl-1.23.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5ee586fb17ff8f90c91cf73c6108a434b02d69925f44f5f8e0d7f2f260607eae", size = 98750, upload-time = "2026-03-01T22:06:59.794Z" }, - { url = "https://files.pythonhosted.org/packages/d9/cf/ea424a004969f5d81a362110a6ac1496d79efdc6d50c2c4b2e3ea0fc2519/yarl-1.23.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:17235362f580149742739cc3828b80e24029d08cbb9c4bda0242c7b5bc610a8e", size = 94685, upload-time = "2026-03-01T22:07:01.375Z" }, - { url = "https://files.pythonhosted.org/packages/e2/b7/14341481fe568e2b0408bcf1484c652accafe06a0ade9387b5d3fd9df446/yarl-1.23.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = 
"sha256:0793e2bd0cf14234983bbb371591e6bea9e876ddf6896cdcc93450996b0b5c85", size = 106009, upload-time = "2026-03-01T22:07:03.151Z" }, - { url = "https://files.pythonhosted.org/packages/0a/e6/5c744a9b54f4e8007ad35bce96fbc9218338e84812d36f3390cea616881a/yarl-1.23.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3650dc2480f94f7116c364096bc84b1d602f44224ef7d5c7208425915c0475dd", size = 100033, upload-time = "2026-03-01T22:07:04.701Z" }, - { url = "https://files.pythonhosted.org/packages/0c/23/e3bfc188d0b400f025bc49d99793d02c9abe15752138dcc27e4eaf0c4a9e/yarl-1.23.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f40e782d49630ad384db66d4d8b73ff4f1b8955dc12e26b09a3e3af064b3b9d6", size = 106483, upload-time = "2026-03-01T22:07:06.231Z" }, - { url = "https://files.pythonhosted.org/packages/72/42/f0505f949a90b3f8b7a363d6cbdf398f6e6c58946d85c6d3a3bc70595b26/yarl-1.23.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94f8575fbdf81749008d980c17796097e645574a3b8c28ee313931068dad14fe", size = 102175, upload-time = "2026-03-01T22:07:08.4Z" }, - { url = "https://files.pythonhosted.org/packages/aa/65/b39290f1d892a9dd671d1c722014ca062a9c35d60885d57e5375db0404b5/yarl-1.23.0-cp314-cp314-win32.whl", hash = "sha256:c8aa34a5c864db1087d911a0b902d60d203ea3607d91f615acd3f3108ac32169", size = 83871, upload-time = "2026-03-01T22:07:09.968Z" }, - { url = "https://files.pythonhosted.org/packages/a9/5b/9b92f54c784c26e2a422e55a8d2607ab15b7ea3349e28359282f84f01d43/yarl-1.23.0-cp314-cp314-win_amd64.whl", hash = "sha256:63e92247f383c85ab00dd0091e8c3fa331a96e865459f5ee80353c70a4a42d70", size = 89093, upload-time = "2026-03-01T22:07:11.501Z" }, - { url = "https://files.pythonhosted.org/packages/e0/7d/8a84dc9381fd4412d5e7ff04926f9865f6372b4c2fd91e10092e65d29eb8/yarl-1.23.0-cp314-cp314-win_arm64.whl", hash = "sha256:70efd20be968c76ece7baa8dafe04c5be06abc57f754d6f36f3741f7aa7a208e", size = 83384, upload-time = "2026-03-01T22:07:13.069Z" }, - { url = 
"https://files.pythonhosted.org/packages/dd/8d/d2fad34b1c08aa161b74394183daa7d800141aaaee207317e82c790b418d/yarl-1.23.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:9a18d6f9359e45722c064c97464ec883eb0e0366d33eda61cb19a244bf222679", size = 131019, upload-time = "2026-03-01T22:07:14.903Z" }, - { url = "https://files.pythonhosted.org/packages/19/ff/33009a39d3ccf4b94d7d7880dfe17fb5816c5a4fe0096d9b56abceea9ac7/yarl-1.23.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2803ed8b21ca47a43da80a6fd1ed3019d30061f7061daa35ac54f63933409412", size = 89894, upload-time = "2026-03-01T22:07:17.372Z" }, - { url = "https://files.pythonhosted.org/packages/0c/f1/dab7ac5e7306fb79c0190766a3c00b4cb8d09a1f390ded68c85a5934faf5/yarl-1.23.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:394906945aa8b19fc14a61cf69743a868bb8c465efe85eee687109cc540b98f4", size = 89979, upload-time = "2026-03-01T22:07:19.361Z" }, - { url = "https://files.pythonhosted.org/packages/aa/b1/08e95f3caee1fad6e65017b9f26c1d79877b502622d60e517de01e72f95d/yarl-1.23.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71d006bee8397a4a89f469b8deb22469fe7508132d3c17fa6ed871e79832691c", size = 95943, upload-time = "2026-03-01T22:07:21.266Z" }, - { url = "https://files.pythonhosted.org/packages/c0/cc/6409f9018864a6aa186c61175b977131f373f1988e198e031236916e87e4/yarl-1.23.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:62694e275c93d54f7ccedcfef57d42761b2aad5234b6be1f3e3026cae4001cd4", size = 88786, upload-time = "2026-03-01T22:07:23.129Z" }, - { url = "https://files.pythonhosted.org/packages/76/40/cc22d1d7714b717fde2006fad2ced5efe5580606cb059ae42117542122f3/yarl-1.23.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31de1613658308efdb21ada98cbc86a97c181aa050ba22a808120bb5be3ab94", size = 101307, upload-time = "2026-03-01T22:07:24.689Z" }, - { url = 
"https://files.pythonhosted.org/packages/8f/0d/476c38e85ddb4c6ec6b20b815bdd779aa386a013f3d8b85516feee55c8dc/yarl-1.23.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb1e8b8d66c278b21d13b0a7ca22c41dd757a7c209c6b12c313e445c31dd3b28", size = 100904, upload-time = "2026-03-01T22:07:26.287Z" }, - { url = "https://files.pythonhosted.org/packages/72/32/0abe4a76d59adf2081dcb0397168553ece4616ada1c54d1c49d8936c74f8/yarl-1.23.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50f9d8d531dfb767c565f348f33dd5139a6c43f5cbdf3f67da40d54241df93f6", size = 97728, upload-time = "2026-03-01T22:07:27.906Z" }, - { url = "https://files.pythonhosted.org/packages/b7/35/7b30f4810fba112f60f5a43237545867504e15b1c7647a785fbaf588fac2/yarl-1.23.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:575aa4405a656e61a540f4a80eaa5260f2a38fff7bfdc4b5f611840d76e9e277", size = 95964, upload-time = "2026-03-01T22:07:30.198Z" }, - { url = "https://files.pythonhosted.org/packages/2d/86/ed7a73ab85ef00e8bb70b0cb5421d8a2a625b81a333941a469a6f4022828/yarl-1.23.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:041b1a4cefacf65840b4e295c6985f334ba83c30607441ae3cf206a0eed1a2e4", size = 95882, upload-time = "2026-03-01T22:07:32.132Z" }, - { url = "https://files.pythonhosted.org/packages/19/90/d56967f61a29d8498efb7afb651e0b2b422a1e9b47b0ab5f4e40a19b699b/yarl-1.23.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:d38c1e8231722c4ce40d7593f28d92b5fc72f3e9774fe73d7e800ec32299f63a", size = 90797, upload-time = "2026-03-01T22:07:34.404Z" }, - { url = "https://files.pythonhosted.org/packages/72/00/8b8f76909259f56647adb1011d7ed8b321bcf97e464515c65016a47ecdf0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d53834e23c015ee83a99377db6e5e37d8484f333edb03bd15b4bc312cc7254fb", size = 101023, upload-time = "2026-03-01T22:07:35.953Z" }, - { url = 
"https://files.pythonhosted.org/packages/ac/e2/cab11b126fb7d440281b7df8e9ddbe4851e70a4dde47a202b6642586b8d9/yarl-1.23.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2e27c8841126e017dd2a054a95771569e6070b9ee1b133366d8b31beb5018a41", size = 96227, upload-time = "2026-03-01T22:07:37.594Z" }, - { url = "https://files.pythonhosted.org/packages/c2/9b/2c893e16bfc50e6b2edf76c1a9eb6cb0c744346197e74c65e99ad8d634d0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:76855800ac56f878847a09ce6dba727c93ca2d89c9e9d63002d26b916810b0a2", size = 100302, upload-time = "2026-03-01T22:07:39.334Z" }, - { url = "https://files.pythonhosted.org/packages/28/ec/5498c4e3a6d5f1003beb23405671c2eb9cdbf3067d1c80f15eeafe301010/yarl-1.23.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e09fd068c2e169a7070d83d3bde728a4d48de0549f975290be3c108c02e499b4", size = 98202, upload-time = "2026-03-01T22:07:41.717Z" }, - { url = "https://files.pythonhosted.org/packages/fe/c3/cd737e2d45e70717907f83e146f6949f20cc23cd4bf7b2688727763aa458/yarl-1.23.0-cp314-cp314t-win32.whl", hash = "sha256:73309162a6a571d4cbd3b6a1dcc703c7311843ae0d1578df6f09be4e98df38d4", size = 90558, upload-time = "2026-03-01T22:07:43.433Z" }, - { url = "https://files.pythonhosted.org/packages/e1/19/3774d162f6732d1cfb0b47b4140a942a35ca82bb19b6db1f80e9e7bdc8f8/yarl-1.23.0-cp314-cp314t-win_amd64.whl", hash = "sha256:4503053d296bc6e4cbd1fad61cf3b6e33b939886c4f249ba7c78b602214fabe2", size = 97610, upload-time = "2026-03-01T22:07:45.773Z" }, - { url = "https://files.pythonhosted.org/packages/51/47/3fa2286c3cb162c71cdb34c4224d5745a1ceceb391b2bd9b19b668a8d724/yarl-1.23.0-cp314-cp314t-win_arm64.whl", hash = "sha256:44bb7bef4ea409384e3f8bc36c063d77ea1b8d4a5b2706956c0d6695f07dcc25", size = 86041, upload-time = "2026-03-01T22:07:49.026Z" }, { url = "https://files.pythonhosted.org/packages/69/68/c8739671f5699c7dc470580a4f821ef37c32c4cb0b047ce223a7f115757f/yarl-1.23.0-py3-none-any.whl", hash = 
"sha256:a2df6afe50dea8ae15fa34c9f824a3ee958d785fd5d089063d960bae1daa0a3f", size = 48288, upload-time = "2026-03-01T22:07:51.388Z" }, ] From d8c65064128c7135f9a34d5c393c77446cbd411e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 21 Mar 2026 18:19:49 +0000 Subject: [PATCH 549/565] chore: update manifest.json and docker-compose.yml to v4.5.2 [skip ci] --- docker-compose.yml | 2 +- manifest.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index f5814f62..222b7710 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:4.5.1 + image: stickerdaniel/linkedin-mcp-server:4.5.2 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index 2de6e8c8..7296f619 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "manifest_version": "0.4", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "4.5.1", + "version": "4.5.2", "description": "Connect Claude to LinkedIn for profiles, companies, job details, and people search", "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account with an MCP Bundle (MCPB, formerly DXT). The bundle starts quickly, downloads the Patchright Chromium browser in the background when needed, and opens LinkedIn login on the first auth-requiring tool call.\n\n## First-time managed runtime flow\n\n1. Install the `.mcpb` bundle in Claude Desktop.\n2. Start Claude Desktop; the MCP server starts and begins preparing the Patchright Chromium browser cache under `~/.linkedin-mcp/patchright-browsers`.\n3. If you call a tool before setup finishes, the tool returns a setup-in-progress error.\n4. On the first tool call that needs authentication, a browser window opens so you can sign into LinkedIn.\n5. 
Retry the tool after login completes.\n\nDocker remains available as a separate runtime path, but it still requires host-side `--login`.", "author": { From 217f47f16f2c83aa4c2cd15ac61772e89075c581 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sat, 21 Mar 2026 19:36:01 +0100 Subject: [PATCH 550/565] docs(mcpb): simplify install steps --- README.md | 3 +-- RELEASE_NOTES_TEMPLATE.md | 15 +++++++-------- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index dd3b2b67..e48f3753 100644 --- a/README.md +++ b/README.md @@ -177,8 +177,7 @@ parallel. Use `--log-level DEBUG` to see scraper lock wait/acquire/release logs. 1. Download the latest `.mcpb` artifact from [releases](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest) 2. Double-click to install it into Claude Desktop -3. Restart Claude Desktop -4. Call any LinkedIn tool +3. Call any LinkedIn tool On startup, the MCP Bundle starts preparing the shared Patchright Chromium browser cache in the background. If you call a tool too early, Claude will surface a setup-in-progress error. On the first tool call that needs authentication, the server opens a LinkedIn login browser window and asks you to retry after sign-in. diff --git a/RELEASE_NOTES_TEMPLATE.md b/RELEASE_NOTES_TEMPLATE.md index 8fa3317d..ffdb6768 100644 --- a/RELEASE_NOTES_TEMPLATE.md +++ b/RELEASE_NOTES_TEMPLATE.md @@ -1,5 +1,12 @@ For an installation guide, refer to the [README](https://github.com/stickerdaniel/linkedin-mcp-server/blob/main/README.md). +## ๐Ÿ“ฆ Update MCP Bundle Installation +**For Claude Desktop users:** +1. Download the `.mcpb` file below +2. Double-click the `.mcpb` file to install in Claude Desktop + +> **Note:** MCP Bundles (MCPB) are the renamed successor to DXT/Desktop Extensions. 
+ ## ๐Ÿณ Update Docker Installation **For users with Docker-based MCP client configurations:** ```bash @@ -10,11 +17,3 @@ To pull this specific version, run: ```bash docker pull stickerdaniel/linkedin-mcp-server:${VERSION} ``` - -## ๐Ÿ“ฆ Update MCP Bundle Installation -**For Claude Desktop users:** -1. Download the `.mcpb` file below -2. Double-click the `.mcpb` file to install in Claude Desktop -3. Restart Claude Desktop - -> **Note:** MCP Bundles (MCPB) are the renamed successor to DXT/Desktop Extensions. From 5fdfdc044b9d161ec34b4d6cd1e9c7cc26d59f31 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 22 Mar 2026 09:50:29 +0100 Subject: [PATCH 551/565] fix(docker): use Python 3.13 base image to fix permission denied error The Python 3.14 base image doesn't satisfy the project's requires-python "<3.14" constraint, causing uv to download Python 3.13 into /root/.local/share/uv/python/. The venv symlink then points into /root/ which is inaccessible to the non-root pwuser, resulting in "Permission denied (os error 13)" at startup. Switching the base image to python:3.13-slim-bookworm means the system Python satisfies the constraint directly, so uv uses it in-place and the symlink issue disappears. Renovate will pin the digest automatically. 
Fixes #257 --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index b88f9b15..23f18c1f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ # Use slim Python base instead of full Playwright image (saves ~300-400 MB) # Only Chromium is installed, not Firefox/WebKit -FROM python:3.14-slim-bookworm@sha256:55e465cb7e50cd1d7217fcb5386aa87d0356ca2cd790872142ef68d9ef6812b4 +FROM python:3.13-slim-bookworm # Install uv package manager COPY --from=ghcr.io/astral-sh/uv:latest@sha256:3472e43b4e738cf911c99d41bb34331280efad54c73b1def654a6227bb59b2b4 /uv /uvx /bin/ From bb3513516273d3ba31686507b01106387541f783 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Sun, 22 Mar 2026 09:58:59 +0100 Subject: [PATCH 552/565] chore: bump version to 4.5.3 Fixes #257 --- docker-compose.yml | 2 +- manifest.json | 2 +- pyproject.toml | 2 +- uv.lock | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 222b7710..f869fa57 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: linkedin-mcp: - image: stickerdaniel/linkedin-mcp-server:4.5.2 + image: stickerdaniel/linkedin-mcp-server:4.5.3 volumes: - ~/.linkedin-mcp:/home/pwuser/.linkedin-mcp environment: diff --git a/manifest.json b/manifest.json index 7296f619..e643804e 100644 --- a/manifest.json +++ b/manifest.json @@ -2,7 +2,7 @@ "manifest_version": "0.4", "name": "linkedin-mcp-server", "display_name": "LinkedIn MCP Server", - "version": "4.5.2", + "version": "4.5.3", "description": "Connect Claude to LinkedIn for profiles, companies, job details, and people search", "long_description": "# LinkedIn MCP Server\n\nConnect Claude to your LinkedIn account with an MCP Bundle (MCPB, formerly DXT). The bundle starts quickly, downloads the Patchright Chromium browser in the background when needed, and opens LinkedIn login on the first auth-requiring tool call.\n\n## First-time managed runtime flow\n\n1. 
Install the `.mcpb` bundle in Claude Desktop.\n2. Start Claude Desktop; the MCP server starts and begins preparing the Patchright Chromium browser cache under `~/.linkedin-mcp/patchright-browsers`.\n3. If you call a tool before setup finishes, the tool returns a setup-in-progress error.\n4. On the first tool call that needs authentication, a browser window opens so you can sign into LinkedIn.\n5. Retry the tool after login completes.\n\nDocker remains available as a separate runtime path, but it still requires host-side `--login`.", "author": { diff --git a/pyproject.toml b/pyproject.toml index 8d9eac55..323d1d9c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-scraper-mcp" -version = "4.5.2" +version = "4.5.3" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12,<3.14" diff --git a/uv.lock b/uv.lock index 0527d6bf..158fee76 100644 --- a/uv.lock +++ b/uv.lock @@ -798,7 +798,7 @@ wheels = [ [[package]] name = "linkedin-scraper-mcp" -version = "4.5.2" +version = "4.5.3" source = { editable = "." } dependencies = [ { name = "fastmcp" }, From e42ce3ea1e0873a2bd030d9bf5b3ac82b893ad01 Mon Sep 17 00:00:00 2001 From: Ruslan Strazhnyk Date: Sat, 28 Feb 2026 18:23:17 +0100 Subject: [PATCH 553/565] feat: add bulk connections export tools (get_my_connections, extract_contact_details) Two new MCP tools for collecting LinkedIn connections and enriching them with contact details (email, phone, etc.) in rate-limit-aware chunked batches. 
Co-Authored-By: Claude Opus 4.6 --- linkedin_mcp_server/scraping/extractor.py | 183 +++++++++++++++++++++- linkedin_mcp_server/server.py | 2 + linkedin_mcp_server/tools/connections.py | 156 ++++++++++++++++++ 3 files changed, 340 insertions(+), 1 deletion(-) create mode 100644 linkedin_mcp_server/tools/connections.py diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 42c2e774..09e1aa16 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -4,7 +4,7 @@ from dataclasses import dataclass import logging import re -from typing import Any, Literal +from typing import Any, Callable, Awaitable, Literal from urllib.parse import quote_plus from patchright.async_api import Page, TimeoutError as PlaywrightTimeoutError @@ -1156,3 +1156,184 @@ async def _extract_root_content( {"selectors": selectors}, ) return result + + # ------------------------------------------------------------------ + # Connections bulk export + # ------------------------------------------------------------------ + + async def scrape_connections_list( + self, + limit: int = 0, + max_scrolls: int = 50, + ) -> dict[str, Any]: + """Scrape the authenticated user's connections list via infinite scroll. + + Args: + limit: Maximum connections to return (0 = unlimited). + max_scrolls: Maximum scroll iterations (~1s pause each). + + Returns: + {connections: [{username, name, headline}, ...], total, url, pages_visited} + """ + url = "https://www.linkedin.com/mynetwork/invite-connect/connections/" + + await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + await detect_rate_limit(self._page) + + try: + await self._page.wait_for_selector("main", timeout=10000) + except PlaywrightTimeoutError: + logger.debug("No
element on connections page") + + await handle_modal_close(self._page) + + # Deep scroll to load all connections (infinite scroll) + await scroll_to_bottom(self._page, pause_time=1.0, max_scrolls=max_scrolls) + + # Extract connection data from profile link elements + raw_connections: list[dict[str, str]] = await self._page.evaluate( + """() => { + const results = []; + const seen = new Set(); + const links = document.querySelectorAll('main a[href*="/in/"]'); + for (const a of links) { + const href = a.getAttribute('href') || ''; + const match = href.match(/\\/in\\/([^/?#]+)/); + if (!match) continue; + const username = match[1]; + if (seen.has(username)) continue; + seen.add(username); + + // Walk up to the connection card container + const card = a.closest('li') || a.parentElement; + + // Name: try known selectors, then the link's own visible text + let name = ''; + if (card) { + const nameEl = card.querySelector( + '.mn-connection-card__name, .entity-result__title-text, span[dir="ltr"], span.t-bold' + ); + if (nameEl) name = nameEl.innerText.trim(); + } + if (!name) { + // The profile link itself often contains the person's name + const linkText = a.innerText.trim(); + if (linkText && linkText.length < 80) name = linkText; + } + + // Headline: try known selectors, then parse card text + let headline = ''; + if (card) { + const headlineEl = card.querySelector( + '.mn-connection-card__occupation, .entity-result__primary-subtitle, span.t-normal' + ); + if (headlineEl) headline = headlineEl.innerText.trim(); + } + if (!headline && card) { + // Fallback: split card text by newlines, second non-empty line is usually headline + const lines = card.innerText.split('\\n').map(l => l.trim()).filter(Boolean); + if (lines.length >= 2) headline = lines[1]; + } + + results.push({ username, name, headline }); + } + return results; + }""" + ) + + # Apply limit + if limit > 0: + raw_connections = raw_connections[:limit] + + return { + "connections": raw_connections, + "total": 
len(raw_connections), + "url": url, + "pages_visited": [url], + } + + async def scrape_contact_batch( + self, + usernames: list[str], + chunk_size: int = 5, + chunk_delay: float = 30.0, + progress_cb: Callable[[int, int], Awaitable[None]] | None = None, + ) -> dict[str, Any]: + """Enrich a list of profiles with contact details in chunked batches. + + For each username: scrapes main profile + contact_info overlay. + + Args: + usernames: List of LinkedIn usernames to enrich. + chunk_size: Profiles per chunk before a long pause. + chunk_delay: Seconds to pause between chunks. + progress_cb: Optional async callback(completed, total) for progress. + + Returns: + {contacts: [{username, name, headline, email, phone, location, ...}], + total, failed, rate_limited, pages_visited} + """ + contacts: list[dict[str, Any]] = [] + failed: list[str] = [] + pages_visited: list[str] = [] + total = len(usernames) + rate_limited = False + + for chunk_idx in range(0, total, chunk_size): + chunk = usernames[chunk_idx : chunk_idx + chunk_size] + + for username in chunk: + profile_url = f"https://www.linkedin.com/in/{username}/" + contact_url = f"https://www.linkedin.com/in/{username}/overlay/contact-info/" + + try: + # Scrape main profile page + profile_text = await self.extract_page(profile_url) + pages_visited.append(profile_url) + + # Scrape contact info overlay + contact_text = await self._extract_overlay(contact_url) + pages_visited.append(contact_url) + + contacts.append({ + "username": username, + "profile": profile_text, + "contact_info": contact_text, + }) + + except RateLimitError: + logger.warning("Rate limited during contact batch at %s", username) + rate_limited = True + break + except Exception as e: + logger.warning("Failed to scrape %s: %s", username, e) + failed.append(username) + + # Brief delay between individual profiles + await asyncio.sleep(_NAV_DELAY) + + if rate_limited: + break + + # Report progress after each chunk + completed = min(chunk_idx + len(chunk), 
total) + if progress_cb: + await progress_cb(completed, total) + + # Pause between chunks (skip after last chunk) + if chunk_idx + chunk_size < total: + logger.info( + "Chunk complete (%d/%d). Pausing %.0fs...", + completed, + total, + chunk_delay, + ) + await asyncio.sleep(chunk_delay) + + return { + "contacts": contacts, + "total": len(contacts), + "failed": failed, + "rate_limited": rate_limited, + "pages_visited": pages_visited, + } diff --git a/linkedin_mcp_server/server.py b/linkedin_mcp_server/server.py index bd6a6e4e..fa8a6f9a 100644 --- a/linkedin_mcp_server/server.py +++ b/linkedin_mcp_server/server.py @@ -23,6 +23,7 @@ SequentialToolExecutionMiddleware, ) from linkedin_mcp_server.tools.company import register_company_tools +from linkedin_mcp_server.tools.connections import register_connections_tools from linkedin_mcp_server.tools.job import register_job_tools from linkedin_mcp_server.tools.person import register_person_tools @@ -58,6 +59,7 @@ def create_mcp_server() -> FastMCP: register_person_tools(mcp) register_company_tools(mcp) register_job_tools(mcp) + register_connections_tools(mcp) # Register session management tool @mcp.tool( diff --git a/linkedin_mcp_server/tools/connections.py b/linkedin_mcp_server/tools/connections.py new file mode 100644 index 00000000..60f3df36 --- /dev/null +++ b/linkedin_mcp_server/tools/connections.py @@ -0,0 +1,156 @@ +""" +LinkedIn connections bulk export tools. + +Provides tools for collecting connection usernames via infinite scroll +and enriching profiles with contact details in chunked batches. 
+""" + +import logging +from typing import Any + +from fastmcp import Context, FastMCP +from mcp.types import ToolAnnotations + +from linkedin_mcp_server.drivers.browser import ( + ensure_authenticated, + get_or_create_browser, +) +from linkedin_mcp_server.error_handler import handle_tool_error +from linkedin_mcp_server.scraping import LinkedInExtractor + +logger = logging.getLogger(__name__) + + +def register_connections_tools(mcp: FastMCP) -> None: + """Register all connections-related tools with the MCP server.""" + + @mcp.tool( + annotations=ToolAnnotations( + title="Get My Connections", + readOnlyHint=True, + destructiveHint=False, + openWorldHint=True, + ) + ) + async def get_my_connections( + ctx: Context, + limit: int = 0, + max_scrolls: int = 50, + ) -> dict[str, Any]: + """ + Collect the authenticated user's LinkedIn connections via infinite scroll. + + Navigates to the connections page and scrolls to load all connection cards, + then extracts username, name, and headline from each. + + Args: + ctx: FastMCP context for progress reporting + limit: Maximum connections to return (0 = unlimited, default 0) + max_scrolls: Maximum scroll iterations, ~1s pause each (default 50) + + Returns: + Dict with connections (list of {username, name, headline}), total count, + url visited, and pages_visited list. 
+ """ + try: + await ensure_authenticated() + + logger.info("Collecting connections (limit=%d, max_scrolls=%d)", limit, max_scrolls) + + browser = await get_or_create_browser() + extractor = LinkedInExtractor(browser.page) + + await ctx.report_progress( + progress=0, total=100, message="Loading connections page" + ) + + result = await extractor.scrape_connections_list( + limit=limit, max_scrolls=max_scrolls + ) + + await ctx.report_progress(progress=100, total=100, message="Complete") + + return result + + except Exception as e: + return handle_tool_error(e, "get_my_connections") + + @mcp.tool( + annotations=ToolAnnotations( + title="Extract Contact Details", + readOnlyHint=True, + destructiveHint=False, + openWorldHint=True, + ) + ) + async def extract_contact_details( + usernames: str, + ctx: Context, + chunk_size: int = 5, + chunk_delay: int = 30, + ) -> dict[str, Any]: + """ + Enrich LinkedIn profiles with contact details (email, phone, etc.) in chunked batches. + + For each username, scrapes the main profile page and the contact info overlay. + Processes profiles in chunks with configurable delays to avoid rate limiting. + + Args: + usernames: Comma-separated LinkedIn usernames (e.g. "johndoe,janedoe,bobsmith") + ctx: FastMCP context for progress reporting + chunk_size: Number of profiles per chunk before pausing (default 5) + chunk_delay: Seconds to pause between chunks (default 30) + + Returns: + Dict with contacts (list of {username, profile, contact_info}), + total enriched, failed usernames, rate_limited flag, and pages_visited. + """ + try: + await ensure_authenticated() + + username_list = [u.strip() for u in usernames.split(",") if u.strip()] + + if not username_list: + return { + "error": "invalid_input", + "message": "No valid usernames provided. 
Pass comma-separated usernames.", + } + + logger.info( + "Enriching %d profiles (chunk_size=%d, chunk_delay=%ds)", + len(username_list), + chunk_size, + chunk_delay, + ) + + browser = await get_or_create_browser() + extractor = LinkedInExtractor(browser.page) + + total = len(username_list) + + await ctx.report_progress( + progress=0, total=total, message=f"Starting enrichment of {total} profiles" + ) + + async def on_progress(completed: int, total: int) -> None: + await ctx.report_progress( + progress=completed, + total=total, + message=f"Enriched {completed}/{total} profiles", + ) + + result = await extractor.scrape_contact_batch( + usernames=username_list, + chunk_size=chunk_size, + chunk_delay=float(chunk_delay), + progress_cb=on_progress, + ) + + await ctx.report_progress( + progress=total, total=total, message="Complete" + ) + + return result + + except Exception as e: + return handle_tool_error(e, "extract_contact_details") From ff8f3cbd245b9ea73a7d9a0de038f8e2c7efde36 Mon Sep 17 00:00:00 2001 From: Ruslan Strazhnyk Date: Sat, 28 Feb 2026 18:26:22 +0100 Subject: [PATCH 554/565] fix: lint formatting and missing RateLimitError import Co-Authored-By: Claude Opus 4.6 --- linkedin_mcp_server/scraping/extractor.py | 17 +++++++++++------ linkedin_mcp_server/tools/connections.py | 12 +++++++----- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 09e1aa16..4a72f8cf 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -17,6 +17,7 @@ from linkedin_mcp_server.core.exceptions import ( AuthenticationError, LinkedInScraperException, + RateLimitError, ) from linkedin_mcp_server.debug_trace import record_page_trace from linkedin_mcp_server.debug_utils import stabilize_navigation @@ -1284,7 +1285,9 @@ async def scrape_contact_batch( for username in chunk: profile_url = f"https://www.linkedin.com/in/{username}/" - 
contact_url = f"https://www.linkedin.com/in/{username}/overlay/contact-info/" + contact_url = ( + f"https://www.linkedin.com/in/{username}/overlay/contact-info/" + ) try: # Scrape main profile page @@ -1295,11 +1298,13 @@ async def scrape_contact_batch( contact_text = await self._extract_overlay(contact_url) pages_visited.append(contact_url) - contacts.append({ - "username": username, - "profile": profile_text, - "contact_info": contact_text, - }) + contacts.append( + { + "username": username, + "profile": profile_text, + "contact_info": contact_text, + } + ) except RateLimitError: logger.warning("Rate limited during contact batch at %s", username) diff --git a/linkedin_mcp_server/tools/connections.py b/linkedin_mcp_server/tools/connections.py index 60f3df36..741bce79 100644 --- a/linkedin_mcp_server/tools/connections.py +++ b/linkedin_mcp_server/tools/connections.py @@ -55,7 +55,9 @@ async def get_my_connections( try: await ensure_authenticated() - logger.info("Collecting connections (limit=%d, max_scrolls=%d)", limit, max_scrolls) + logger.info( + "Collecting connections (limit=%d, max_scrolls=%d)", limit, max_scrolls + ) browser = await get_or_create_browser() extractor = LinkedInExtractor(browser.page) @@ -129,7 +131,9 @@ async def extract_contact_details( total = len(username_list) await ctx.report_progress( - progress=0, total=total, message=f"Starting enrichment of {total} profiles" + progress=0, + total=total, + message=f"Starting enrichment of {total} profiles", ) async def on_progress(completed: int, total: int) -> None: @@ -146,9 +150,7 @@ async def on_progress(completed: int, total: int) -> None: progress_cb=on_progress, ) - await ctx.report_progress( - progress=total, total=total, message="Complete" - ) + await ctx.report_progress(progress=total, total=total, message="Complete") return result From a7e90e08ad0efd0cb564532e1702d72d288c2888 Mon Sep 17 00:00:00 2001 From: Ruslan Strazhnyk Date: Sat, 28 Feb 2026 18:53:39 +0100 Subject: [PATCH 555/565] 
=?UTF-8?q?fix:=20address=20Greptile=20review=20?= =?UTF-8?q?=E2=80=94=20chunk=5Fdelay=20type=20and=20progress=20accuracy?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - chunk_delay: int โ†’ float to match scrape_contact_batch signature - Report actual completed count instead of total on early rate-limit stop Co-Authored-By: Claude Opus 4.6 --- linkedin_mcp_server/tools/connections.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/linkedin_mcp_server/tools/connections.py b/linkedin_mcp_server/tools/connections.py index 741bce79..aa7534f1 100644 --- a/linkedin_mcp_server/tools/connections.py +++ b/linkedin_mcp_server/tools/connections.py @@ -89,7 +89,7 @@ async def extract_contact_details( usernames: str, ctx: Context, chunk_size: int = 5, - chunk_delay: int = 30, + chunk_delay: float = 30.0, ) -> dict[str, Any]: """ Enrich LinkedIn profiles with contact details (email, phone, etc.) in chunked batches. @@ -146,11 +146,14 @@ async def on_progress(completed: int, total: int) -> None: result = await extractor.scrape_contact_batch( usernames=username_list, chunk_size=chunk_size, - chunk_delay=float(chunk_delay), + chunk_delay=chunk_delay, progress_cb=on_progress, ) - await ctx.report_progress(progress=total, total=total, message="Complete") + completed = result["total"] + await ctx.report_progress( + progress=completed, total=total, message="Complete" + ) return result From 71b422637f8460cab70173b28aac626ae3f8bc89 Mon Sep 17 00:00:00 2001 From: Ruslan Strazhnyk Date: Sat, 28 Feb 2026 19:00:56 +0100 Subject: [PATCH 556/565] feat: parse extract_contact_details into structured fields Instead of returning raw innerText blobs, parse profile and contact overlay text into structured fields (first_name, last_name, email, phone, headline, location, company, website, birthday). Raw text kept as _raw suffix fields for fallback. 
Co-Authored-By: Claude Opus 4.6 --- linkedin_mcp_server/scraping/extractor.py | 87 ++++++++++++++++++++++- linkedin_mcp_server/tools/connections.py | 10 ++- 2 files changed, 93 insertions(+), 4 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 4a72f8cf..3ada6211 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -156,6 +156,87 @@ def _truncate_linkedin_noise(text: str) -> str: return text[:earliest].strip() +def _parse_contact_record( + profile_text: str, contact_text: str +) -> dict[str, str | None]: + """Parse raw innerText blobs into structured contact fields. + + Profile text layout (first lines): + Name\\n\\nยท 1st\\n\\nHeadline\\n\\nLocation\\n\\nยท\\n\\nContact info\\n\\nCompany + + Contact info overlay layout: + Email\\n\\nuser@example.com\\n\\nPhone\\n\\n+123...\\n\\n... + """ + result: dict[str, str | None] = { + "first_name": None, + "last_name": None, + "headline": None, + "location": None, + "company": None, + "email": None, + "phone": None, + "website": None, + "birthday": None, + } + + # --- Parse profile text --- + if profile_text: + lines = [ln.strip() for ln in profile_text.split("\n")] + non_empty = [ln for ln in lines if ln] + + if non_empty: + # Line 1 โ†’ full name + full_name = non_empty[0] + parts = full_name.split(None, 1) + result["first_name"] = parts[0] if parts else full_name + result["last_name"] = parts[1] if len(parts) > 1 else None + + # Find connection degree marker (ยท 1st, ยท 2nd, ยท 3rd) + degree_idx: int | None = None + for i, ln in enumerate(non_empty): + if re.match(r"^ยท\s*\d+(st|nd|rd)$", ln): + degree_idx = i + break + + if degree_idx is not None and degree_idx + 1 < len(non_empty): + result["headline"] = non_empty[degree_idx + 1] + + # Location is the next non-empty line after headline + if degree_idx + 2 < len(non_empty): + candidate = non_empty[degree_idx + 2] + # Skip if it's just the "ยท" 
separator or "Contact info" + if candidate not in ("ยท", "Contact info"): + result["location"] = candidate + + # Company: line after "Contact info" + for i, ln in enumerate(non_empty): + if ln == "Contact info" and i + 1 < len(non_empty): + result["company"] = non_empty[i + 1] + break + + # --- Parse contact info overlay --- + if contact_text: + # Extract labeled fields: "Label\n\nvalue" + for field, label in [ + ("email", "Email"), + ("phone", "Phone"), + ("birthday", "Birthday"), + ]: + match = re.search( + rf"(?:^|\n){re.escape(label)}\s*\n\s*\n\s*(.+)", + contact_text, + ) + if match: + result[field] = match.group(1).strip() + + # Website may include a type annotation like "(Blog)" or "(Portfolio)" + match = re.search(r"(?:^|\n)Website\s*\n\s*\n\s*(.+)", contact_text) + if match: + result["website"] = match.group(1).strip() + + return result + + class LinkedInExtractor: """Extracts LinkedIn page content via navigate-scroll-innerText pattern.""" @@ -1298,11 +1379,13 @@ async def scrape_contact_batch( contact_text = await self._extract_overlay(contact_url) pages_visited.append(contact_url) + parsed = _parse_contact_record(profile_text, contact_text) contacts.append( { "username": username, - "profile": profile_text, - "contact_info": contact_text, + **parsed, + "profile_raw": profile_text, + "contact_info_raw": contact_text, } ) diff --git a/linkedin_mcp_server/tools/connections.py b/linkedin_mcp_server/tools/connections.py index aa7534f1..355e416d 100644 --- a/linkedin_mcp_server/tools/connections.py +++ b/linkedin_mcp_server/tools/connections.py @@ -104,8 +104,14 @@ async def extract_contact_details( chunk_delay: Seconds to pause between chunks (default 30) Returns: - Dict with contacts (list of {username, profile, contact_info}), - total enriched, failed usernames, rate_limited flag, and pages_visited. + Dict with contacts (list of structured records), total enriched, + failed usernames, rate_limited flag, and pages_visited. 
+ + Each contact record contains: + - username, first_name, last_name: Identity fields + - email, phone, website, birthday: From the contact info overlay + - headline, location, company: From the main profile page + - profile_raw, contact_info_raw: Original innerText as fallback """ try: await ensure_authenticated() From ef8dc9fd5dcb824081dabf2bb0ec444cc97f8c55 Mon Sep 17 00:00:00 2001 From: Ruslan Strazhnyk Date: Sat, 28 Feb 2026 19:05:10 +0100 Subject: [PATCH 557/565] =?UTF-8?q?fix:=20address=20review=20=E2=80=94=20l?= =?UTF-8?q?og=20format=20specifier=20and=20docstring=20accuracy?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Use %.0fs for chunk_delay in log message (float, not int) - Update scrape_contact_batch docstring to list actual structured fields Co-Authored-By: Claude Opus 4.6 --- linkedin_mcp_server/scraping/extractor.py | 4 +++- linkedin_mcp_server/tools/connections.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 3ada6211..64ccf0b7 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -1352,7 +1352,9 @@ async def scrape_contact_batch( progress_cb: Optional async callback(completed, total) for progress. 
Returns: - {contacts: [{username, name, headline, email, phone, location, ...}], + {contacts: [{username, first_name, last_name, email, phone, + headline, location, company, website, birthday, + profile_raw, contact_info_raw}], total, failed, rate_limited, pages_visited} """ contacts: list[dict[str, Any]] = [] diff --git a/linkedin_mcp_server/tools/connections.py b/linkedin_mcp_server/tools/connections.py index 355e416d..66296fb5 100644 --- a/linkedin_mcp_server/tools/connections.py +++ b/linkedin_mcp_server/tools/connections.py @@ -125,7 +125,7 @@ async def extract_contact_details( } logger.info( - "Enriching %d profiles (chunk_size=%d, chunk_delay=%ds)", + "Enriching %d profiles (chunk_size=%d, chunk_delay=%.0fs)", len(username_list), chunk_size, chunk_delay, From cfc15daff975d19561485a1457a3f1d81591f2a0 Mon Sep 17 00:00:00 2001 From: Ruslan Strazhnyk Date: Sat, 28 Feb 2026 19:16:03 +0100 Subject: [PATCH 558/565] fix: show rate-limit context in progress message instead of "Complete" When rate limiting stops processing early, the progress message now shows "Stopped early due to rate limit (N/M processed)" instead of the misleading "Complete". 
Co-Authored-By: Claude Opus 4.6 --- linkedin_mcp_server/tools/connections.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/linkedin_mcp_server/tools/connections.py b/linkedin_mcp_server/tools/connections.py index 66296fb5..d37a21de 100644 --- a/linkedin_mcp_server/tools/connections.py +++ b/linkedin_mcp_server/tools/connections.py @@ -157,9 +157,12 @@ async def on_progress(completed: int, total: int) -> None: ) completed = result["total"] - await ctx.report_progress( - progress=completed, total=total, message="Complete" + msg = ( + "Complete" + if not result.get("rate_limited") + else f"Stopped early due to rate limit ({completed}/{total} processed)" ) + await ctx.report_progress(progress=completed, total=total, message=msg) return result From 7e1b1ca44d67614ff76374e0f3b944eb323a7b02 Mon Sep 17 00:00:00 2001 From: Ruslan Strazhnyk Date: Sat, 28 Feb 2026 19:16:40 +0100 Subject: [PATCH 559/565] Update linkedin_mcp_server/tools/connections.py Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com> --- linkedin_mcp_server/tools/connections.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/linkedin_mcp_server/tools/connections.py b/linkedin_mcp_server/tools/connections.py index d37a21de..07788f81 100644 --- a/linkedin_mcp_server/tools/connections.py +++ b/linkedin_mcp_server/tools/connections.py @@ -158,9 +158,10 @@ async def on_progress(completed: int, total: int) -> None: completed = result["total"] msg = ( - "Complete" - if not result.get("rate_limited") - else f"Stopped early due to rate limit ({completed}/{total} processed)" + completed = result["total"] + msg = "Complete" if not result.get("rate_limited") else f"Stopped early due to rate limit ({completed}/{total} processed)" + await ctx.report_progress( + progress=completed, total=total, message=msg ) await ctx.report_progress(progress=completed, total=total, message=msg) From 6b58d3441e7f144504248c6994c287ad10012501 Mon Sep 
17 00:00:00 2001 From: Ruslan Strazhnyk Date: Sat, 28 Feb 2026 19:25:40 +0100 Subject: [PATCH 560/565] fix: resolve duplicate code from accepted Greptile suggestion The GitHub suggestion merge created duplicate lines (completed/msg assigned twice, report_progress called twice). Cleaned up to single correct version. Co-Authored-By: Claude Opus 4.6 --- linkedin_mcp_server/tools/connections.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/linkedin_mcp_server/tools/connections.py b/linkedin_mcp_server/tools/connections.py index 07788f81..d37a21de 100644 --- a/linkedin_mcp_server/tools/connections.py +++ b/linkedin_mcp_server/tools/connections.py @@ -158,10 +158,9 @@ async def on_progress(completed: int, total: int) -> None: completed = result["total"] msg = ( - completed = result["total"] - msg = "Complete" if not result.get("rate_limited") else f"Stopped early due to rate limit ({completed}/{total} processed)" - await ctx.report_progress( - progress=completed, total=total, message=msg + "Complete" + if not result.get("rate_limited") + else f"Stopped early due to rate limit ({completed}/{total} processed)" ) await ctx.report_progress(progress=completed, total=total, message=msg) From 50e6a59dcf7bb344696314768a0743efb66e3674 Mon Sep 17 00:00:00 2001 From: Ruslan Strazhnyk Date: Sat, 28 Feb 2026 20:06:28 +0100 Subject: [PATCH 561/565] =?UTF-8?q?fix:=20address=20Greptile=20review=20?= =?UTF-8?q?=E2=80=94=20sentinel=20guard,=20chunk=5Fsize=20validation,=20re?= =?UTF-8?q?gex,=20failed=20tracking?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Guard against _RATE_LIMITED_MSG sentinel corrupting parsed records (skip profile on soft rate limit, fall back to empty contact text) - Validate chunk_size > 0 with clear error message - Extend degree regex to match ordinals like "3rd+" and "4th" - Add rate-limited username to failed list for caller resumability Co-Authored-By: Claude Opus 4.6 --- 
linkedin_mcp_server/scraping/extractor.py | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 64ccf0b7..855519ef 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -191,10 +191,10 @@ def _parse_contact_record( result["first_name"] = parts[0] if parts else full_name result["last_name"] = parts[1] if len(parts) > 1 else None - # Find connection degree marker (ยท 1st, ยท 2nd, ยท 3rd) + # Find connection degree marker (ยท 1st, ยท 2nd, ยท 3rd, ยท 3rd+) degree_idx: int | None = None for i, ln in enumerate(non_empty): - if re.match(r"^ยท\s*\d+(st|nd|rd)$", ln): + if re.match(r"^ยท\s*\d+(st|nd|rd|th)\+?$", ln): degree_idx = i break @@ -1357,6 +1357,9 @@ async def scrape_contact_batch( profile_raw, contact_info_raw}], total, failed, rate_limited, pages_visited} """ + if chunk_size <= 0: + raise ValueError(f"chunk_size must be a positive integer, got {chunk_size}") + contacts: list[dict[str, Any]] = [] failed: list[str] = [] pages_visited: list[str] = [] @@ -1377,10 +1380,23 @@ async def scrape_contact_batch( profile_text = await self.extract_page(profile_url) pages_visited.append(profile_url) + if profile_text == _RATE_LIMITED_MSG: + logger.warning( + "Soft rate limit on profile %s, skipping", username + ) + failed.append(username) + await asyncio.sleep(_NAV_DELAY) + continue + # Scrape contact info overlay contact_text = await self._extract_overlay(contact_url) pages_visited.append(contact_url) + if contact_text == _RATE_LIMITED_MSG: + contact_text = ( + "" # fall back to empty; parsed fields will be None + ) + parsed = _parse_contact_record(profile_text, contact_text) contacts.append( { @@ -1393,6 +1409,7 @@ async def scrape_contact_batch( except RateLimitError: logger.warning("Rate limited during contact batch at %s", username) + failed.append(username) rate_limited = True break except 
Exception as e: From 5fd323e82044419550adc0fdff271b02ba2c91cf Mon Sep 17 00:00:00 2001 From: Ruslan Strazhnyk Date: Sat, 28 Feb 2026 20:10:39 +0100 Subject: [PATCH 562/565] fix: deduplicate usernames in extract_contact_details Prevents scraping the same profile twice when duplicate usernames are passed (e.g. "user1,user1,user2"). Co-Authored-By: Claude Opus 4.6 --- linkedin_mcp_server/tools/connections.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/linkedin_mcp_server/tools/connections.py b/linkedin_mcp_server/tools/connections.py index d37a21de..bc73a920 100644 --- a/linkedin_mcp_server/tools/connections.py +++ b/linkedin_mcp_server/tools/connections.py @@ -116,7 +116,9 @@ async def extract_contact_details( try: await ensure_authenticated() - username_list = [u.strip() for u in usernames.split(",") if u.strip()] + username_list = list( + dict.fromkeys(u.strip() for u in usernames.split(",") if u.strip()) + ) if not username_list: return { From 44dbed527c408f0fadfeb3db32507acd438c3a94 Mon Sep 17 00:00:00 2001 From: Ruslan Strazhnyk Date: Sun, 1 Mar 2026 00:08:46 +0100 Subject: [PATCH 563/565] feat: Add network degree filter to search_people Support filtering LinkedIn people search by connection degree (1st/2nd/3rd+) via the `network` parameter passed through to LinkedIn's search URL. Co-Authored-By: Claude Opus 4.6 --- linkedin_mcp_server/scraping/extractor.py | 10 ++++++++++ linkedin_mcp_server/tools/person.py | 8 ++++++-- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index 855519ef..a06616ed 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -1097,15 +1097,25 @@ async def search_people( self, keywords: str, location: str | None = None, + network: str | None = None, ) -> dict[str, Any]: """Search for people and extract the results page. + Args: + keywords: Search keywords. 
+ location: Optional location filter. + network: Optional connection degree filter. + "F" = 1st degree, "S" = 2nd degree, "O" = 3rd+. + Returns: {url, sections: {name: text}} """ params = f"keywords={quote_plus(keywords)}" if location: params += f"&location={quote_plus(location)}" + if network: + # LinkedIn expects network=%5B%22F%22%5D (URL-encoded ["F"]) + params += f"&network=%5B%22{quote_plus(network)}%22%5D" url = f"https://www.linkedin.com/search/results/people/?{params}" extracted = await self.extract_page(url, section_name="search_results") diff --git a/linkedin_mcp_server/tools/person.py b/linkedin_mcp_server/tools/person.py index 79053c24..40a8a5c2 100644 --- a/linkedin_mcp_server/tools/person.py +++ b/linkedin_mcp_server/tools/person.py @@ -91,6 +91,7 @@ async def search_people( keywords: str, ctx: Context, location: str | None = None, + network: str | None = None, extractor: Any | None = None, ) -> dict[str, Any]: """ @@ -100,6 +101,8 @@ async def search_people( keywords: Search keywords (e.g., "software engineer", "recruiter at Google") ctx: FastMCP context for progress reporting location: Optional location filter (e.g., "New York", "Remote") + network: Optional connection degree filter. + "F" = 1st degree, "S" = 2nd degree, "O" = 3rd+. Returns: Dict with url, sections (name -> raw text), and optional references. 
@@ -110,16 +113,17 @@ async def search_people( ctx, tool_name="search_people" ) logger.info( - "Searching people: keywords='%s', location='%s'", + "Searching people: keywords='%s', location='%s', network='%s'", keywords, location, + network, ) await ctx.report_progress( progress=0, total=100, message="Starting people search" ) - result = await extractor.search_people(keywords, location) + result = await extractor.search_people(keywords, location, network) await ctx.report_progress(progress=100, total=100, message="Complete") From cec9e31b903c3e510d45f9b1a2f8e90d84f273bb Mon Sep 17 00:00:00 2001 From: Ruslan Strazhnyk Date: Sun, 1 Mar 2026 00:18:32 +0100 Subject: [PATCH 564/565] fix: Handle navigation context destruction during connections scroll Add stabilization delay after scroll_to_bottom and re-navigate if LinkedIn redirected away from the connections page during infinite scroll. Prevents "Execution context was destroyed" errors. Co-Authored-By: Claude Opus 4.6 --- linkedin_mcp_server/scraping/extractor.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index a06616ed..ad050f96 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -1282,6 +1282,20 @@ async def scrape_connections_list( # Deep scroll to load all connections (infinite scroll) await scroll_to_bottom(self._page, pause_time=1.0, max_scrolls=max_scrolls) + # Wait for page to stabilize after scrolling (LinkedIn may trigger + # lazy navigations that destroy the execution context) + await asyncio.sleep(1.0) + + # Ensure we're still on the connections page; re-navigate if needed + current_url = self._page.url + if "/connections" not in current_url: + logger.warning( + "Page navigated away to %s during scroll, re-navigating", + current_url, + ) + await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + await asyncio.sleep(2.0) + # 
Extract connection data from profile link elements raw_connections: list[dict[str, str]] = await self._page.evaluate( """() => { From 5c7d9f49cbd2aa02b2b9a5fabf85d99eb46f3b84 Mon Sep 17 00:00:00 2001 From: Ruslan Strazhnyk Date: Sun, 1 Mar 2026 00:28:48 +0100 Subject: [PATCH 565/565] fix: Handle ERR_ABORTED and context destruction in connections scraper - Catch ERR_ABORTED on initial goto (happens when page is already loaded or LinkedIn redirects during navigation), retry after delay - Add stabilization delay after scroll_to_bottom - Re-navigate if LinkedIn redirected away during infinite scroll Co-Authored-By: Claude Opus 4.6 --- linkedin_mcp_server/scraping/extractor.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/linkedin_mcp_server/scraping/extractor.py b/linkedin_mcp_server/scraping/extractor.py index ad050f96..436e632c 100644 --- a/linkedin_mcp_server/scraping/extractor.py +++ b/linkedin_mcp_server/scraping/extractor.py @@ -1269,7 +1269,17 @@ async def scrape_connections_list( """ url = "https://www.linkedin.com/mynetwork/invite-connect/connections/" - await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + # Navigate โ€” handle ERR_ABORTED (page already loaded / redirect race) + try: + await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + except Exception as nav_err: + if "ERR_ABORTED" in str(nav_err): + logger.info("Navigation aborted (page may already be loaded), retrying") + await asyncio.sleep(2.0) + await self._page.goto(url, wait_until="domcontentloaded", timeout=30000) + else: + raise + await detect_rate_limit(self._page) try: @@ -1282,8 +1292,7 @@ async def scrape_connections_list( # Deep scroll to load all connections (infinite scroll) await scroll_to_bottom(self._page, pause_time=1.0, max_scrolls=max_scrolls) - # Wait for page to stabilize after scrolling (LinkedIn may trigger - # lazy navigations that destroy the execution context) + # Stabilize โ€” LinkedIn may trigger 
lazy navigations during scroll await asyncio.sleep(1.0) # Ensure we're still on the connections page; re-navigate if needed

z%a2OgV_`bPjJ}@jo{zxv#v3wYty-Yp;=IskUlLbTS@1hApS(QDWK;8Y#+s^ocO-}P zs(?g)woV?L2~7@Fc0=p`hJX0XW15Y`;qWXvA@Q{&-juGEi- zNM_2H7*t(ob!*d?qE7ge^46O3K<>~3#I*?X_wD2T2L)4v{il@N3S=-DIJsHbBWmjr zzu9VrDhtxWHJ1@feflVd-Uvwaf$UM)LOJ1*#eEo;@SrDb^_DThg4jn zC=JPZ>wd*7$r8*eOT#@@l5ht6JL4K7s7R8fw2ajQ(xbz+VEJC(Ev)7{1Dl`n5u#q4s+EkP zz!yu0__jD3uiU%wI_}ppbW=-4d?jRG?nS0H;QhnhA~ljLQo|5DmLn8uxA-sW;4PeH zsq!Fx$smU;^o912dYSMcLOc?Y!4Ltfs>JftB!gr^2#g+Ao!LM?jun0Dk?3r8`C}LC zE;5zFa*pM#&6LD4q|s_tHfOd_#uTW&AM}+cm1Va`UR553Vnqk4CnyWm0^CQXFFq{q zjMt#mmEmI}DW~P=*CX&@v=v9DvJZENDb7}f8VY$E4gB43u9PoJaLnM+(+{0oKNS4b z84jlxYCfD2$@u}th*l0im52_^LB)e3lDD7i^_o!5Qn|F)sH~mMhC=|t9Qm`;F1?=D zV*^1IB1c!30OmizDOI{M@*I)@b)Tb>3-n{+;p!S12&dvC2$zD-y3{LaNs7m_1(?yu z5**ZP7!c%>C~ra`%$L}0$P|Xdaddx{3NYSe-;?abEiubhlHZo`){@X z84o{LAMdaMwoNM%q4GHZo`^HZCQ9@RLpKrPNZ&R>UC4Hm_pGdXsX8%Ue=OmRyZlO< zyJQ7-l_dOPo~XErRh(4|kZbBK#;K~elYE3>FA5@<-45S6vzt1^FKahD!XM7P8&JdL zcQxlRy_bLcX%fTDc6#TxZx){m$qutRc7and2Jv?5KsJcEn)iEp|`6W<>5$-l{lJHSe>DJKh( zxFIprAHt)q{|Iazx44+hn6C|IF$Q8|18Cqa4!YDz)PiKy2t?R4xXvM};e%L65N{&2 zLy~j&;S4-L~c5mFA0Q2 zIsgdtMQ;pDN(r{LD7pj*UR_-$I7bn^Bbh9a3a&{O^&HI=A#^*MBkZFfC|ibfZ@=|< zLBj2TMp5H|>tnnBS7oy&y7w&6Qi;Rlmc#Ynx8(2cu$qOj(p)>c(^O|LhE;;y_#f;WR0Tg zz(O4c?b0~X0)D7mXw6YOj*^NyCFO8RJapk{Tj7ojC2=Niw zL5PQL=5%>7o+zgCpw{YpIiuIrc>r+Ev{*5lWhJoEgF~cCq0&Q^cZftN+=DtqWwT^^MU%NNLycy3dgV#wA zn7ZkYgdX4sI@t}|yMZCQ&i~m^#kWX4U3bO=vLyXFq1N)>Tk;B(jtmCM)8)>yRWv{wTZ0NN^o}=44SY1?NMNm8TVQizENpK-0`k7v5z-1S$@aUv*?&)I_0>VB>fp?jD|z~Xm{LEa6L(gBlJ-j!NogFH5D0(e+jmU;o#+{Zg-0!WNy z>sxCxLz;Na5_hsJUN^J~O&0MYCiQ@1d;}IvyVe20*p;?xX|HgC!PDGKkS?&dS(RJ- zUl>=&|LsYc;MBh;KHp!ear0?GSC6VbFi@v# zqYEi2^JlZH`j-}J#jFMGdRNBtol)j_E^<5rG$=RT7B?$UBJoMDuNybT;@FO_PnIHm zy)HIJGqvh{1jT{|#0s50{E+?i0Ykj0+V#rY1GXu$Io{j_#db$2=0eJ5NWgMeuSq1zyXAj!z;H1ndr)0R-&2>LzaX zD~$^7GF%Edf;gb$f?wgCY9N_q@i=49Z|19>i z1E>?)^`7BGDXbp?M9ClpNzB`;n#dvEa*WgdJvqJhX_f5eDp zR4sZjNZxu8fycuJx;S=+#IYQ{P<^k93DIe@c2HOjFI3nx(9IVB2S(C*xuO^38&Uv4!a*a~;K4qorgDX(xl6<8Se-VD9Bo|C^ zq}8gC9BnZWMG+q%zL%F5t^(2 
zb|wDTSKI2cm%^a=bU&;VEWMcuv(TD<`Ss)FK2-AR{nxJ!`)N|K5Y#ErPC1NXPpOU) z`K+M1KNza!Ip6UP-&uv>^~a!{Nd@OVEHydD?3iUs3)WZdzlcA3nDMm0;4o>o7}ZrT zSJ%-`i0`%oJY)zK1ywKIAV~}|tY2kl=;`r3?0!fXf90!g_|gX^Nol)yxHh*3%>4c^ zstdxsKG1@Kd)f7Xv)OM+xle(J2eV3-IONZ8DtmZahb!^a)_I>CgH|U3+sBr#Pmd1; zAGnr}Z z(#t=}_I3v174EyLs?QFx7Gr&!7|)OBeCEIS7QB_7A-S=@&nNoNZ>9$Hd@! zi=xVwAZ`3BSiEpRf8K~L`d@#EU|^w4!oY1Ff_%&SqLr8H9SUWFqY2rZ`bKeh8Bm_skW=*lRnXjfr z+X6Pp0t*F^^VCcV=>ND2Oa{CN{`s(_94OnMDAF(6Rmr`L(yTbtXa=Ao@+`Gl+sym|6pRhy<%c|@dkDG_j#pZk<`bkC5p7w<}$ZVqg%I|+SE91#|t<*?4?M13npOUVO(iT zd%NHn0A+FMWxw{vQhakM8B6Oj`lA2q?AINur9%OhWH`RcZjc$Hs>x7q_%2`gP%G$s zWtN95$8)Gu@+CYU=EReZT{DMWuP?28V=0t2892*`jQ(Ys&N)|Y!w~2`u`3DspNVnH za1^hBa;x_h)1d2E^m5wIR|=B_lBpg6=LAEh&tD$JAl!BnaEfDNj_%H41+eJROuv1@ z)HiSdfC({?e5HEz_gmmV7Gpuwce&@>(6kUtXNenfp^);83|6EQ}qNCuLrMG;NXvcq&t@^M(jrNmVn4>oBRamEVu zsV2Miie$~9&8#2Cc$GtrXh@=%)hokvzFVSil*Bx*6henm7->SfUuANKGgrVPz8n~{ zf@P>w2XGcD(7^Ve*$iAhqza{R&Nlnxe%Q~y02mJ5=H`8a zt<^#;DrvryDDXd;r`|PI>CvWOu57-uns^_j3?fY75$_|1`-T0y|8sW`@wi73?W}ci z_5G~>Rv{AzM^oJ9e$w+dJ_H(L-CO%@(;?QN z|K6qIBwA2YM}C{uAKQ^FM4omi)05=D%qIo56$WV1^`VEW!FpOep2>BatV69ZF~w{I z9o-~>_i{4NSR7<+%O%mPo5r&k>4A$#I;xxe(^@w`&g66X(49>Cah$_p123zwR4S#7 zsgR}ZTAoG7SFY`sBo^_YVASFJNTN}pF7}7tE$G&mg~_nr?L)kz&UZ6WkZaf%z;80c z@O@a)gbOeng>s*oxAUo*t+{^vB^LJO^=!ZDUTzr^Q+GR_(SZ^5b(&4tgx49mC1zDMqJs?{O#6eM(&zHt{I&FpH z_$<_JOoiC6m8#bv$UghCg}q4)x+0@8uD2MB0!@yrVD`z1j}Zt&$TtL1P5mM=IVzUP z*~qBEz18E3tD@Kh!4BU8+*Y^Kn9$<8NC+Z9#7Hii&f+x$vutAHB2AlusfWlV%HfVz z^OmiLFa6>XP|bgbR>&e>kEC8(t<@zq^KCB_B$z$5w|JghliDve5hA3VRo~|Pibjh1 zDUJMUj{!H-jHR2s&)(0wW|`zeS`Ce&>iaA?OWhxC_u2Cn8%S))U97W` z06)YFMjI3@hf4>cAJk`)AfJLxf*94eJgwBVAVSIDR~^*r-0Y`hgTe}bgG!0J{m92= zWt7}7oE~)10c|~3uL|jsOY7+&-pjw9%ojs_!y$FKL)E@_7sFf>!k%GeY9+?woqo;~ zM+06|Al)(lp9&jB#e8XSov9S6v09fX&0wfs*dHlEZx$N7WWLFQ4~chs>6sg!6HG1M zKR<19C!XB_;smQaWC14FudRRlY=>3s|8(#L{Mt}rvW}7;^b>48t#;z{X-2*tm0%^H zlh+O>{JVRuow^|&MvpB!m8&2z@Sgu}X z_J?ckZin-90@?iGnoNG4*hZr~gKw2L7ph2hG_D;8I&i%DGyh|Qm!9f{JRis%Ze$od$>H1qgN!}c-`ju 
z7_&#Fgr!98vHuMIHRsRBGePeehDJm?$l3Sbx8w}k%*|Cz4re}<$*X+rE!i}l(QHx! z9(+dK?=|nft+yBr#Qj_@E>X&HR2(jwE>@XmTXPyP6a4W0)k2vX{c2iZYuJjY&hIu4 zB}}0Rh4&078sW;(Lf+T;pqq%-qGoUPITdcOnoF{oEAKN39L(Q>!UPX@(#ek3RQ_1X7|yJBFSI_^A6Rn=vf&N=dL=2R$hy0ycPQ zyY;6ao6<#M^xa~J@R%E4?>0bQV>llDcHu7R>r+}}t-lrPD3-L_7YMOfkR}##^2>ke z?61hU6Rw{=GsHQfM9`CG!TH=NmU>7d8z;ocy$Ie)d%26{;@-|+80F{&8?(X3rl-Ue z6cSueVM11vkfL*?f^rV~iE$iRykWB)Jhy{A+h7R*gl`PR8(UA||Ts(;Jn zpRvbhE9ADeZ!zz`6yRbCQ*E9R_s|XA{2$uhIx6a}Z5xIGWavR!Y5*xgQefy#kx)Vq zr6iPQ2Ft6~S1r}7)JaHJvs^uJj@FSu0|q9U?0Gx6Q+5pZGNzny?FaSdqvHG9 z1gVEPDaf@EZqXgkWnyeD$!I6(uS>swxA})Ivm$E=QYzGJd^$fI*eXY1D~&>0IP6pr z8L1@Fdj`X8g`P6P3x~K^&tQ+t2|B4_V_Bb;AYohf6{7Buuo&Cd@vP4R59$&^qcWbi zOh1&QyP?`h?(7bYV=Z}ttTQ;^d_`W#dWO=oWisV;oYKW-b`^Mmg2`&nAr|Awbs{@i zu|7U)R|iV1$-6lDr7(L0!KqL=ufd&w`|TpcFP=ux;w`Dzlg%Ry*dma%9q-Vu;ns}V zbYf)om5OEm!(zt^2X6c483 z0DglQ{~CQe8h#?53W%gh)qCYcfVseYeEs2!+`ra^z47xvLkTUzj?ONKnXjYAgA{^US z)i^ReS1L19)TlGPtI=!b*VhRxr)TOc3dj1+pKS+uI#HRajW!)i$T2D>L`lQ5^aX>k z*qT%OJDM&!BxWY24f~$3Y@nXQv-!{t2&Xuj-`|;~-1uCa&K=mAW+z^nncJT>(WO*Q zf_1&}M0Cxfr`vS!RUejBI0h+#%y>`_Q&1bqQQIM6O{02jMuK^{Cm2FS`TEmnV>l3iNN=e_J9tmHmVY9_pGEB~TvW^BSf}p}EF?%aP6$NIytL3+ZAxF5eOdL_DbhS?}w?f;>{^F%LVsQ!r{eBo&N!d^cG4V0S80Rd9l zVWjb#0_887>pdFZheByVZs_5X&a)+p*4)9*{iaJN-@pD6S>tIsW$$Dqzd2vXp;bt} zY9<90h8ftV`Q69#nYYAU`d-4O?$fdm`90A}F6HJF_Za>TOP!_9hOWJLK_N#JA-_wa zml7^`;5;byC6ofK!-ea<@CRh?K{kap@`d#G&xbLvtj!9nR)tCCvDS-$_Rr;|X4gH_ z@)1t!<8p_gE@Irl&x~gs_{9@9c7ngAn6>LpzAlQP=+GV^m`k>5vPl$z8%Pj(DIehO7i3loL{<;H`t;3BGFp@T5ic32jNx zV^NTCX#=h-kmz9Vy9UwFqF0jvgztC(zrp~lYkM;QXXWYSqc2_kCob8923jvrT-rlb z#l&uY3CuZUvW;0!wwwqGk5PM@V3{{NC6@>fYlB^nh|F z#AGUk#iv=R`Fn4zlcfI3A)B3{k?sH*^8VuS08)yB3I@uwz1~_7qvk?cP(KF}3J|_S zLU0$NMILsNIRxcn#icaY}eEE%?{5F zp_JT8$3%xqjYVG*I6cQboXNl+vBf1E_*>VoZOA=k;uq-$qS4L!cE5LP96KM~{S)GPmE1m(aPmxe-bE zf#Tj^sza+9`-m!8Trm$na57yP0B<9CmboSFvdB=OYVt<2u;h>g(kBIB#xjzmq>EcX~U zgji(4#&#ji%2_+bFUmm?*knTV_aK>fU3v`fgp!AP<`S_U9guU4^-kj_PW85aX4u=n 
z#VP-@<2X7fAgqN|BGB@r|0z1LPq2&xiGf_TtRfmLdi>8fVj!Nt%yNB0mxV{r!GD-b zy{()`r0>{`oab6WX*t?k!WYk-ZycjCu6S6LhYC{|WV6=Lk55lFc&*GK;l82))G zRHOnIL;uUmc>9N}PKJ^aE9JY<7scO+puRyqH8Zf|Zf@n-g1*S$9clXn`POeo_rSX{ zpsnmc|Ay*Z<$TjbH)4jACZga>t?+>>kZWR5poax6gWeZ`3XTv7Vt5Anb7JqpDqBMI zW>xP6Z1|6D(cMh`he>K*b@^B%A*!wm7bz{w3ehvVhIDNbZ*{D+nBG66QH#5+n*=hc zCN|ZC!kU}LCBvfdmAq?$x!>^V5u?fSlyEj~e8pMVuVgRooc)>jHWAXI~}KlnB3#tPx^2G)aP082N3 zMG7v0PX19|8Y3`xS*t)CWUM(y&I=!a%4rk`R#ocjJ(XVv1RQ?2gL4*;uoTRFy*npg zVY|SiTk%ei@kx^Z2%Dtwqr7W%^n_VC!eBdSLdz4y-_qSKL`(O`O-Lf{L3$|mmdWcm zeRri8phN#Fzyy@`3SOXY3hChR@)WGFXLw`l-vj%SzVW+vg zpj6k{eL+=xIYsStN|n3;JmanUVIpEwZ4lvdX>U<{ji-nCKxF_=U`;r<7LBt*j|0e1 zqd+0}bYK4+@@-S4r0Ln&`G@|NAiZz3HAuC@!5r2Xm7B5m74L!oo0SouH#>)ldR`{D zyVDFTb*QfG|9)J79J3ZW?^hJ#)G52ayiUPc;?j9$0<+Wwf^)mRfBo?3l!CLkmxNEx z%$%NShW_b*p<9x;}vg%ai~0S;-NtroO#e{8VI;ZuhPxKvq(|(_4UDw znoBhn7uKA~u$cGd)#chk!wrUo^7o22EIMVcl}$xm_7gx~JmIDr?DgIvMQgmID@7)L z>hJdVy0M4W|GAl^)Tq3uD-``PwRf4qww{b0vx05L0aBi@f`T+W7|w6Id#gAYslW89 zp-V~MYP`tm2=DZCaZ!)+N7Lx!!4Ed?5XF63rus03NkCH@TZN+%xZD*(Nk;-9d1app z$Mj!0582?)-_ri~27r6?0LDdRu(#Cl2_5UMOqxz=n|fE%<;8ZLQpKTJQYjM5&Q_3r3848i13A#XSZ`ZjdJUvY>SfLgd&Zr79=;=)E_l+^Bw%(tRwEH_ zPD1SSJP540wCM3S$&kiwXo4aaOhd|3kf8~vF`SUEPz9Qj@kueO_`l0C_-+^}`fhT6 zON(D^?Ok*H$r6;$9bm~4*>vCxz!sS5xf;sCVL(9-6BGL&Zk3;1M$EVOne+y*AJ&J5 z>%5viU%vk5L$>Om-#=j=)=x|O7~kyNp<__7aQj6MZ~Ywf=N05>vyW83$3IW`V{K)r zCj-xyhEG~*;Rmn?ez4SH3sS70lcC3mI;(3KNiXE540=*={^I={X8mL#s&f6;|D%(aGE7mSFEa>A6&QkE=BRvSKLmnBdvpPX6{H z7?DAsemmH8xcRe=Wrc}z^zD#u+G-Rrf;`xv-St_y>LjC>0)4rt)k)V>0P~b8X zY@kY}O@jr?sg%GWxH4dO>>3;OD&NadZD$zOVxk@|w&7^`D~edVU)w9FIYO%~$KJO6 z>(>3_4yM58PcJP*mKFslY8EE6q-b6DkR{QSleXfNvguwUg||BXEn%=V1Q(gTjMj3K za?ov*SM~_=`S7Ig>=VM^zpN!a`Bk^udVPEKUFpNq{!R_0%=MRE{%mQmRvmM-e1 za(#X6G|1LydO04y{Ou1)utjCoU%L2TiU7_J-{nDVuOLlxoPiPgKr;!VZ@Pn{;8t1? 
zcK*ZqN=d@t_qPc;(^B-ihTEimFeK7gAN88xoL7OToK}*5{LY*?<-4$CVH+V^cKA%I z85z-q>P8{775(IwfnOIM?@=D|1azdrikqG?Bix~v&ND=r2yDvc`dFeG4~ z>0nE>RxBtX-fY0@0Owg`5TqM4etY#xe|UX!3~77}t$jf7{2lTyn-l)2!(myzPm&L`%DRkON()3mo|4yA>_t6_fzhK6FMA6mc0I(Fa4 zJAI1r{B&Tn9K;y&;83k0J$K1h1{Qs6gXGp)g0X$f@dL>3irs_kJ$WeK{|vo30xZa| zLn%@N-3SAy;~(zGqM`HRT^rR$WQ>hjrJ;wTV2cc-fY#IWUiVf*r>6w>oSxFKuAYA02yE?& z5p~wcdl_^ohQ?Mb*ffPVwXT}CAsMMXW8Pi%yFcVPzy8NL!swvngt)t#lrZwifp>T{ z;B4p`H)L;xdioy+SRJmwnE|bZ+^Q29piTn8li3xvEIA;hFMbgiP&qf>Gk9N)62f)s zYyQpVhDh{CqVyNT8(xw0y#S{_l&kwp7Vg9_9|Gvh+Tc0UXe|tjUINYn$0X=i6dff5 zJ_2y`v%9Ui8FVn?s;##hM)9AdPd-k!ITcL0I7=!^dn zg#T-R!FCuC0zU}%b%HF30pZjyS3X&AK;R?pHI2PsgQy39T~QCHn3ezEK7##rM+F<3 zjhJ1-=wUSA-r*3qx)#yHq;D4!=n$**?0n^yYHQ*JKN6icj zpzIj8WqB2>yHhiC++Fm@;gx@Z-IAf zF<;PdMKb0F+(4_z+QeJGXac#~$(9-a*sv&0{l84W-?cK70!U*l%FtdWREGa{EBKpT zJ!Bg(ZbJbKA<2Tji{XE60k}KBOS<(K5dZBlJVG~mVG@}aP=9{^`cSyXv1%& zA5t~LZhqAK*t?PRx?>jrJmC?V?SXf+m%MmBc`Lz0lT(jgtqUwaf ziBNR(ajCD>D=49fSNY$x{>M{>Gtt3j&=VNBdNSKuq&}drXGy{7)5i<2w^@@%^&Ja6 zi-97uxz$ECfyrq((AMX-cC7SeF3I2B#=m>fU|FO8FdC)p3E${ALcx1ErucNZW#RU> z8)9!eQt-d(6zadE4(G?Wwdjz>WZ({sCog<9V})>&fDw<)|1BF;M*`N95%?c){nS@F}GJx>--MyYdH|XLT(eQ;J>_S9OnovU}|u09}8FG zDCzI&?cWyipDz~sNjSg^Xh;c|3hwWay_%Vs>B93mofb8Mmds}-2n!x`u%aFdjb?QK zRr>#W%YZ16cXB;OX8~6(l+QFD@BPB9N@x*QD=3E6a@>j)hmmXT|00hPGXDq5~AM! 
zU4emaYp+uK0DRit+>@dRlzmQgbaY$3y-nXssJc$p#kKl`R^5NN(Z2$2cWir`bRXJt zVPaPWtN9V#X0$+Mkb;Bj(g+U1A<=(Un=sh+c8RtMN4_E68*~OtO@;gxc_g^@{1ht8 zEqlZgsFfKfQrQd0iEY~*GvfbF=?<(*9WUR_jLnh}41Nbz4L&g1!@a4_*#P{v+udWG4e&*GyEH9Cc zJRH2ZjROB60unuPaC#^gL1+M|;4Uw$OG4iS+Xm9m&`|Zkice=-e0Qe1W_-DVGJ_=i z|5BxwR3SGr(=(?x8DJ7-(Mfs5HFrtYJLob$?YR9gLw6?NAEbZ;`M`%hCA#ZUYwbIZBmrKrc7& z%|f91pC`Gi)0wB};PMRTyWLXdRS9VIOzI=?eQ4g-=5x2`qw%0H3}vNKsQVe1KsByx zy1C*c0a>QQxMSw$=NA`4acpCoM?DLv;UHvZcyx33PpF+QZOps6cYD}#oCu~&_&tO2 zbPF-1O-S+G2<`T&GsK%uZ7t!mUHcJ)WhZ?5j~AZ%uhh97eZMLrzqr2ke0_P%M}%7} zpB561MoFi?Enclk;Bk$^uY!k*c8Xa?6@<*8jxc5vVW8vbcdRg;#>tH01Z{Aem5Drc zPqd53)9VgYn9?G-#W?h_%SKWJ>$=9q#{6$KMy69}H7%bO?T)^nVDY>><=_iy`0f}{ z2^{}oYt!@UID0v3Tpwj7 zq}P)m{R;6F($`dbdD>*?|kLJ3gs=mSa?VWL;?M*q{r6CbZ)`o>*LhO@?9aqwCfPy#*HV-~>t z-CjO*?5bI1!r)sEE|g~asHiB?dqUC=bGh8CKj`^7Xwn&Qp7WIVy4+)al7e{8W2&qQ z7{or`!MisIpeW3L#-_78`&@<4ei?vGvgSu1mI3A*2PVI5BpSFE*xifL z|0MUsx@GOx+z?0?0fM-9(T*Fi;*B0G&+E%J?NPL470~%|im>af-_Pcvg+~}It&;!r zx)IG9%y|qbCIr?24_++7Zjno`w=;BYbyXE$VdSWaU&S78O+5x+o%jyRl*{)*3~8+a ziwY75=;-4yQl4(OGwz2riYByGMkN3Sx<0;Ve0bRP%xRYL1gcWjrY}H=gEIS>8ie+&YbxtK>uHx{ATgUd;sQpGUKeA%;k-_Yxj!lRl11FA~9P^ zbc?Vs$oZA%OK)yA2RuGp&rKgV+8A30L}Z5a(40Gq25rx~k_Bi%|Dd-@nuy|3`xQ)K zUT|gH9TZxYGm498PXH37A$G zDh`eB4ix)JvB42Ef>NNYkwYMU=`BD9I_*MNhd}yJZSv8ZFLp~zv0RyC9#@W-V7-MZ z&Le>uk_Rc4m61F37azf@I!i$=5au)TxROFdPXB3@Kk!lh)%`EJl2Gxd3o=AKe5gE|F zh2Om%?b;J+*3Sy46oxC_Jp1mlL5fH9$e+iotM$iH%L^(2a#T#qwML=m70=c00cydS zTDu`JO!?L;DcvQBS3V`tc#PF#E6Jd}832`iTPtpe7yy0RYpKpWQv#$38-A6i44kFu zv?QFbQdH8#o&XeSPH|pE;U(+|hOIUsjXGC8(BY1sD(DvYf%&nizRk-87lGR2>{T|9 zOB$w!-Nv{I2D0#fWE4Onfv)ku4^~L@ZH6Tpz3`P5p)Di00LYx#f$!DykW+5@Yp zZ=dsNwN2#NuIHa09Uf+b=ANg0DS~gB{ogyB?$RooDyIqs+!L@?e)Z}-c#-ytaYx^D z1s>(-FHBkurh*YUea|z-80sP^?@Z4s>$xmn4D7*W)|fnrYpLZWWI8r6i$Wxgcoyi1 znZ!zj=e`*}A|Dn644FBAs9@;FPJOqd?-Ez0wp5Y-$efaZ%}h3c@EmH!2tDIvk&@vi zD&-3(C#U0>UI!Zau#L=Wy@S>(yFj~`r)hm@N&3xTw#)B_lGT^{-TfLroUB4<#ZmF7 zh5P}lO2<+AQO-8pfkRJ3RqF=jo~SD^=zxuUkBiuH+L?U9An)0K@)Iy6>U_Q4bogPr 
zKyT7Ro~^QX7$y%W`7%M9j5-)@ezQK*v*&RcXWx%K(eieM?%p(ThFkZmq}k*i1vt!!Ze4UbCT z_sqML` z#2pzM(5k-{dc5Ug<^7#PxR8iZNf}fKJ^^5yz)*OQ@7{C`&hq-3g$sj&U^bF+`XK_P zFKlD)VsM51Uf- zAFm4gGoE~aBWlw3A(7q3Fr|5jc7+?-EcAGV{gLCO#)GH^I7oLVC~h;zUq^6Jy|(hX zhvQ_THJ_|`T{iJ%rDt8E!Sf|hDN004VYw01!Wb_IdRE^VSc!TB0;ZeC;gkY7fQ#W5 z^N3g`n2EHdZFpORMG#Ts#4+i*w`lTbs)_)Q3K11SBdP?9$jT}%EX**`?d8`ey0pNL z4)z2Wa|k51Z31-t#&2^`rFB8yJVH>SMbks5@Lhq&)|=Z&t+a0$II z8quWpApos?u-2vF&a+Ax=Gp`_33P8xt@X#A?keSM8#$|Jv_ zZ(#g!)15;-ix_w^VE{?7?Yjlk0O4e~%En8##-365e6Jp`wgM7OMlCg;OH_5h^1!M^<7@k( z#$IqFPdzlX5^kTmz|i*q#@a|a?!0}8_Z0vH=WE}@_6M{KBu;L|g*)4O`^!NlP<+h* zcd7$mwcXwr!)kkf-Jc@J%V#s2V#WCEGrJ{Q!&PORR`IJRpgnliE{L9vP_hJg-t6emBbfvIU>lf82d3G{;IJ*+%Ki-K7xwtx+Tjk?&30c-ClTj3M z+C-219sFFsCs|Oe=$x(~y;MagFgbQLqvEE)$E4Bw`f#sfVbDzfh@ml-+ zW!BlA!8~;$F#O5jarmnvmMvaDT)2&~Eyfc2&s>A}31hoVz5Ce~r4_xI^kA}XW*X=Y zfpBB|l`IHlt;|0PtGSJrkryf!MbKJ8NHKlWZ3eR`pBYrjz=T#0HkF+{E+xOe>&bTa zSHGA$`O&|-KR%nP)ZaA&<|_wGApgqSk$-w(Es8)V>-+%D>FIBR2hTRNtkzc3SL?E= zV3;^m%wxrKxPfVjgPnm*ry#IhHa~GSc zk6D$P8R4_P%DFqZ=f34|^9c+>l|AUzQ}ti^;dbJgSI1!(`?}Arr+`6ty?4QHSNyuf zjq>-;xW`n3TV6MhDK}poQ4Z$-eD9}#BInIwE7owPbgqFC7qXs2@K>~+qT%~bKmXElnGV|TZSW_vIrtTD}R{-ok6%9cM zb8Mkb`J^T>K)%Rm3A$PS?uURaDmM2dq6SzsDnd!fHFP${ih+pp4|M<^#s1eXIXe-k zfWg6H0O!jF>*aRoy~D$KTMwzb2?rqyx@sN?()p4{PJ7J&Xn7s&XACo0jOU8T>*~+T zJ$4Q}zW1BIW%T)-zXA3|qtxhkqnNB`aXj*(DH_jcX}SjK06=P)LI(&`(=%^&c5Bp- zFKgTb(!}LeSU;g}Sql*R)Lk5A)A?Zz0eJTW6f~xY%*7tzMMmc^Jp#ek8Tob^bds!e zM^VN*(=}}il|MhSbw6nCjt{GS11@GJKmb=C`W&8U9r=fPx0g3Ao|6!9cfc;98W_=>bF ztx-v}F>DU6%7t(LWAJi#^)p9&#?fxZ0c z9Pi%1Pm+QZsmRK%ajAq+GERejIjqMV-$94<6w+;xYMg`LK*26Wnwx`ixIp@VjN>y& z_s6HblaB99;0|e3<73S(pjZ}|) z(f3%4+rygD-uP>roy^^=U0-1~sYIVC$G*(zj?GSAy;erU#Jm03+!2C(@5Y!YN?VFk z$YuXgI2B(Qpx99X?_R0t(2HWeo6lEj=T9aFZ9SQFi;SQWZ4RQ>xW73&QF{f#cNft| zoifJtjvDfUeW7KQMhAYz9IZe^Kg4TGeSgPo&Jj*4PE&W~#N>K95ClBIeixR~Yo$J! 
zX{ubk2R~kRrC^$52~OKf6`umYY+bjP)MM5?PwZb>N;UI#=ca4rf-zM>;)!ty z5`WEs?*^3^>cLIzv>-a^Cn)&^NJb~pFP7gSm(iQ>4!+k|A+*x00$#U$+f!cLpnTNO zReLucpokKMd4etnE(oEbHysc93S}2Y;2cfyPgJIZm^QTJ>#%_!LJ|l!d$NeF>ify% z=9z5GXCS?Qn?qmQ9{R}!DUJkAQE_Bq8lf``Ccha*F$7=*`5(C>QZhXf8l|cQVR9f5 zK#V3iT5e_|jLOsjm|oMmKJZ&j*2{Qfbs=w>Jg)Z$;?Ri2Y_Xet2rxns39ak#0yfrt zise>Q{7%^;MK438Jzv(?Cjy{?Cm`N?eT0u4%MH(cJCfgo&!k88mOhVZo`q|Fyt-5x z)Z}Ag0ioN^GzMW|*ZFZ&R&Qev#H%u^q-1POIX|tjH+eqcbk**ta)}>=*%<^b?X#c6 zUp9YKsp}(x7j|2U45F}EY?osnX!<_TC~WNO1>|D(9zBAgJwJ-<@Pp%C{&2EG2vnfm zPpKe`D4>_c9)YyxWVQP&CF(2Y z`w4#zKqtlUn7Ah2WmIYv?l9|3Q+${tKp^K$!O*StHrT_*p*u~KnWzXZyt}jo2#*dg zcH9uPGurx&*)*cx%l9#s4XUTm{f1JOik0Gi#)?V1 zPL_D0u@M4tjE)4!1=Jo`CGmZ7DLslmu`es!PUdga>^Yv8_H_A-bnQ;n3_${Xu#0cE0 z@c_xqVh!2@^@^KMkz!elkI9DGa8|**k(3@Y!ay?C5Z@m=+u_Tgen8!CXuDeOD1GqIMK3GL;z@}Iv)-n^`FjawC8ZeY`@AkR@q^qsoX!M%xlApR$!mk3Tn zXi75Y(^>Daicj=uhvcDD9Gm6}l|Ni#v{0w}1pQYGy^Fn5;8IHLm!|{gjvELWrWIqB z4*(skIr$D(!`0QvFYV=5JW`GHEGSglAh!7|h+G)}6k45AQwix*)Ivt`cPC*RPyGV% zSd0V192S?cgeeU7Y;%+N-U4Q0KbBl)>m(kNyH*oraZj@qn%)jfRoQT>nA&UDwpFL{ zJKph8vjqLQATbFmc7%ddJlOn%}CfxCu`82|B_2F~ec2{%ujJK@wvo=j{}YZ@QYVkU(Bkb-z0Zx-cq9Pz;(uas4P|jFMNYR8oZo zgmvZ&7qJg+wq7_t!PXR3c3P})3|)ETF>0w>?|?Ou|7=XfdqHP$whohOP^YBXwbWva z26BcYb!V%?-CvNf()TiChr}atL>r!agK$WYCTwQ%;ak~YkSB<7#<%gDMR;D`H!Nz_ z#rCBz4A`0d(Si(Gj%^-qJ!eExj=J>#_;|fs#>b=w<+v;AJ@IPo_*7htYA*?BRdQ9* zj9v)6+l7l$E==4}k)vbmxZ+9@a{klmFAOaKz-_igs`_(e6-I6LX+OA&GoXnt3r1I$ z!I0LtY5&L6^oMGbRW|YydBpBE}H_FpR+vVEjai zx}S1J8j4t?iaCb7PX*v$g8)6lrPiSSImm3xHJEghhIIiFxi9LU0!3ChU)C!lS9|ar zzc%sWrQrel(6-E4;(om;SUi)EUdoDT)pU8*Mz*!qghuNrC-LR^7YGn;IJ%uIU}O73QtuznfqO<%bM{s=B*3omjcpY|DuY}V#8{Vs8%*>&#r{RleX(r=;pYu0b$pS?B4 z;kq&7w+ZjkZ)V?KhDbnf3wNJEZ%~-`GQUJ#3s`zGl&gh1a$4E;X`yaiVBt0YJ$_4; zqs@s>@%UXq1pa~@XjGL43?RsSlokT_yx4ge?}LTqFEN|7J<%=6En|Q)?<2MQIL1t_ z4Q>AOObF4Pt;ugoFh|=;#ZF;_Zblk+fyHRy%QlyKl~l1dmF$!n`(I@_P`1&ZX>Qmm zpFqZaw{IT61(%}H(k0D5l1%&1a3k1`zYbX8KvtA*eWSP`%cw1cM{*RPt)H(X+l>Em z+-;~0eFcfUW@_CZznb4%duWusezQFuS>P$t5=8LHAM0+( 
zQ%vEw1$V#ww}bcn*79`@c>fIM;1ET2nxKUR;nFm>zBj9Vm*Q**$^mQ1HuXzO;>CNc zj$8ZGF(SGBDLGOYp|&^P?tahk@h=F(PQ9msxWZ=z?FYbsZXO^LIJOM{*_kYG0s%_i zU_3hvBv6z`@;#bcrT3S+#to6%(=JX$O{Iq?zZyWgDN(sH!&^kEUT|C^vCJe-yFV?( z_)hCAa55$`37GHhf63&`Q1KTa_ryKaGdHd*nQCsj}A_vGy zZExf2+?G`ZC z!|KoT{Y2ww_E)cvo8(WDg;ICKowqHSo`KG;K5ROJ>l1M=5X^pE2V_i6%*o?8?UbEK zZ=U6HONN!g(Gy%&MI+Dx$;E-Uo$JwXqw@P_egDbjO0vpuz9w7c<7Fqw3&Ke(Iu1!5 z-(gvmQd-Z(Q!a(i0RIJ9B;?;=0ibPsDtzw|EYSeYs%1lMTNJ-b$vhci#2(j5>BTjW zj}><o2NsCQbpa8fZCJ<>E$d?^*Ol2$#sn{ znzL_cd>`&)b#29av@sQV;W#jlssW&#(?TT|4Q7LS4Qak=7mE?)m6QSkv_V?0%dW~$ zL5SnlP_94>Eubp+EaGB6bg-)RbNz|~Rbw;Cz*R@DY(pn6y=c$jP zmbw=TTiDmWRN-f{wJthaLWa%gY4Vxx+Ju6yy&oFRtc`(+5*|N+L3n_Y(_(9vPo~}s z#YMVTTFV)lp+4M>%jQYCSon9REyIRl!t-N#A|VF3Farqr@16R!Kjk}j>U+ftl0OW(22It7G zKRfljQ2rjp1nrVD2&0$JWvvg&LcApVy@il)`c^)ud(R8<(C4Ap%bbmKj_Z>Fq!0D> zpR$t{jXS{0>MtQhWApC-+wGHz9@8t55Lv>JDehzO zxhK*5y11cCiu69Y!i%03ubV3}lkShH;P$G3?h`D^-Y_ z<&ZeGguGTn>k8exw_XY$fiCSY=Mj@36n@<)LDLU_(9LVf(f`#8uy~%;001&1jy42d z?-S*(fEp+Aio1`yO6mwF3p*K$t1>E{c-817I5uiPfE_8Y7yIC4(xOZI(i+Nm^#CY& z%&tMgB*wv*@S6iBty5#;XGJMBr-HZ^q#DR^c96KR^&D+QQoow=_!Xi1$VL_be=Y2A z-6>T^bJEGzpum^BIQ`{S_(0n88@LPme;sP@%3 zbH4g60q6r3&j>>J3!BDk{aijt_&_X;Z<#fz1j~?}lb|H5UTL|*bDi*Fs=~VWKw$JS z6#;~J=~wOgPciSq+g!da7Rcp4gY+=ss=Q)@lOHV9 z;r${T`mqVh6-Mo^A5GnAy9`C;H|;qY&Uq~G!x!UcSO`ffTcfRW3XVK7H)3vl$ETlw zda_^T1tQez$`Lh41ah|fJ6^vGY1Gn=S6e_Sth*qyK36Ob5`IAd=J#FgPL1JRIm_v4 z?!&FBm#5`(UUQEDxY8%mhc8NyXg^LQy;B8|NmyLViOtq?cNs&<^3lgs5_#~_lX-Im z3PSdO_djm1npK_y_33a5-iSVZgtT_CUe3u}@oP+nt?wfmU*9_ZnJA|WCpR3%R7vBU ztjeXCaT^GuC_m+mqJryzoszh^AgBmpig{UM3cVozY4e|es70QQ$W4#$yAlq5|^FV^&aN$ z-Q&-IAhl!&3ttG4hC}kRAOF$v2!tEWUR7$=r_l$JFWrH^`C3M&dRN;C29QnzQal$dEe`zQxK!^5jF1mr*J6D##KBm32G{2soJ@}wEb~Fh zmx#D6G(H=ELp@6sI!Xxkg%|>GYh|&Y#1vyZnNZb0Rh0xBVw6tyIsBERU51PgxS72)9`J-}c{Fmt4 zVV3j7P|k5fybe+g{qdJO8$q_J+G_>H_1ORyykyC}HCWx&J8k@wp$^8$U$eC0u{&P$*})TAVt?0 zHV{lM-x)tzp~E{_IihB7HFHiGV-7H=%%18~EtD{@KqSJHQxIug6Fz9sQ>HNCOu$qq 
z8^@-d0n{+>8HD-4Pv&$D6Dhn({MCaGjNN%M?-vXE(0zir-|s<>dO%a`I8z2W8Y5^+2OJKciW{~YlToGmYRq5;YKPt^e&0drCn*K( z8I+4bAcOE4uT9xA?%|81)hH0wmVUU0@t_%?kL-O8#3p&LZ}u!#GZ!6&C?xuD*|Rfr#fozPiV(g@hBfHyQLuZ2(AwL=Nwixb?N~$#ByYd zJKl?c?>PAn3Psh%n`-cLaIyI@;G68ha)s)0G%SmysPyPa<`u=fkPKajIwXr$2BL7( z72}olo9!ddUMOV$o1tI(@i$SxJSzu!n<2Oz94{;VXW5=6T%aqF5{H=;mUoIi$fVVjq0?0De`%-G2r-p{pz(;P|O+YQRG)C9hfdoO@+b84f+jvjWYg+E{Su0Of zZkH^DTtS9&>+{+w*D7A=c*gLYF%>ao!b-}wI1w{W=ryks(yHP38_yogpeuI zpw}b7wjfo=+4L@!l6owlW*dmt_k3d95m8B$DeP>u8h$TtwQBC(vU&N+69l@$s!e@n z01oNicr#TZ0Gk!EH4wMKKHVs0P#*A_S+J<=<~36R{T($fM1-T({+X>zpBCGc_WAZi z&~kW3k;{m-13G#tZ#_&qEFS!RJb2ase=VjR24$n@<5-qARg|!vgp=JultDaQ( zBmUlai)Rexhh|9gP9kw`3w30!I#YSj1ke(!+$%{4rA9^k#4B4gf%Lq~&DRp((Km2BsQ zag+d_V8bn@^5$y_1+l#93{#1TdJmkSSwc+`1;lc%PMs@;aD>DM_ z17>R+qA{9Hdf&Y^1ccq8Afr}BkM}?bVV)#l)2Cu?5Rf|XX6W-$YRg(A_;U5q#kPd! zutiKqP%B{)9-8wI5Y~REnZN9Cb1e-pg~&)AYJ~xOS^4gG%-?S%Ihg#heWgh-GUq@G zhqCz1B`;CN%Hk3-UL^&gnDaP(DUzDRVC_X$ywA>phU{7a1+S^vZzavsl+gG-WI{b{ zN2D2M6ye+6=F9{MqecoalJDYXE?`$k8Ing)@w$s%%R3X!K9i@;Fp-GmCOtz~gx}G~ z4UoHubn9Fnvc1fpLYu%?Wh91ytkowzi?OGxDsv?V(32oE0)Hq~^W7xu6I3jaf7HOY zxMN0xP4%IA)aC_$sq2$qDFYN{YEQ0aK|B)t<|I(z_uHbPYgO~^Doo@}oL5eIq*!vP zxdIE8L{AV{dJi$e48bF&!jZi*ObF+le@<)rmUx0@hDgaT(TU>gp3YtR8F)N;(`2rZEA9*&T-{U>U6jp&ddKX6(+xM;yd1V2pOZ6X|lV#_4NEN^H zBVbrn1*xbs-i|ln)9(Og@g`ye@Xdq}!fcXrV#B7AshbUa{XzddBJ+$+xThe>LUx}_E9udfwLWTwRoav>jD4~!5TOp_sT-g2mdb_&0QR@> zmi;&u^{~-m{f>y0!JO{0bChJL0cXh%&~jMzhnW98{mItM59WUD5@#$OXTTKUWYjVn zE~09RF)sMvc?L`$?HHQ`{Ty?FAtxi9P|mu4R#fA+U!?DsIe!4Z?MdahHjv0gKtP2U zPrJc?&8zWJO^E)GnjgcP*yjOh#3&l3SMJ@m#}hjNE|C92(|Jd;`M+=62#J~4u?0oV z*i>r7>ag1ytrB~WQhO6()2h$-ZslTon@14U~AO|2lh(S8nJI(`9Hs(stTwGl;BvCxm%4R8Tz6)D0I(7y0`ZbBSt z#rfr)|Gv5y8-3MRwi$6LxA@*bJ62|4FLleUvF@h&*{jlGg|IocFQ&CvieI#xKx)EW zmG7^zez^*|12x5+fqqx>nx8G}n6xYZ0wfKk&l+PS(zG;PC*S zOH@E)_fvCrP;k%78>{&o)yT)NHt~CZHrBLg z(5lew*S8EkWGa{DAyhblY-oq50X|lk!DSt|wwlri?7!_t05XRs_Az8#dQbQJb+cNc zb#Ce@`}T`!s&!JdK~#RvnVwwGxhj|X0B$${fMCHmIXOA<;2+LnuKZqF2b~EldGbf$ 
zrdw02L-*2vOKIXEC0SNP1@pz#`F2yx{a8#fUFEd;P3k9V#HZlH2Wt;IKOz{5m({R* zFNsEV76q`k?}l~&VG>@ZYK9hl0si7`_9OwF{_9^3faYZJ-h-pbhb6vlkH30}0hbL! z+;c#pK<%tB*kUObbSRo+y5usNg z;j7Eo&f_{SyPYYY6s~J+2R%N1A47K%G_Sa2Z6Yv=XxMaJ!TLvYl|%Xio>%UI$WEU` zG`RAlMNu*LchyJLiW~b|Vbq_0$2dYZ56?6tD$sf4BCHBy#3fU{kA^3l`#5MMqGFw>u9_f6taD zapu7uMDiAU6Ov^D4~}bm*5!;ncw95EPfDJa$d&w^eMr6wC@PfSJ_Er{)Lm_J@!Whs z_nA%C?x@(4n?OFdPB@-8vWcM{x3PuWhbQ|e(9n4~w4VW_VI>RBC909knQ0M9;>%+) z8;gBF(-6fb=iF^e;{|k8a61%75mI?vHLFfea@ogmIs+*bln zXDWeKRZN>C$S3X%5d#w?$N6{;%{;NQG|XIM3y=xjXbW&KXS8axRyYDQeEd%9ch%B; zpmnXifPzd1h@b~<(RCa5h*A|jA;j9n(X*-Y*>zMC_!v)HKoMFr7pj-{t!l4?KI%W( zd}0<+`+UMk2@euf@S4UP%Qb5&i!&-TmpnX8eOu?)skC>)gyD=;jt5bIEhgKQ_aDYg z-xTk7fn>b#rv!qWD4+mKW*2)#zMH9> z=KbxkS%4BZO@i=(HJBl^ZO zO3kR0S-eR(&Xd^W3(`f5C%ktN{Y>aaiUJQTm&~2oj{|JKH_WP6yI;I}_`W+RI472; z_2Y%i8=%DWu82vgZIw|cKe`b8G+?exQJ_ha3}YNl0SrT77bm$o!4am>d&Qn}$0Ruu zSBu*oHrhb#TZWKY8|c6)J!N%gt_89tn8=xjQ{E`n&NS$MvLZd+5WpF+2_Vtw^59IS zZ{2mW!6y))+JUbmGBMaY1QIVvF(-8T>Q?B#+|X>~8{f?-wWS$(j=r&S3xNb2`v?8J zxL-y3nt*$KWh%O){BAfM**Hn}zk!@VNn&GMeqtvom(Rf+c*xo^z2$j5_#kfmH@Zhi zU$PF`r#?2_SAESQYC5i03bbPFXwmAb2iWuZ`3db;EI%IqsOBneip&Gb$_GC)jnm7y z&7K*!T65ja5Rba;%2fdXr6AnhC|-EH&@A`^b@mCA`)tQNMw$2pZh&^hX3?|lakjz> z6AZ14ym4y;QC}+Jwuqw|NbODHDJ>!fkV^2UL`K7(L!Pte9rKGECK!3ifO7 zcm}%jku|k!oG8+!f`W92*x({RyjxwUgZ{>SuS?h#qpcA@jd0LIZ8OcDLb9z?Is@-C z#k|9O;a0iQ?tQt$Ic-bSw&p{DEdG*DbiUg~!7<9}(-K0EBjist+EHCzonNVvh%%&= z$IN?Fg+xKtDqT1r9b>mX?8idawJPVW=T!`t zDhsw3l!@kF*GJ0>^goC69`+G4Jcyc7PaXuO=4YXm z6V(LJvY`*bAj(v|)^(!aSqIUYZ>_$fhkc}Vbsx3eHVR9wG<+veYCD)N4YhoNPJ(ht z#hx8@R3}wNkj9_qeDj6>@KdvKBccNz&P;kZ)DpOVEXM-12hU;+7f(JLZ`wSrz^LWV zpYoU2!J%*^NJhdDLYpJmCpdRD%MmcvMmTD=GEpE)gI+@n!ii0R(`2OSU>PU`0F0$< zk-V_3|Ll;-KJ{|PU+H39NvN6jc6Mq6AA;|Ky3VeRu2VQa8JJ*!M9(gzHmbPDK0{)G zAIw|C9niL0_|y1oG-Q%+CmgSqDeU9?jfDD?r=p!*sY;Dj#?dszuS(wOSFC!DmA0&x ziq)~0uatK#&<5_b@4>zr)ywTQP4At{Uqc#@T#?11Cq8XPAb;7-!@xZi)~XaZ*Bu^p z_6S(~T^rbH`=<-1zbS3^dBMKYiYL`+DrlXEt4L|GL9Huoq^kShPynli|3~Np+M&9i 
z`^+5Yz5zXt3M)r2y+wuDzWv4pKrhe{(!KbB3aB@CY1mqc>r*Dd4j*<&1;cZc0bs#D zLoQp>;6%n_wEre?%vl>AOmYvx8XRMwt(zsY9Q7=0-YMMgMnStp31d0;BnStI@hv`5OK28PC z)Dc!5vee)Gj&(^F7a?X=q$8Yve1Yirh13N4dz2y31}`c;00nHn0ju1@GIK>^q8N>E zyIxxbr=Y_vHFa|6M@1SM#AAwRC{1fHmcA8ya! zD2}oftLeIFFiANlJX!_!QgbdK>SXjpy|=No{i%|CxSCg5NP#*x73`UBGp)w1<`wSw66P`pK^pQG=*6@LQQ{0W zyQQCHM-q{a;-K7x7pMD%eXrdVW^7iaJ~;mS`&H)^uPA$`boL)d`DOX5U9wI-gBP)$ znOX8~*5AVR{i4^~^FSj{l} z?zenM)kJC*%^aAfiG+ljgmTbr@gY44wUxAFmv!jOcNh7z6TqjkkVIYKo#+F+IACqr zU#&Z&&UV^U9JiZ8%%O{cGHAAMl~vn=Ig?mL?uF5LcP#4VIXh)V4M53<4vM6sG`(mi za5M};JYwskCHH}n4cl*|X27M7M7>^(I7X7_gA*WXuZ~#~o++r+kR9Tr=+|^zvBYXA zk`i;T!ljohy7Y60Q&cWm@3EC`X8K8*5P?gd3PH-Le?gTd)r4o+fH;1;5o=S53P3B| zVT=0;e*e0-7~>6*mbdGD2#oPJJbYWpiA$NHDOr}y9o|&i^6IDfqsH!>D8hKh|3pOU z5}VER^u{Y=-=*&g*ci#lW2PwQBzhgPQwWqW_Z0_MUbO$yQdSRiIIt3XearVp_7R`R zTSLEbwOwmXC!jH|^p+eWrgKX8vpx)KAZrsv2gK-{JrDJ*ejM`VkLCA38X7j4%wHe$ z?^{qLB|+(X=_pGFb8b~z)BN}lj5FzfFwP^nc5e6_V1Mrc2A7=M-Vp-2XD^yBs`voJ z>}{395Nqad+Wk0oh98dFgu$8dxaa*!SD{xZ?N&L$&~^GGiI4PX^{bl?egVnbx*&LG z4!c@hN+6<0jB;s`YK=al>9cSAL?VM z$#$f+-I% z1`iqv#FMP#^-z?sPitGZLE22k#wiXjv4d zGGZK|e-p0Iz_*p8+dx5VPDdjm3O;u<3yFEac|(R|W70$EH$X=_oyOZ8HOVaR7=AMP zP=On2`#S5W5kdM5aMS1ryN_2y&~je^dEi^xkTDQW_~med5wTpH^`O<#cqqjK5Eqo`%G zD1#o)d!Wp!yYr2-y(e4-+f7RByFMz`7&e1b_fT7nNVS!BpTYniM{VfkS*5xd;HmlZ z{;sq!dKM6i$bS+a!d@Xf`>k0*?vT;80}?>hZ}wBWRC#i_2!m*1;_nSo*!K!Lp;j0@)oGY&IDVD3NZgZL61UDQX8cMWeb@iaA6@jfTl5&f8*LpqyEel9 z2Uw}K8kmZhT}DJU1sOb7J*>Cu^HqA&=s6c`TywPbFTvYHt$^GP%F*NL8>s=<47iqh zonR_-YT>RiI{wk_B0bIE_uy5*MG5@Ot=Ied8x=dO(mvl z^0bbYT4(}C7opO5@S4xI96OB;Yn4lyipw*|lKb)oPBKAq$c0ot^|_MxQ=!HOREEjZ zaG^vqdq3%XnlE*13L$VU(NZfq0vQ0W#6tvnQ^jQi0@R3^B|(`s5Dh#Nh=HXTm8EuS*8OH^%(6XfQ>x+2>nZJZ?kYgI_JfF4F-rNf$8I&89vFl7xL634=Ml zB-9AKUDnavYKIpKQnE~<4^s(p#YQh|7+lj8Y|z#^ik!QF|&kM)A=rzJ87VJE-3+?V&5E}W^o(7HUM#mF+ex?@nH_RR4R>OTOBC4YwX z42ICeH`*+a?wz`whRIg&5!4wUI) z_I^}Aju(hgbKQ94mc|RC1m3xy0)$A^AaJXei_KoXU{YMfMtue>)>99`pCa_iR#4)h z*1vDZH4MftTgc)w;+cdi;z_KgY#o$3xxHq29@6`&wX;S~Jw!jhQ!JuQmI=d*FHw_d 
zGh*na2D3fxa#+~)zA9+PrFh!hsLRAoyxgA~*UymSRBD|4qbV-t#NJ&ASTU>kSe2S8 zpNtti1r4W~vaRFFN(%=^nxA16A**-$moK zbZ;R)cskWT#HX7gKpkl?i~{o!5p{mPh+z|mL8eBxX zXUgaY?1Q?+k5&J+PE6J5hrLzU)gENOQBVkIF>ulA?xj8!#UjI>g&pOU zx%wkcdTJ0IP5BF;SjQ`=0MR-yawX~uerv8`5f=O5{$zWtniwRKC7!v5SO5gy^G0Ya z0EWp=chiq4{I5nImp8UD+!6~~u}n=Y z=h0`LKpS6;)q)nS%3r`o*g6%PAAb*1(V=fz2yCB2_~{1rrw;%78)+dAUn0 zx_YDPH%uRTShtuy#PU8CKp9UQmRl=bMeL4n06abrns_?fj#gabRkhxTZ3mo z+XdI$L}~`i%B5gd9^0q${Bb=qL-I&QzUF<55i8}ZgVkS^zkb5IZjNt_ zmOW7)`D|?WO*2-35j&eLY4h>v+>I>s(e`x?Kvh7}!&*?cLl0sBr;83C5!NweDX`4# z!UUKB)cx98oFPk1xs{4V{6VcNkEa@AUocfxTWpSCd7SW5c`AwS3+H%Rf3BNxRI@wW% z>hNYOBnAp`axMnC^(R2KNwH<7HyD{`>Y z0H$TU0<-YL%=N=QiTz^3cb)vg7BDQ&=iQA&2eDF{@&4$*9xW1O>(eDmAsywkS(kYruK%!fw=87 zHeMIa?Yp`s2B2nx(+-fhm{e8VHL4%6H0(~HA_T@$oB>?l!S7c$oBAfYnbdWDL|jDn zYWeOYyd^DwaNm|flaF{9WQ4wvPid#=(X%&v2XNpwGS5^w+@q;j=4%lk-s}gS?^|z@ z5)@y#b*sm4Zu|#NJY6c5<_P3zt+>xwQ0v*mU)(k5T>K$be|aIL~fRrL52{0xV2kO?qu>_=Rh;&puSb6``xdOfZLJt^)jP0H)nK~u^eV42^u;3FF` z>Q*LneSb|4pO8^Om@7zp)y$Mg;xTHKbB|3-#>o0Y%gRJ_KhzB?kr2}x5e9>2yxpgJ z24E0Gnr*Llm`Y}Uzu5M9Sl@T=+~aIMc_@29ryADX%N-`oL))~s=i-GY>n7d{P$g;) zt;O|5uieGL?u{p!hBKYur*%zlKrKu^xF@SzH|;+P*9c{F_@|roj<$aA$Zc}zV^$0q z_GYQAWo_`A*WlCp7*DK!M5PNl?r(0m$GWqRzehnBcfTSWk_0jYaFZ6J7YBYVN{^ba zuV&cDX%H8Ql3Z4WnA9KfM;ItFHl_O=DKTY35^y~ef!=>s8gwLw4}m!Zq!EHjCX)WZ#cvc?^mU5|7@kb^lk$6+GyzcA&`}^AV5v`1~Ln}FOQz1mD&2iuqZ_6R^MO2^J539GhP(Gm4Rlx%tAr@?(SBp zpDd$eiLu)OWxc@O6sx55%5c_Vx{1Ze_D8)C!~aS_?UmJ3)MAm?78w$F;;E1G*rT5x z{kEs~Mog;hbnQPb>$TgrRa~M83R_3B&IEIB6ZP?2yQaw-i3OY!M3S(IoW= zRt2fjBKtcst94H#A$!2W#{D9R46+=$APPb;WQdxTa;YsVyMXkoXP$Mb&8AsP&?te& z6$wX=^y@y{f9>pehc8?$bE?AXdtjd`k@_|I-_P874>`Bdhd^LJVNvtZUzL)v9zYI! zBoi@?kz0hc-986Ucq6)gCWjydD-@Yc)%Lu?0ReKy&feE`V^kO$wo;evVnCa@K*v))rO#NFnh__^SJZ=!Dfq$xus-|Lo+poaFmAZP6cy2-2Q<0uMa^f! 
z%5+T~8LC(kG_MZJ!#?>ZaJUFqmavW;&-mpvosMQzVXh>+`o!d2lBFOoEQ2MVd=ILu zCp)G>9rK9FH0kQzo(90*!$wV|g#h9q!+L^x>8<_#YCE7vc<v4gEh*4yP&)MOE_@g6x`v%?8v{dl|--KXmB!U7q`m+2 zZQoSpmXZ5fpl-@5A@KJtc`SLyzGR_LxQVkfX~5a4gUn;1HmA2c10QJT+39&AocUCV z6S*)WqZ9rGnKx94MeVb4nNl8+Ft%EMT{7FQf@FFIp3vHm29Uz`sI@Mu2iHV8Gl%P+ zsmoGgq+@g-nDXyO@ymPumq}xbX9)Svu_QKPiBfkc-Km3MPhg`IG5YXu;0(-f@N#{D z$!3FLGBJ3r~Kg8+oDxx>&5p{Rx61RD{6gY{NUxPpcCL6 zL>>Nl(D<0GkQLR$W7GzFSa?{o-4v|zFr zg{X*Or(mKd@Dd=|=+iVCBwfbsZ=Y~H=5Xgj#+9MB#}|w5kIAXw`Q$#XohLMvjzDc= zpdME0hqF62D>8!JDx9aS^|#Y#P_-=~Ip?h@Cp%UT{u7fJQCy&KBa%K}$b2Jh zX<`LniC=^{#GcgZBn2Va09&cU8X|_KPzWoy&|%diw!>-6-yTj?2_yKjNdl@bpR`3D zf>Z)tYz4~;?6uJJ*sD>S%9e9`OUaN%NmpU-|25))R=G?*yz##4%3Wu|8BF$@yfMw<3#xrXk^Ssre znd_aL?10V*;uox!Y`J9JZ+s6&O!^3U*Xre$ziL<(m#yl$9r8kWSu9g+1V$lbE!K&M5F;Md12!TYt^X6Hi9^ zs1*GqlcvNXN3j*qF^glyQk2gg`bB_OW9j%Fog@pFsL83^xZ7% z5iJbnDZW5PK2#4RMwQ^iK#vz0FRpOgOKlM>VANxkvlI1(gOm)iWp+LpHR|aar+2UC z+7iT#pjxgwv8wRf{iNM|F^W_r7n>-rjP6FCH*BA&3&sPFHW1%~jt%uk0YlE?a;IFJ zj-DDr)ldKDZ`a$^?K@q{dw`IorO`?EM}{00LCD~-Pidk>ms@@>^Gu58S4K>(?|Nf^ zLib01?Q4E}vAvVL@{P9?cqOPoG>N%-zcG`ywDaEOsi>zd`If9@n6!3T^O|#xi^umR z`+#`~2H?$7zWA#Tyo#pskGePyYe|;|zW z4aldYdJo;!-lquNJ)M!Q;n7f7h>(3gt}7Mt`0kUj5)%_mGl&g$=uX5kVtu?IYPzLT z4f3RB#pKh|qU6CGxb(OVcl&oJSL=!r%-b_B7K96{HFqy1< zr_99(lyYhCV-;g&562f$?(W+U_!yEk8PhfH&S`VK;BLNF^PZhkaD~_w-Y{%$fw~86KQX*ZqG=~G=CAa0uBmM#hpE1D9P;_`YO}OMbAgp1ckm8^rYB+K zp#Hf10gDY2Z@Lppzt}sie>Z{5mC**aAwIagz;Q-{1cF22%W+$o6~(*7rTj}UgC61(dks+uc?75ZUKyr4HoNxIGdi z8vI5jtoi+iq8@IE(|3U?_{r141o7dZwxYZ%UH9)kubyxIH3aak4>pusdRiEt8Ta@`2$1Q0^vV9pyO8bLeIM)j2&d2(5$jlgZTvn4o)jZ}?iR^%CV_eHXl) zgj;m0EvBNy#6t1TN8PZyF*7JMK`^l^f^=wX=PkRu>z`l;dr}Ag8TAWwcLUMCcyEET zU$Jv_-6<+pb>m9A6g>k2qd_-7x3H*ac>GcG;>-JXD;4ZZA3jMqW$H0R{(+Q5B74KC1;zTqb z*>M{1ktW^%K=K!nk*FTU%y%W@9`$n{jbc!-F5UYeK3Qr%Q^n-XSv&{Lr|2W}H&xYe zNoXO5oRWn8i&{kDr*7|v_0}CwF63scFa83jvDM8g4S+pQ?NTb7k+yo7NHr&PO~|dO!1X(_C%5Or^#X2dZ@~xQJP%CmArs}hAO9h`+%(m zLXAX@SMS}u#WelZd!7(EWR;kCg#nE#EK>L1z==8>?fVb!;_xc#s0T1$-`!sxkbT8-#y76o>XbWb9#2UH{0JRy37cyr 
z<-c~GAU~stuAH{?y)IMN51U{?JUV8%mYu~^yefiEQzVvhgS1QV zTm91dCRR+;h$*X5>m?-=hbM=eC&@9G+uuO>`r`h8HV$b7GuKY6*Su|Ja0Lk9-3J*( zz$IfE0a>c`>Y$WG1IxRIWZ*y0Ii?pa9zDP8q|T;;ma6*1N0hOtNhchI!flvo&vsbD zdfB(fC zv!#}}_Hcc)=ki!3pI2q=cd6Nf+uLo?Dk2MESKhT-U<4O0_p%B0;XvrwIrm?Sr#s{Y$jz!nBGWQS*<^~#=T*JD;s>da{nvX5oK*}q zWW40)rHPQhs3WpPYpL8v<3kWjUx>y1Nwyg8SgFkFS-16rml~&YHyxLMwiTjt*a7TM z%-jC@K@9D;4+`?Yk1N-`1l(eCLylOTMm~O7zqQGt>8 z-{9c6FQgG{(?Uq{cFp!9>vf?N)LCqduf`-sDK$q7cwN+}kAM95xj0-hCRkaEr_+?{ zHd{mNtrdmN?AVHTFtKMI_?X~eU~c?8SOSB}yN>5ugabnff|b=bwxhrQPzu;ei#@4c z9(aB+l%=A-)o|{<3Z&_vc3LlA{$!@o(U2Omb@o?3I2X(fM&I(QYPT%31Rjrl;-3_4 z_>K&7(XS7jOAL~)l0t9$Zd`UC#5XHqef>3_5UDroVt3WM4Gy4?@XY(OKlgqx=&$+X zv?n6xPigk&6$#lQZ{C#H?Y-&=3{yCLpH`z6bhKp%m0Un1Xj1pz2v_iqTF$mZ zv*ZJUTP!^DTWT0>0(e*uX_+r^wbLFJAH8-+vK#jIz=^GydD_g;qkq4p5)mulwBLdtfT4fAHC14nQQ&8F9YY}hE7kf|!Dl6?P` zoG5vuP^5VPge=Z8d1sk4gMxu;i~voP71fAj**{p7056Be3JbAsjlnj8Lp2(_RxfZI z4m-{`mRnfe{*wQ~f(HwBxPIKW>v?InW(d+7vsBR_NKCa1gLyQps0%Dv<|i;cMw?Wh zJ{>Mavsk9yOe-R9qjAxU|2{R)1WBm_@;=tQNw=shgLk2aY#QU!y9=F8u{xqid}J;V z=%CT@GYj{xbO__<9Xk#QiB~*2dQs;hq}3lFWO?si{GiG9_hS>L9M9_?ADiZ2dn?I2~LTmqEi0BZJ_Uhtv1vM2K`l^Dsg^%HGl5ixq?Jw+)8=3UB>HC^GPA!>;VAGUYZ$@6Z_EPxe{B3^K4(S%rDtW{r} z*>H3^osY2lcbx7-*#nfB^B?pHADmGCLe&n^>PW7C6TZJ=!%4Ah1=BKZ0&JPiD}wN} zVf~+ZLC33K%8?wi-ld-{8mvE*3E(hM+yodR6)-xWt&PM&DmqBTI;Z)nO4e&+mL(dKF_uS7;E7jug#oQB|i@gR9O5 z!5s(gO-m21hylqbW9gmn66E7{$2c*`n}#&`-8p0nab_Q=w<$||JK{s&$;iQ7PpG;O z^=dIyn`S{ahS)xuj7*rL?D1D4?s-YS)^XAWO~66-iix@avIKX1K72}Qx9$_q#`aS{ zNMtZ23M`MgyG@6qO1)rm{u=4OG!$1T522$gG~hs3E-Xa#0NBiWih{jL4#UBEH(PFpg= zoXO179SOO>cpXsup|Uy~X0b}dLBK;dTn8)MR#Y5+4?b-yuB1ABHT#8lNDf}pO54>w ztmrpw?#2zrRUkluYbha#8O)$E2K`%xNo^lKQsAzxxk=@0QH7}1IS$S%7k$CQB3a@d?K z3Zc%`hIz+RCdg28E;H8L_suBn*Uf_e}8)d=Pch z%Nzj}X!&@F2IxQJ|y;&@SF^2KffKy=LyB%u}iQ5&OpmVzWyK}ilj28 zvx5W*x!l=k%WNl%>)l$_Up!75amyk9 zsiZ?e?#_n#mA-#yLD;-SY{TFddu?m1a>_-Pg320bd-ue@PW+YH%*kvNRCeRWUuai; zxy=;*8wHg#%pJC*)-XKk8(RM_FmE-WjpAiSR#EOJj8*3g3QekVUTnJt+}=OejNhhIM1RIOh;tA@$>*0kNpOjY^+^2;!PKO_rK=+ 
zG^s$6PIjQROBVthttGx#A_{dMC>`rhc{|wo{$a$dl%J4#`XMOf>bxo?i4x zTo;(<4ABlYnXUNM({>&QRc{24D7Nlw#6NRbmO@c627IpqLjS&p70k?XVH+dao4cf6 zEk_(OJMn}3RTj*cZK&HgmO=fBnF~V-3)F7x7xzCseX^ai@?^OQlTw+ykA8@CY;sz# z&mkBrC)To_^{+Lh6pHEo8z$%}%z-wc^+R*hPUtsLN?r!@Q#&MXf8WPVgJrIv^1y+} z#s|AcOg)6CpII2W_-#;hGPP+4_3qalh>UGQB3T3?3>fl>n4Jqz z+!R;@F(t-oTu9W&l)e=TbT#*wwQ409ks@F@Rwa=lR$eZOD!3Dp_7p|hPDRLFq>;MD zu2~@o4YfM*N2_T})2)-r5nGgCMJ$)bkO! z!GuP9JxP@2PC%tFi%-ZqYr(^BV`?Z z$}u;F0r@Uhwfc>7m;5zU!M=>lbUdT&9nTL9`I5JbyjfBXVmBZ5J*ix>jb3P_=Z&Ca zdY+!&cLM-Ls24TVzrD>C$B#)Nz`N?uEUSnXE1@=71byjDg@vfcYC3MY2^Kv2DQZf4 zMFd8in1S677AlEPx47T;#8^cVA)y!F5Mbu>tpx-6x{Rz$}D@d~vq1wVgyu2LfNQ z6yp2)3o9nmP?24lIK~AUm}+WLAB+o$Qh!@mZk^uJ)S@Aga?G{YGkma_lD_^ca6lq`xTyHMmP3NPnBm1Q$5VER5vk_)x1I0++X zie>A@6Q*81pqJ&yp)=|iIa0fpT_6QM1)oS*K1?BTZYDrH_C1VOSIIgz!r#NIzQE#R zDKa#xIlMVex!!J_d6VD|T~O>S+P$N79ZC2k$(v=+qF`y}+RNts@t>q%yFU&7M?fCl z+(sh1isYCqRtx!B>6~UON()5>BxcUmc7D|W+ z6_Z2Z60g|Nk5}M2{263ax#RY?pp-yu?nfwl`Z5UF@-WEiD!eFk=aP=l$pMuJCQKzL zM>jyS`#q5DeN*y3p$MtBa^zI89i|XRFYdSK{ASpE1qM;?I^KnSsFCS_{V2n}Y!I7Z z`pUMGu~|J#&SqxAWy|V>!=|D%s4fr}zq0?HRb|9f;TIG-s(La`5%$Q6uxLqH@ zAlk&#`vS}0!0<=<+_R7F+Hx3$-ViYpht>{7QPA^-l6*o)9{r)X}|m*on1OtAhmjCrCN#XjH7J0y*UY? 
zzV%{C#KE493Zm3haJc zi*N4q2fw?AR3V%u(G9=qI3Ue#ho|eZ_CBWBDpmJX$~uzrp;f-RGZym`yg9$o)w$mJ zXTYC`HLC1i*gS~^`A7XXQg1`UkBhS=W;SrmwrB6E#dH<>=$o?%$cr2u>Tk2PS}m#-sI`Gmgqt&J>qI8gZI0=gBmeO0QJ4kXXg1_A5w4z1S zxw~hGf%}2wZ)V5)2Znqv=e>hyD+z2hQBDrtoM}|u{vL(pss<5YVlN(E!AZBee;=oV zU@H}PI0kfE@W}(^nlE&=8$%nXo&9$9;xdtCJ(svM1^ERrm$Sf~i*?uX_NQHc6ixa4 zl7GIuvK^u#mQ&mRu-dhpJ$UQQIyi;YQ)+>9N!F)z2GT$deQ)?q>k~2xS+MR%Grzg@p|xwUSy^~_#eGwgRm+ngc81;?Y&@BNCnTX|{yU;9sUsQc@sj=5~!KOfW` zyjs~%QQ%(IakZcBnki1%RhFJkyY+mqoa|`>gZH@kh%b=7oilPGb^}nqB78TTDx2K3=-E%W zT;lM&OsYbH1NtuBpN6zSQutO!G=}u;?+Omr4JtWf5S2T=)5nJB$C}Hc+&5rb{^yB( zf2Jgxb?0LFGq(1`h<12b;)+$TJ9REThQsEe$o+niJ_9CKQQ^LEThm+72%A^ZF(Q-s zMzso#$$l1nPP4SMn)ZL`=Ba+%D3YuZRsSxDQlwF~V_nK=OO<@gUZzJ{`6=P|iEW}$ z%r~b84iniSWu;Q-cB(2I9@P?w50LiYW4HE?q6K?*)PFnmQ_2M@ohl5mPhHvyUB<~%YzS~lE|6FUxXM_GaN3}nh$E7Z?&%&uoIBvu}Ik|b4cN3x={ zc!CGA6(8(x^7#D?AkYk7=+uK1SZLtOA9ZsIA5>uM&f>jw6wVvG4bc@RR`uy4`a0}f zdf5_hBsMiU2Tw<1yYzM^4*U`+inZyy1S7MDd{dr zrKDRxS~{dT0!oL7AQ*ImfOIz~NJw`GNO$?G^g#lIu0zWm(Lsp@K2x&?=u}1T@S1fCT3jY4h!g8Ezb_j)?_n8k3;blr;Z+lrzbSX&jQZM;JjRg3 zHsR;N`POS>>#0uxuh%?fH*6R-=)#x|Y!>~y_`etkt=Hb!dc{&}nsQuZv5U-GCYRyy za@I_Gdf>Z_laM8bN$8$8d!a`9p-2{b#xRlk;*^MDQjFsCgQg=fN0bJZH-}mVhfqld zB>X%RfRJDf(s7Pm9LJkvYvG{CQ zet@UmLVAI#jz(9Ah~2NbOXpFoJmEQ%DPz$q%VuojEjwd$k%O+1wY1}zma%#W2Y1;e z31kc4-4LAH()KLSV1569-E;q&@hd?^CE+kh*_)Pd%x8FEo^mhg_)4&Js8yj~HCnE2 z@l!kNmxZ~MD)&}1igV7MESwOdxD6>t;*H0_lxRoL!?YoxwJ%XpnMp_kn^RJe2ROdS z^>{J-jAVo5xZ{0WoKhy?YxAoS7IJ)F!_uz-T~rE!>I;9u=$C`)ATEj8R`Zo9&)vjpccn+3j=-DsP4Evc0%Ni*B! 
ze|c@Wh0#hKzPBVC7a#?YOoV93eImz`lLxH$a3ZKWx2i2%`oS-=-B0?9ZN3Q*bU7{{ zSqNL`YfdXx;$^`7-&Ya#D?{n%0a!(j)`uJ)t_>aH5$)2CPJSwHPssg-g+&Fg>=7ew zB`m{|Iy?V5vmC{=bgf$^kD<*(0hlJ$)jFE}SPMtQ)!e#BT z>slU<&Idd(4DvTR@JGy>3HJ-p=+|!Iua+9jK{+p&dwf`9fVdBd^o9G zvlBKufTU}!)=iKIL67_IkJLB3JanXi<&!*p>w~~WqPcU|v&doNY`hkgXbZ0DcPP$; zEjcBsl``gt-6s{qh?aw9GY1jP;^viUK`;f3Gh<;&LI3OXHV4+G1qmfxyB&;5ic%JJ za_grCR7V!9GUl>spiH}sCIyqd9eW^1AlObOf&A<;;HD+}2*Msq-^_x9Py4?-P3luE-h zL+jviz$K3SUk@Jyvxt*YLo?$_H5BXgX`bP#fwQa6IMu8{MvcsRkh^f$MnJN;3i!q(ojaJ|Mk%vg|P;GMK1byNn1*D4X)*%QB+1SYDNJi>WTKhADcEf zo%t_TR6CV6k-Px>7kwjWgRL9{CB6Lbjr{w6r1bHC9Xt@e7l{>2s(T$4P8ZyVh&z+{ zilxBl!yryG63wQ4hc_}FSAosreGTc&*AkcRPu7ekyVnmlZBtf9EnTqvBN{og{t7(@ zq-gBrA2v=L{E$c)#_xA{-ozbvY!Fz^q1s?lc=lKH`QYHt8ibz{lB@N`t!z}>wB5q0 zZbd8qFCqZSAc6!w4Pw->YlnQl_U1x|XQ^H5jh0SoQGA+>oDz5YiF#0pNo?1DjqI`p zmAfZMeFINVETGmK71y!|C}6q!|GLR6e;t?EPMNfRM@$P>SN!#;(Pq_~@APDX{<)=V z0s9V{!$#IzDv=vDPdpdDadBXx%Mk#6`uZ)s2os$$0#B!)UMHohJcKRO0gO-j-si@nJ-GbAiAZarlhxG_^nc%H)H4 z(D%CLy+*_4?i(~?_>37ds(zjfZF(GB(V_zvp1dP1mI>?mS30o)4Up@!BfMM4C&D9$h{b3L|tQajzC5*V(h!Z2rUU55_E{IpNUY$x= z<$h8QeJLR^an$HxJiWg4M#PQ&s9?4v|FF{U#B+F4^P~vwia4k)e8M7ZG(b*L+XqY z($eyt?M~Oknkl`Nf!WJIWG|3y=eXsTALyRu> z!P`?22vW;I$f)_J0cIBiK(>sE#5+Fqf~sQl>2eg89(5aW$ov3}a|JX820t z*@C`rP;zvq=HVf12F%IFU@`2&{p==0X zi%3gBL*hP^R|DK=zzYcqM}ZzeqQiAEA`hl!lHe2GQXb#tM`YQp3Ki$EVeKzPb0AJRW4; z{*aYY8|bvY_f>WXdwYjcLPDUv!Qs@d-`K*Ux4 z)+arIs!%P2{{D8CiCqR7$r40SGrXI6(7~O7gi|!6cUUuT`|Krl2vmmb%s=JKU*Zx> zM(SQ7Kg04fl*l^c@T19ECGh{@j}uF|9eQYBqALD$pm6vg+S*|0_)%x+Tnqb9YHAiY zXfhgKRMc3(%>N)EG2uIk(%bvc4bvCog7~F1aG?6oey1-4{UILGGSZr-pB3og2aYM+ z{q7sVBq-^Dw~rY0;JWqAVk2r+z4(%6UB`~AL-W%*wI*&oS{l-Aqn=+M=CvTvADy~B zcL)RV#XEZ{p6z4083oP9I|cp>iOMyS^N@F=_^Y zldVJJU~=aQz`Zb=1`ReMhi;t5Q0%>w%a=pXp$}oDJZWL*b1z>e1+cn z;8`X`{`jWS+aF-(3w^u3dZPSwUWzp;B^D8<{Hz*X646*Kj`;a<6MtBad2e0rNl^pvE@YwL89CL1%JVqgV= zse>j*fWNO4M+z+;F^03y2D^c0U?;)z`rEAL_&k+ZeFp>%$_~g}0pjgPw=VCQa8G$!bL 
z1m<2*f-yWYQK}A5I5`?F8YlG>VI>>y^iCyd&>lh4ny;dNPTvIq1_{B+xxXYR%X=MRj+#{&jP`bb8JN64zkJ6;oiA20+)mn+FnHBOw9I3gT71 z&pK((CCsh(o7&NlT6p{{^5M-0VGG+Z&-f4mEfAp`6FsW=9L*Bmy!>`xSN1$vR#50eJ(ZP1whukYwh6+dlWs^0}osHWv;Irqlb$DJSn6mod9q&-=@E+Xp z+<&Q(BVf1EKSblrZexnxX|R;J59im@&Qghf?iB0i6AnbpzR0D*rY3<+1DX7a6%{9a z=>zwb)m2K63L2~!anZsdx(O7+x%(Xf66|{mTLZ84@Rz5>AoWlb?O&NLG8uH?h|FDL7y`n$V{ng;Pcvs3am2>Rk(*6_bi^1xC z{KyRd0UuXN;l=7B-A88a$jyuSlGICbrLo@Io@CqiEM<*62k^X2S2*!wrwJaOCIqOK zY$ooAdiLb!W-tTf;lv>^;YuNC1SdcL1IexN#`#7Mi#LKr{lKliH^KF#cbx^e~sImdhAXDT#!!;r!2uPz^LZPFpRWj2*$;e zf@+{WoMa6OM`P0<;DyOB1$~!^*FYin<2XlQZ(q;~BQo_C$dX1nSv(Xrbh(%2r>QQS zoqM!9p&{{WiKfDSl}-8n{cn+5vM6OElG=kR{P3R1jEQ^N+QtlKi==3eKW2PfbSV+ku@d(AWPQ{jlm64TG?7n$M5sZ;V<~p=(>UnBn=>=>mK18MQ$Ts>d0v*tH+~K z`%8hsB#xSS&(~HqCCU5Czv#iFG_H2cq6^(2g@FYNX%M~pKVi|9(GrhRRzWJz}?DFD-^BUOe!s{|wO6iAa1E3Ggs`(P zi~atSChhnE+vn3e@ojCOSrN3n@6Hnu@y@*(mN|wwdcP8p1gUk7%F+4(xYJstSu1vC zwnC4Pb-8$tCKsB=r$|y5&-VIBtrZ9-YR8!}?lv5j5E8ES&xzNsPC1k&USR*|59?-8 zdi2P>E?`B;pwYfMhC)ah49dW-jo9hb)wX3Q3D+XA)%ri`AublsV7HBX!q->9c)Few zs`~e;sp`_*=_n3FID_g)F;wL_Km_rk>dRkZ0q%R+ zR|K(p1DnD zl#Uy~9rWnvnR0S#JTtHvzIg=Z6uqT8j}b~XUX(@Qxq{Dp_9z4sCR-9tsQ9b~e;kk0 zx+2?671^~!JDg_DRyeGiY)%!w1HDsKMn+$&*tb6v2Sa}@ttU#1F9N$yW_>>pbEYc; zOt{s=knzE^vw@piX=a7Pg!6|!?8Wt#VB`v)L*3q<$4sE>QvzsssQNDA#Y3wQz45+e z2LK#a0DtOa_my4^6jCsEQ>B`(Pr8M1bJ%@Jpc(npo%&(@@rBtsD9v!Riu3Z6PIiaKA5`0q$sBrR2QIwMoHLT8_&N86>5XWO9@3M}s4=%T z^x20H!`eMtpC9?_N0XjA(|N*Zv3z0mVAOBPjK}($3X9IZAPA_Yk|lM^65U{Z{dcWP3x{O3rg9q6P2kk&f_9Be00fzK<5Vu2HI2Eb68QQd z77dUk`%814Yum#6s#;pvH(XoaD|5M?JmvD`;5f3+l5bk89J==aRJ|5##QkVr_I!>? zhb2yJ9JATwELLL(++QxMjzZMk*|VV!n5zAl!AdwhC-?E;TtM>Y6p9;A;IpPeWNA}? 
z-jX8=?!z!+YuWzJ003Euf!CMH4c(x_Rp|q14CtT(zsZBrqN(Z!D}C&!%D%qozW0mg z%d|`aR&_*v)-JCc^m8Zr9&f1tTk?CQZE1v}QFCA;m{zX?T0^YR-Y#XIeuqV262J8$ zV#_&hW+wC^^sPbVT0&M9x8%(k&H2?K_j~10<_gP8&pNXOsm;;)nj4q!iIJLL7peZI zvt9|+AmehIqF@PIhC9=9DnB73A*ZgC+UJb7ZZTr(y;s)Uk82cObaRO0#fGK0qLWi* zhUb)>8yHBX3ebQNI9H38G%J=kck|48Z`NbM@T)HEeOx>KX`s{d2@|02;4tjAB`HF zZ!E_qH%B~=sc>!k@lR(%P7;YB5WDHb{``9PGP-{!1)D;!FRdI$1yoB@p8Lh_*rh`C zk502SC(3~Dab=_X6)XF8eB~5TZqBP-Bw{ustfGOCM+7I5X*3d%agj=@2+ybh#3{-O z7W*m_q$Uv|g->ejAv$;n9uW}{8F{CObDs6;(-XE*v;O|vlN#HD|zHziZtw@*#hIpM0o-=N;J7>t(Z6!*(~ zOFy4P^06o6T(bW#G4lbP>-2bW!kahm7L8mRQ%%yjhQ&`~s-Aq;CgwI_+($;J~*D3S87*yafbXMs*7Di^&ci9t}m2%K)B&?$>pIA-$Dq| z0T?Q*w>F%`EgOlAFpK)qzMhgqn|W>=TYd-80LB;#)^1;!lhSDS7&tj+B?iDVJ+zIhtWdP+$WkwcSg?fF~PZM7| zeO3;ChjK`gx2`+iwBGZaCG$PCLZ$69COxBlMWU~u>cybxs{09c@FWMLj&w3hIt?5B`Fq_L_H+M%xv7wg=Xc{xtzX|%KB(P*%L`TUIFyD_e zeP&$LJ?MR{LsX~`Q>(+(e5Fk$W>w}du+IMKu3c2%uheESt{u^;RDf0P0gZid@Xj)t z9*Cg2zxS(g;`8r7BO25sc|v09pIvbQ#K}Q~nAuzhknr4$z^=`MAMB;QpcFBi4tMw2Lg4wnq-3xAuv&t`iy!?zXD?Uk z)-$!a1&Lbtrh1IMW;xmQiW4Gf$XP(ae)H3@JEjXR| z4@|#^nYKGO$$$3P}s z8WQJz`x2om#Wiy&V2Xk=!14;L$SB7IEC@e@96mB3vOBTfNd&vB_nBs1K83@C3seB+ zE{%N2X0Yl@dj10?{7Z0;opq#DQL*{$-8$PMv3o!1Hor7AiXCoOvTbzuiTN7$uP1_m z;h+-8m{CYbufGwDmBbE4%XYpv%GcJ{XRMhO)K0XN#me9@Z+M@ZTev>m+o+x*i7oSH zF2Uz;4YbvnUL6uu>?O0!paVnU6UPeml(n_BHAPeR!Xe|!XUo=madZdg^S`(d)?SsP zUq(cn+*}Ej=%;>g&b5#@1O#*xKxJFHpj}QKx~35PC74d1s%IJ?-_Aq-L@su@z8`27 zZklHY&l^MzsYL3XY3h#04Xtk)P(Y#0u*yUUmG$=u9lrhZT!ApA4*6xIPj0JhJ}dFv zE8lIi64a^gJ^slnYW2<(#4@Zeff=@Wu8g0B%kApqRefY?680ZU90+$dRa#L|!<)U! zn~~;c%|T~l)eg4eJ3qynH+O@#N*Z-Ne@-!#HVVqhR?yuyk{7}-BVy{h2C=+RBlAv6 zNHglb&^FRp{Yq9N&VNGQd(-n?N}>EP`fd;@5eHnNGn}Re@Oms}OX#UAf!S||W@JAu zoMGMasBGm&a_4`-Yrd{IX|HHwQO)Bmkc({C52%j}t zsuNZwOCZ5*+Vsf8d(nre%IULREvVyD26cS-(1$zss#h=QWLH5oQJnNMaz@a>Vcz;! 
z1uoH>kebROBJw?5)!}eWjnAwF1z5RyJzJ&2Nm*-yqyA#x5gW})`S4_ICctz@@6RLZ8s1z~~3sb5ZiL4@%nxrtDbTcrAZEK*Cz4nvk;CPG+Cz0cJ_IPxsG;Wp$C7`8vLhKK9jF zgUS_2$t&9FPdh1w&J-nc&ZP}kQEQtOC_LR)i-!*W9x=L@8OY ze@vu;1EPv3?7|U0gJYQGkqCb)exP?43}ldUN!=Q`2;j^WphXkpqe(V#;zoxz7$%GY zUij~|q8^+~@Aewl9;Zw@H5;!DzG2Z2KhAjibH+rsNdEyF%;NQU@hYg+!$mVg3l`q5 z%l%-N*u{6fnwrcz!nbZk6PMFQPXzP0z2fp)Xg?T@F37*FCpJ3nC+O}DT3p}z!Otal z#r(b6AvfdcNH1I5V5Yd@Gn|D}jej$l?46)RuaDi!U*})1(abTVV>FP{Juz=(JqWCu z;WCcHKoR6=|=;PHv zROx*UZNDiQLPCXzx9IjyUjB>+jlBB(D(4ct=FSU^{ClP9PNp1~*d1bLp8X0K$&tsb zI~)?V>OP@zn=tkymFyUIo)(k^UQaHENrnE+UZiA?#{_l?C_d8t*szyS_2gBF$s?QO zG~@0Ok%~QJK1oRpYVSR1?1|H^E$1E*(}D3Xp=U_-E{if`LO*%QL_Mn_gh4T>&?=bZ z{I!fJ)!6ryFHfV`wGNrk4W1n+?aRzMYS^ByF-0kBXfPUdL@Boi1G^7uG$cmx8}9DN z9~;##M!a6hj--xDRKKSz-XL+cXSq55i?Z(KqP4dP$7EiQ}lJrQdMf@!PgF6N_PKh(F-AbF&HKh%| zZY~jQ(U){5kx@j%ps|{ka)}N`?cfM>P+4~S-P7-r<)>Rdi`Wyjt_-h0t;=^{@&cYQ z7{<|k_G+&R*v+7pwpzS}_G}h3c-17NRO-vXm~{}H##^MuOF%e(C@Nave2-()K^)Cv!_M$6~l>All! z1OoNlF<8AfQyzRi z2Uen#79oaCjXQ#NBX`vf4l%93Xl*{b5k|Y2+VXck3p#e2QT$I*CJLz)aqa9U)Z;TU zLYGdX-wQs;TfA)w942f7G}k9*dy6AQKKz3Z!Vi|Z*MT9z0UFL`i(PEAkANwQpvcnJ zdN{N3Li`~01Bm-mIDgjak4t=0oT-MJ7I?AA;E2PXX>c?iD6BG^UF{Cw=E$(fDX z9;s^>qFY6kFmvO##bfyo48eR+PVUB|s49nfg^|&bRZx%Bf8}$y3T(VPOhLiZpp}Cv zm`sJy1_hT45v;Uo)}$mX~d1ZLQqweyKXaHOoYoor3Ye zH1PaGhaY~&>Pv!tLq=?E3Z+u;4WJdq!Y|rmC6eBVdeVII`7y4htF@I`zl+FXtgdH0 zi$^#DO#zpy^}9`pv-MsM!Yv^0gDoKJvLH36dMg`?ejJRwyxV~c%ZbRxUInvZzXk{l z)%G)?m8K;m(A#GCMEI7QmrT~$B_ax1j~mQT_nlhKk*nr#TY(F?J7$P!qThouzLoUA zYHp_P5zW=Y&P-kO)7`*^tKDBWs;ASRJ-W@eQqxv+ZDdQDO`6WGFo~KTRX@4&TF`l% zPs%%F!Fw^Bq~I~CdH<_e$V4cv_mIfCQshgjyYD-5I-C1y=KTwI!g1qz@AM*^=YG%y z-Kw=MoLmKOMl<*Q3gh{D>aV1Z_j2N>uxen&KSslH){4gh)lY6nm8O9=I%_v~^*{@# ziuT5?gSQs5|BU}By~P5%Gs(%O@>i}p>VyoSG>8DKp(v-LRuWxKIQZZI5xoi%;)#BS zhB%>UM3ePiG+;nsQ^Q%?Q}Jhw7s`AdKcXw#*SYErU-E+K%cz5P;b|9!pC&hH=C>OGGfYK?9N+~|~$92t=hdffSjxt~DVH`0jm z5S1PUY~ffH=CBG>M*%3`2ZzI$0yv~?LV+in>xf8@G6>Ah&fD0T(pXn< z6b{63eh{Ip%HgD6awky14@o(v$RWMyvPHU8A{<~!FOh?drIG7G^6^bZC;dMZ7I2Ud 
z89pp>y0)ml&^2Km2E;;8M3%Hg!7^U?EgvE$7eO8SLJpct)S_t{MkCfZyvepk`<}X^ z@YmJZI}U>exgTS)_;zSq`i&n()XIfd37hunI16ZhCDWp&40D_Gk9_f$kL0?m^{sgN zy{fiFP6jY((wBc6ayP{(kcVhMp`=|^cpWik??3E)hsNTC4$QYYl5&{6x_-f9gXwot z=W??UZim~cA_!N&&3-am3_6GyRQk@E*Q+TgVA^G-lsqBfe8d6T^^V0wt81yYW6KP% z5s&w=adVkHGjR)m8<`cYf)epPG)&@-7=4>v)072yJP!PBiT?Zl@HsUb5wFU<`?!`I zkzD-xCDtHv50M>dn2!9w5FCf#Nw@c6=4k|ZwUCc)DsOcaaPI+`N!vizG|=<-gHQew ztsZWPEK^y|v+Gam32;^Mk1Aamcv`f~$#A*WOe5c8i?7|__v6K;EMT^A+_zi#zBZQ) zA7mLxvoR(Py_Zzc%|7xe^tcJW+TZ4z;JXZ>kl~;5sC;NuSu)ebw?tXVi9czH)dy=CYKIF?OL_(Nj^!M@h(2^kiT;W?*sL3$gA>ba#_~N_@gm- zdMn3K-Hwr;;qI*z11DUSzqt0X*cauHI0hm`qW_t$!@ULQ5H8Iir;i5^Yok2RU0p9I zNSg`vg^0DMn!N@kL7wi-54Pt^7J)VQOk=+>f=rz`RZ8azB_KNKE57SL#?KQ*B6yGn z)$jbi5AR{tlU<@=J^5oe$znrs^kuxb zpuX>Kc}z4!z9suT8O3@5z~!xs2>LYdMTHmZPq?ffy(qGZBRUKv87_67U%`ItaGyICdz`zgD)Q^#cDb-Nu zC+=%N>rR{c%JDxQaZDAw)A7N}h(eIUyIuBpqq|7f8^M8dJMAO!Qc4JrA0w$x0vZBx zCqwbFllh;A8VMiHir9>Ty0)A$1PUTtUeSs!2LboXb28C=3A@eIf(zd!Do04aqH5u- zf5zSbi8KkoV~xR%dzx(i8};W!i;BR2cxG8_D$}L`gwhE*KAaca&Z4N*z!wyF4^(;OXaFUk65-ZwM?8ZI z^4`H8q~S%NT<+g>S7KL2AXAk@2{ja;)*$BB({)_9}_8)Lk=IH2m5MpcN^f7c<@TImgEqz9OGy$ZU zq=3NYB$mL%&)#MpP5%K5PW&4%==#3_gL_}f|NpT?Bss+}2@`AsiI@qy*Zw!X2tf}M z0JJlo6*D)e1<<#i<<))fyIHB{k*C!DKTpuYIBU8&doEYq>S1+Fv{JA9&CU!_xcy0> zNLVeZa=d!41VzBNzjGSou>#NqM<0387+r7$@H{M=;ns)^1V~%2-;N(B?FaF*qR&c- z!yZ5wxYtLAm#O$7!fcs=ed%(7;A!G#13AReDT%_&-K5;w1nS z6L7EYARb1i!mg$NIfK7%9~w7e8w$ngv+p7} zT3$xdnvCH!;OHJF$&^eO+x>q5ILzOR(GCFpg?V_(Vjq!_{KqFJ1aSf$ci9)qv7+#3 zh#mNUKW~n^#1v@?`3G*WXEgl(aD!K-iqJkvv$3gJ?{ZQmGgw(iha8!ZQ$x%%|Bx!@ zc`b}a6OsK6g7@a3x3o3mN)J&4`zzk-7C+L~)T9NICz8M<;U3VI@^&^!>YM6mns@gN z9P!Z{5O4C?N57U00W`sQdyw=8i!*B83igKMI>jQg#hoZl01J<#B)8PTEiWGq@^!|W zAivhp`J8l`2$s_(Q_5lZNd})Faiyqo2r(Jt#~r+#p5=(1RpQic24Kxj021`kYo?%* z!pa)1{5NOn5<%>6(;vEsi77&xBik}GIG79?AU~hjN)~V!2kdZ4AFM?ULUeetR~*6||2M=% z><>;N#M@Inn`QX;Z!Cbne?vs1n-dz54v3I~lwn?eVKy58i`%7j)CyxKqsa>2ymar} zoc|5Y7#0ukaoLIc_`Dh|#4SrlO#*>e%j8fzDCvw^fQ0Igv0?_$l`!ezcnh_o6r@)Q 
zAvR)dx}_!oXo?Y1G3O=nkVORq9Hpf1fU5lQ01~kiXZOKF1kS7ve`WvA{e(aR1#W-* zD2S`!-$1`_S>4djG)G^${k>Ap{Z6Of5*aj}YAp}#TNq@`)vsd+Wo@Lvbffa3rYV-7 zCK@`aB|z;(AB;vHR9+07w2bYq?fyl-Gwp>2A1_O0xH1RX!6B5rn>NWof{jnI=kz4{ zdkRP#A@?BBB28u22 z4Eu`*@U&hQ2o1~*tX=LxBu0lM)rQ2>H9Q}c@!~0|>pZq2E4Lq9| z|4tCZ#<@+RLc>?9<>pD6FF2a@Q{FiD1|Husacu%Z_?5WrODpT0WPw+&5;Bqo1X+Ul z&>!kTWSb)(Ym@|XYX7xw3U4b+5T!rGo$bvH*@P|kH)I>qpS9BA&QjMzD9bsC!P{`( z-k0FHDh?rO!VHG!J|c7%k%>sD!UTZY-FF=3PZ6a9R69M7;sE3x$oO@j@_|Q}cFCUN zhqLuN#{Me;*LqZCUGK_lT;M$D1FD6z?Jy!Y=N)7uSM^upLhSUMMOm>f2@9 ze*ui;_>eRj6vdo|dG&&+LHzS2Dm9|EAVO+o0T>qu{dbS`kepF76k+Yr`kQ?2LSP*C znaz;?IIV3#QjcL}jGKY}Lf8x}=~ocL41I}ag!fq%#6w3X4TQym1`X1O8S-d`K)(W; z*KysdH3yWGl;zp0>qfv{{>wt>q9$|%HMszp9z03>H= z2cv!vB?YX7BWJWqP$uT)kFpM_bbnTd?9*H(c@Rh^lm^fV1-NiwXCDgppkzh(e4^B2 z1b!7;F*6)SkFYr4kC}=2!oWSlahWybf8Sc%azy;kyp?0wf?>I^L!6}TuN&j7EmkgHGck~$-Wk$H_y*9C%Di0vhn`^eKzlF1c zU{MIRYPV}h(c=S2-lL-`ZKE2eS!U29C3R(m(X}36geSLNA_{MS{WhjmOMw0`0j#Y8 zU0GzrRYuTB+o8ND8xbTRB=ouchjsV`pXa=Cz+jn@ta{HtIgoSR14sPFhAI&dy(VU8 zoD(_npT8BF>Tx%5J2Xo{4$u7);&3&9o%fn??G%K;@3%8{b=u|0h4FUG?>*`GT|y4X@G_C#kt<)~(?jp6TK`a?yqY&=PA6^w%@n+Q{uIr) zg)kLNv$Mt~*U@oE%+ZmJHanfkl|*yxL#v8rzP8j+%mHHDmrj_;5Kl%wsCdiE63*IG zHCk{y;pR@hLFLg2QV$Upw$5O0U!2r;7;T78}M#EjZDLFUyL#y36@eAYzE72*B zkBd6|DG|SIuYJ0dxq0wi=ehgplGjg_mGYPXz#`#wDm$3|lED0eT-De2^U69&UiZWS z5&P;TO;aksp+yyX@6VL4A%X0RT8&sK4Ny;{C_FDQ@bhj1(NPe}S1JuPmjM+N?eFRI zW9%6;XF-UHwBd6_W#tD@pUh{qhnufgYVx)TE|8h{<{kYci1p z9~0modngM(cw{0<#_fheOB2-on&V7-$hL3wYRKQF#I}!iDY4NnF!6!*N%ex|z zN(E|vmj+5Nqe7`OB7&lvDe?r8-$Qk*bRbPq|9-x1T_y7Nj`jJ$Di~5s1A1K1Shrss zKWfM1s+9t$W@P|y{^p=*%BHVBq~Ij*%!K6)-Ia^A!edtjVjP00Ge3C zFX}LGQRq|cQpb)bs#_2*%!1H^M>iaZ4ItS;Vw@BKpNJrTt)qkBpcmvKDoe}DAjNN; z{*>HM;WZX$YW>pxa7XB1V02hnL4o}aD{B%!Ho+bhJ5CECnPbwFm^LdnN5^_Oz>}^o zE02W_jT?PE5KRhv2gV%@nYe4DnuRRBcVWLqUKh+-4h5MAu(;tueN~lBLefLk@se5A z^^+Y=%dVJYCe_rwH(oE2-&@6kk*#95`PUnj*k$~nF7}_U2`Ln}n3Dq>vgP-*m|v?8rFEexLUkAWGt3f^>OWcYUggm z4zn*~kb=qaz)b-pzd_m{2qEucpp(2_)YM8)B7`!BmesPO%iRO?ik}PNcqWhjtqS5f 
zJbT&$Izd^3scZP_6)5i>Xo%?eg1&o;>IyF!9;pBH$eD3z$3|o=uBq#40&je>5r`Oo zLSsj?XheV{0rrLU{QR7ptz5TQ?v=1>POZ2TnSlNKBK>mhpG}u0;y2+W%&H(D4pNRR z-wwAyJ}>E$BR?az5U5K?wAxJgfM}_$0><`PpTY5dh@_;ZvOj;WoZz?jw&8s3{^oph z*3+Ngw3e3fGQY9_!=FO%NfKx#Ndnw8@0oqz08Ek9N&d!WaQFj=pY_Gl0MD8$E@^$) z9s{N04&!xqy+GIghoMe5g}^fU`FYh zb93p6O$-J9PN={`lvAiXh_KdjyLNEqpi!?W{}!QcOAe@k=Mz z{<1+OI%KkbGeM2i?)tn^LP)waaT|X?d&Csfg!p#~%Aj)b#5smRj8b+%=4DwdXt6T) z`nXB#YA@97M}BVKJZLU&61XY=nxroyoQ8w0t&w-XX{S_izf(lxv1Fih`sGQC zIZ`yN4&L|TldJ<2$ma*T#YT4+c578%+g_-0tAIYJ)&m<~0o)IBFoj!+^m=KSG7W|ts5uzVQ z-x*PQ!NvxqhqA%ZSUsQU<%~jU(9|o)ssbh>Nx7oNyMB4JzSMs0U z4At@1pGvM%KKb52mPj2)sAu9z2HN2VQ%%=c{<>-082P|oLURwcNk(?IeaPL3b5K=P zb&IP7&#c%{%*F2&guo1XKM0)%iMa))E{=UvGLOk}*E{ ziv0da>dx5O$491j*J6Cl6apOgO$gA;EE>k2n-k9F4`&G&=|da>X_onUkmy@LT^JH* zQ3d4)s;djG^HXGWDL^T6#gSkQo23%>M;q*USH(s2ocF=lg6t)#%6zSY_ev98`i1>0iQ-Gki_s!lxk-=LrR-BhKks!V$ zh~LuI*2a5Y-t98nc={HkVS0v!h-Cx<>T;_hpIsl5%y}>;%Y*D?a^sbGa-3 z3+cMt`j8j^9<27K3QLFNt*jUWZ_Zi1J=BrbA=TAY&u z?dN5jO~4Js?c^#&!Sh!K~9%S=YIzuaP|1F|v;T-&T*)tFM6W}2fE z&trRFe-r3mOgDua908r&D&>}XYQ=bWV-s7RTn~Y190iYedB)9#D*g z-_~z<%$ve_3Db}~BpllGx!rVepgQpClFg~nCk>8v37~=I)sJ`&GUI#Zvtm&&iD@My zs3%OHi4WM|45SNShKHYfZzKC00{Y6TQWg-KNIc>mfYvt8zt`UZzy|yq?>vpze98 zuv9OnEKkl~eKcj<53%&_?<<%I1Lv+*v~FrJMV9_FMrL|FijgI zc6Cy*J{!|BHNOc77cN$1i6IxjK;K7aW#TB?=kYE14&UM@J#OPb4-8IZAfLXJmlH`UfX{ zIf5xc>aPy^ztD+Mx|_awHc4O{F{ z7wr)Y%*@0E1yN7oNpn0*ccjs5l0NMf-00EO8uPFfvXz^+-}1kl#o53mrq|{^*_kG3 zZEe*gMEUh(I?8MA8dv$bYhFo-!rrf%{kAz0dPc^F@7>z`A9LU>?#NdWiwmr|2*+14cay2e=ooovnISIT z;FrM^FWt)_+mMg;W#1ip>|D0LEO_qiALD7%>r8y3b>3}K=qfDbe%oqOX@2^%(PNi| zxdYyOhZ7UitYq_WhV3_wnP8S`My0%s0_q7~c!VM(`LN=&ov* zwFD}G3m!s^!RczrI29HiqMJQ6DL9vxges$n*4m_)D5~Ozq?N;SCEiH-zc?lTsP>i+ zJKxSA?WV1AV=|v%ZRkxXHpTe6fJAU$xdNumuH=B5y&QslI*v{WNK2F$W zpLR@5N<0o=ymZ0n7~gK3IT@EjXd)|4Cw2m1Y3SWAI%+^jvw)!)FT7`dz{PLhcOjGT;wVMN*EO%GKUJk}(XUy+}fc==g? 
zD)t9)u~9WfXQ#Z9nrdiG>gm_dUAr|}(4VY9)mjhSYJRil07ZonJoyGDcJsIr7sxG# zAc1Jhed$*%(g?0iHG?{-2QjqF%(TojGT?*XQ-%F)3I0Q_Lckd}S+M_$%Lm5%6Q-9l z!L_?ay<2Ho3mv)(hMZ^2Ck4O=pKM%za{t4c{A-QKzwEm3LOfw2XjU}>Y2bBPu zC+-+>dWX19I0TQ$!2=3lD@tGxJSGAQI}q6{DG1U5#h#AKOcV9T;{@xff;;It+^caE z#x$cuTKhCKNacP9aoCbXYI*O!^#LMjyutr`qeL+Xg=eA?y$1g05-BV#Q2l=Xb$;qs zV9z;%_DRj5oYG*~l(z%xW>78}@I?|{n9#dUz*plIdFOJ!8+JkHwLIYQj3$L0tL#eH zg1%bSDUXjbs8qnFh@VYdPlU5S+6X8Y{Dkv}rnf4g1FBEq5%AL!6R3@%@ZiJEG$~br zx6&o{`N6_BU zu1~<2TayZKyfPIpI&+JaR+`&OpMflc|j=kk(xr#iUE|v3>pMp zRz(5ytQyfvyPk zcG|Yqwjc#uaKVz*i5Zj631BQSFUM;TA-W0pSX}Ig;s1h2p}+;bdJpbDj|0YtO(}Ru zp?V;~5+V-Z-5^R2)LItsOioTFXB+76&I+aXe?HAWj|K)P0;>x*aHl%3Jv)(+Q&;78 zaVjzVsjaY;_+JRBZnuRH;Q88k!5Zm*t_X;lLV;V*ipPo0X1#+8$~T@7pBL7`y~Q5x zZj@?Bt0I1`qab|l?hFd(0@b%;+2)?Ww#BQDPDz3Q2giu3e)*e0M7*QFHfepp0qf}3^WA;>x4=uF zZ^5zMB5DIa0Ek?@m$F83M+JCm-<4F~0VotXfNt+9+&vG)Hf{^{STG65D#U-veBSqW zT~I4}5E}ZJ_Z^xog-N0qD~bk_j+o-bu}HAUkrJ9h@$2Xh_++NwSDYHb#p~vz*2G7- zr^rT+gPSBBz+;%-U~MG8XAuD_REj4J1kgLeKQ03jkeObCwP6~nEd&U>IKfA=3cK>F zn7Y?5xXjY)!eSVKg2qnb5Po$3rh6J#iI@eci!cN;b&#%t)i4ddI9Q>8sa48-W6fl)f&bLK_NMivV?r_1IJla3%{0aLPRl#YdzCiIoRMOl2t;+}z1AHmgsU=Uk z=jCV-qKTEnwLmPG6$eFk>@e#xS|?!E5ZbQT-&G$_yzfz04{6yNnQZ^J7NDD+OpGF} zZ=3HZJQ60f}O&CSeA>huQi@_6GKBC|;993L6(GX8V5T(kh!GE!JbpCsS) zio08>-}#tbGIoHTGo4D;E<8GU_|C4$64~fr60~AUT=?kj&%s*b|q$oQQwHqov z82ESyEXvyy;!J$Y=5U@5V$6_f{Dlp!p%MseG$p{tBLB=JCMCt$M<5MqY8xU~%tKOt zr@nuzV7+`g@1kJqHs)$J{Y|Z(_@Ao-o&ew_z@v{DewD>!K-z(={0dq(WW>Tw1G2JA zolLE&PGWp$`k0Ho=9|HfLqgXPx`aV0gn5wN{9qunRAs;dPZROYs9MO@;v0i z!jM3tk7)k@?M_wm=HS=yz`n(AYTo4&a}jZrLmAizq*?!ZieMhVW!3UnyL~%nd-qE} z+f&>i*WUvVCsXM&C`>*cKyZl(X*Wjw-&eelsNa$I-4=s`lk>6{ad!LgPcSgwHBUkk z7UbhI7iC;7n@Sf9R4NjrJuL61NB z2Jc3V#SG3#ItJb56L@TAFRwxv9V9^Aq@ zdv2>g;Utv7Uvayy!w+}kI>|<-$jCVGi51 zoY6H2K&{{4egrgA9Pi?TKjDW%qUZd?Mo_sK4Q zRa~j)>PAP<2*=*vFG)k_2s0=JA3!+uDgMjmI=us!W=`9Ra}{|66Dp8yJa7VOdX_kz zOi-iG+YSmF_-B9^Cx{B&QX6xEb@h;wdW9Z`TM`|qKywg1a4t?r9NHP63K@u5G5-ItB>yeYu0sFAH3Oj-Ju2Y>80M?>0KlpvOugfTR4EAx+8MB 
z(Z+x7F2n7nDkVrvO^@0)27w#pSOG`%zUMnbmMy}Qys!X<^Nsy#@|{t&prF~fh!MiNL}d336e!oNkK~s8qH;Y z8anvG$8I;y|D`LM?^>_9XNMW*XbY}T49AutClu+!2d;3^hYnpT|}n4s;$2Iz;SswqCHh@)C-qpyR3cTSCC3cYzd0 zp}#C13`#ppU7f3nrXVoL0qJ-lAYv-<78q$GRoisS%(3rWttoh_z$WX3IYIf1F&?SKyc2Fs6z?!>WgjEyMOxc z^wb!XK|H`77HbSA6;4bSZ1BqTwvh;Hh<}S3Q4;X06S8U`$OUxDV4U`+@Yy?PzH4B- zi-spke*YHFF%J0SIKH!H{GT-KhC%@Xn*TTa2GoJn1mZmi;r*8b03HJjpcQTlkOjf% zR``LW%hTh)({rycKnGm_GED9d&=}2pE%rY+e|&osX_)dgb95kWSl9$|Fx7j$YdKBt zfmD;F^O++dmC0{R5xr~N{OPPu5pj)|8O00xl*McP9{&WKA2d|29euKm3Oleh!^-K* z^ffi6Dv+Crqa{uxAfV?ZG5#wF0gV!|V<336;L(1RK~VWWvk=~d4NTq-I`3wN=?mz8 zMM!Je0Bz2kwigz=im|#&S@>G>IeM{d+QA|{%3QdZnMr4nDrAhkue_U(TWO~bh z0x+5hpv~PsnTmIu&gOj{349``1oxp$371S_#F@`0dQ!>&}U`_B5gp@Y%vLlWk%>jU7^z2TJY~a z!^5er_Vqz3E2oD5`*_j6fB$YRRYW*oqu@i0umeUaz4^!k4LYT$xTeDZd$QC3!OK>A z_WP@@0MOc;9c=vvp^{kBf}Fkg;ZWUD?42n@^~a#0{0hB2i^?~v?L6dz@X+c2xf$bLhp9pOc?B9M+<=vQjg*W|kkGjq_1+i9n(f9$Yc2rCk zO$Z?r!+8$c+3Gz^vHyMHchQ6d9^oIso#*{&c=j%gcRy(F!r_UMx~GqVT#?zl$h;}1 zR)AWnJ|~@&bI^&zZuZ|a($yUn`qH)Uq!CRS=$W*|u?eG{LwnGJM#z)yez`(_H)wY! 
z6_-&&wA?XW&Rv(*t_^PP$G<_G|9<(-UlW0RI<23*iOg}_^`$s62j;;4W5f)`Db`5- z0nn=5|1SXT!V0bs#!xn_w8?xwKuZIj;U*OTyCmK=U^ZQzv#ALZ^O+VtYqb)WPP z0+!~u9zY6PbMJmD2$3smr5=VjJK;RI&&BC6$mOUny78iU|-F zi#<*C*X*c}HD%jtPWOjq_I*e(X;`4$ij{l$x9Mt*LEEzeIO&bi?SMk)_u|iHb3umn zE!DK=1Ugba7CS_;FO24<-A~x0DI>2Gh0Y85mLRo(W8X}{XvQN?1YWjR&sLXSNLXn5 z&mr|5`As2DEOhy%(|!4Cp%e~893+oQIlL~nL@^!r;THX1xbiD7cF>Nikml=SY1-ia zA~czzTjX3I2UEc*@&J@iS9`00Lg|CgBV;*@;*6kTog4C9jkD%1yo!JBSHsfvoDD4< zVdmBjA_5{M-?9u5a)s0K@-lI=AwGk(eY2CLR$|h?YiJD29Xg_l5sDuuXZ&%bBTz_0 z9Aa=JHDhTBKyN{iBse-!v9eV~GK5_P%hq9%S*tU*5mMkRRR&yRAC-CrM!oElDI5dx ztX>(xIw4k?V#8!oDB6p{%|2&)i7VnTDuJ)3|5KLw(*!PoGx3stmmMcl$k0Bm`g!XT)H|qIfvsmPNBN-!~{HJ`ys?eN>f1@9I+cK}C3rxUvM%qtPlJc|`24s%8cY zq?#pKdPYbU^9at$qBc8XH&yn2r*l<~OgDvxn*o(}1V1?4w>BqmdxCr7stJ)Nok2e~ zR>M1{Ox)+9HDqX_NMfgIFkvy-pMSQtel(D8Z*M1zY>AV(?$NU46X&L@Et@wR%j9O2 zWsCAn*@nIOI(;s9<2a4K&^6;SIY=@O(jfU*Iy)3avz!kwTPf>BWC@cnzWMXHbGRYF z<5E{mp-*Pn=>WA9+y!coZo!q@j<3Y72)?zmU-mq6anF%0#+`uFEa_C;JYWBfB+SFg zwx13r4KqE<2USygvz-FdWM%-qNvf6bT_3umH#x5dE5|OKnq$nD*IKm)kv z49ugXcGdCX?oQ_JY<)l(+59Y;B>cxR(fxkz>BaSRFL2sk=&aH)L+ghw@-YwPVvlgE zdZ2*&yI4^Kf4_)~Y_ROzCjlORwUb3 zcsDEiOJcMs*xB>XU_<)ydx{rK?z0yuDR`g~iKk95I$v#&o1Qy~UxQzfi%||^T4M=( zsr|&6T3+JcOpKk#_mnatLVxqwP#FknsPsojz>!}=c2hw)X1rh#L{`G*(^FEy@@{Zp zSsQu~Ecms-h*m9y9$fID*HO_NWyO2iXs;FziGGRUBAND3N zrIU@|6cwNK_Ya***y#AS-wZy{F_wkQ@8lcXYe4fg^^#jbW{?Q;sY zELrgA&jSc!{IkF<5|2oPo=ilX?YrF94{m-4M75+?N%Ji)L!|p#RQV&aqY6XF-Ue|2 z+D6gHqRzdU^_x;^WFUc zTDer%!0|-bBdk&l5FPR&;;+B44v++Vs3Jv7s~J#qhFMEHuE}5<`-(~kzROwp+n$h? 
z@1gBim6wZsEU}>SROb`3O=BOP)#{aEXeyOE~H}_Nt z2?2}6>R2pM?5BH?{$h_HhUJ0PP5%xQ#;;XUL-!N4@{O&+Vdw`v*vKelFwOBq2yKcz zV{hXUW(K&W9Plt1X!X~_8v9?`*l_+VQdQIFa53KEo_=WF7{LR120lNGqO`Z&L-r5K z9QQU$V&W)e+GL=u+NxqjfDB_@t%szj(%mHrU|kO~EIHtPEsHEoe=ngQEK!4#cD|v{ zypD+H84Hp}CZ%Pj=ETUo5ktnzL(1cHA=A;MDQfv+Nb<$#-3iNh%e)X!JsTRBUq>bT zB+XrrP=3nn(a)7Eiz!z3yi}B>o*)Rcptjz^R>*L27kubs`alD&T#I>sd?MfaGc7Wy zw5(qd<5l&o`}0Juh>UqWi3kG|vsl7aD>(-ZLEu{xG4xZ+)^=<|~E{6lUAGrClG`*x74-qD~eV+F4nx7PHfX*;Do> zg^wa0_>9jbFf!R6CN23*ZYpbfDkCzFN>%_C^ZJR@(3c3M4ej1m--SHaOIpdUoa#9P zgr*#=g=N1Sdp(B~8}O{;0f#!|>+Bbs9D}eaj^{-4>*=Tf%26iJt!$=EcXsHLj z=0*O<$oPKJu9v9f5Vvho&Bi}1=fO#E3B^y_nfF&Er?Sz?7)jH#80(8_-u@z}KvCFx zfHaS@bwvsQW;l~a;be-W8wbd6Jn1{r)}g6mAgZhZf*~M>o#wlo;@b!HDisvKb}YJa zOzVRhF_IQ)o3>JV`8(G=M0J)nFc!<+!|!qXqi7NGWegCANNbaAIYHnlR(!Yqz4a>N z^r2)TyYBf#^WJ^d3JMB}Z-`HkZe_Yr2B9)QOuU57Ly@02m*o-0IIvDy5@}vEqWW#E zw3J@jr2$oX!qCg5;9zlvV*QJBFj}OjS%Q23@ z(i@ZC>+-(N>n-X5!6G_O^luxophF5F;WC*5T*VMIEV_=JGG&8N4`x-9f+(2`Sxn43 z4lG4+X&-@CL4Ty)KJhw}3ANw**gdnetMooLbQScyOG9U29fR@Kv>mbWm)SPI)N$u+ zc6509m{c{gAT+av=zrT1jN!zjK`lfPoylGMB#8K}#isEHEBxYmQ4fd9H@3FaSY>e4 zONhU4be1wHLiNe^2M@RhQEvDiV^Y!d^G0&csMy!^F=rR;M(Ou-5 z8nR5ImIP50x4vOVd0$V{_sb}TQ}Bmg_#Bs#AIu*MRX)3Me+{ffpm&us>#bkYLfUlU z98@HVdHEEU#^Fk{qfS9)Z-qWCEy{4ybcilO{U_`IWQ9@Pai-_+;7<5;!k09Pszt!Y zM;}*Bc35_N3=Q*4*!QMiJGZLexXW4EL@a1Yh7w$SzW+Ci6imz~SL=_Q>>rRI)Ql;W z8-i*6WSfPtgIB^ODc~yFaOf1nd$xW{QlV}65Xr;^&q^_smE;(%Lq#EK0j+Dxe}9gM zN?s{}x7XDK_Rxm7eY(Ai#Xg(>-c|_nLvx@HRW$U7gOEls4aj)oWh+BQ=_nY<`>e_- zOw%SFQ*wKvQU>dUI8gL_7r{x$!#kaseLhPEaM6mP00a--D{B@%sK- zBGw{)v;p%iWgyVhB;p@Cg@@%uuHTE0Vm45{cG`0Nn1knag?xV#ajL_1tcZU2k%!j| zOirQ5Jt_Bqf(IV0VK@)Y4P*SU1*=ioJd=e(+@h#dM^0*R!a>2$j;J+z9=c}Y6Sh9j zi6qDcicFoW5Hj7CAN2eXb#Z&yBhNY!E_D?WF)wdmAK5HIF1}fHlBmcfw}VwD={V?f z&Zh%bkzhLa>CiWs+mjS?5#}SS?4p9%t{WB%hQvM)Uw947i-+*WwZ@vxC#muJd`L7cUv-b=Rqu zva*rh2qxG5DKmeMj&fRze(i|E)<>yTuSovZ26^}*wVew5-rF-htB*-a%*rd_=Q{XE zWuy9N0{hDz;qt&7wXkU=n3lu`r!H!GhroK~z{%V%2#RdI`g&apci0w5et2Z82ikwK 
z_Y||b@Ag!X-FydwipndbV1?{jD7Bx-KP#%#YJ{nuYq_Z8s$unHM?m~M<>f$_;BALM zh1ca~*pgs1<70WJlkzrd1_a^*XQUwx&15_2v}zs5@+gyTO&Wv*#5kp@dOZH@EGZk}BMCj`{&3a5$v*FJvB@m({p(f)`cgSs>={Nw_WwiH3xZLs;NB}`$9?>*B-R14Ck znbL!wLUm@B1APgDt}O&Qmh}n2?=ZR}J7FTMBwRma-I8#u>r&vQE5#jmGFSn)v6kk7C_Y^@rMS*e1IS71}!S z@Bxbta&q(t2+?SmxKUY^?E<)w@$p!;G&acq4o7U>{DkR)Dg>L$@`PpF*@jXwP?V=v zF74yR0s-MS&C=45MpmCuF(l43Mf;OFUMDX|ON<~#wvw3rubv@yt z)@3ZS4r<_u(X`PAL6wD;QU3&UVVTrOi#4y8fuf8F2a~8A(sEvtuhv?0XBQ(eJyq4a z{dSxD;_3!-b+G?-`ZsGvB@-Qkw5==q4(86$%-wX6y_OeB0R@HW&Gi(mA2rZ04HXtP zAbv7DHfB4w9~!FD(^^ytB}Gkrz*#96zRreZFS;6fxA{6pud@&sy7c7>HaP_il0WNV zOjp2qbc5lc*-rho_|v@Byvf~0$Ic(9kNO~p?_Z86rHO_=ZpN*icc3@f5q=G9hu9`Z zFG#??RRe{6#1p&(|9wv1?~b|8WrsM=Q0U?Q0{{%B;L!-9Qu^8K!}FOXArf8{T4dr|AoVvx!g>rbzv0IdXU?+O|2j4^GmO-j(Djt#ga^O-3LC!l zH{hG;z*EKL##3&h0<5vIacFo%R4Jl_hr80JKBpQfqj;k$X0j#H;Naj^lbu){bX6xS zd}{1Je6=-VCnX8;6$gS*40B$F2gxcfd0>`*Qe`7!95o=zM7&Bav2)^84bt;VpWR#RH>&w`Pq zeGdDc^b}>p&(F`Qx?0GnAPH(su{ja7q?Giq$R#+9FSv!3rFulOLz|GMrveA6HWV{M zksh<(NhK1e3G?*R(xOUv=BWynINVTMX%K?H#LGZbAcXxJ=?WU5tKkgS$bNnQ37kV+RQ3XpKY)G8* z{IL8GBL8bWz9zk;0BM2Zw!B6$u5hKkz2P4Iq6nJBK5@6@TDPclQ(!r_uG$ZhT% zWY3Un;Y;;N<{xwF-(?9&$wKE3$|=)R4O+)Ew4tUCn- zb-v0hL=!Xi`T68PG6jpGF7hNa^v7Q{tTR@KN->3Rw?PsiWxpV;K)oLpJuy}eqrkyz z({JvyR+OR@=hBlgtYH}Je$@dL(gL?A11<38iK(r)tO`ERbtBWYA4$keCv>jUR4~0P zi%h4gzL{KjE^d`L^X`oQJSS&o*XnwmxhbneBR1=Gk=vBNSgg@c>}15u$;pXJyMh7o zFmdm-Yb4w6MVPw;|Eq%Q2q?Y56O_=W*$RM2i_8p?sE~5n3xJA%BAXH&ez9qGlscTwdPn>!n3p;0hCw=wXh{Q&45ac_4Edek zORiaXAjUrPHW6HMLP>+nXkVT<$Bk~foqun>d(OAN<~~JEV=yM2 z6`#Ng%qT*qra(z5>h=kUK2Q^<1b%aC!yn$ z>>`yUtJtyzd#A-tW1bBK`#a+Qt6hDcy=OigOGv z%5Q9wt8@L((v&r7^wYkT-Nnt}>U|0d4hZOnuzSb8b2=6}blX$9=t8}+aKrlTSDf!q!J*I@ABP2(WPHwVmV3?Geb_2&ZSXrd&` zqT=)3(0v&)a(0xMz0R1dIpK@Hf9KVkqUfmcz1pIqSJV^&0)#X&ArsaFp`)f<52ou0 z?Hsu8ckjk8{@w=nMM9u^d6CieqY?|2M3!jWooqy8ug;io`wHwzd95@x=EbKHHB-QGa!*N^Lt~6iyrbd> zOsu1t@o{=^M-=cniu^LG4nP**@+;wr#{m**5lZ4<$Gj&FNUf%50s76D&K=Jv6g%d% zw4AWK$X!N~VNNS8atZ;fL@2-aP6oIoSY#XwkjAWHWLNJGf7GDiVnf$QNEO-0D^Gqn 
z@3bx)#Wy5Rr+$7shPgiUWmh`(BG}04g31is2;K@p13?2_BF$K`I3M$i^C}yNR(Z}t zW>b=K>M(mg1Rtz4)}swJIj%=>8^6SU>BmBEq%i0o*#vEMw>05Z z#?1wPf#GIh$JjS=ArTh{jdb5XAH_va*2L!Soh%*9D)|PzZ@46H&}CO)pVUDK%h+iI ziQ!A9m+&0bk422^%ZN7v?8cDvbS2vBO$!5-r>}D1$X#&_bCstTWB1X(`B%H&_ z1>=P9K9OX)KkvRWr&Yr%}+H-l4|;U+JwaUl1- zCm?NtGd(~J%nnR|8(8j`5LbC3+ZL4yqn6!F4_m8T7Y{x6@v#mafa?2QQLsNyl7Asa z{<@rqZ0zpKXUF~gFl1Z`oR8FN_LOMiaVSmyWG6YN+4Jglvho$YVx&+>p%`7op3>BY zw3@^XzrakOduUT%=(*0`Uf}k0LZ+t^^1$_jHWHyQn4p^X_8<6jN*Z<){zKgsuLCk2 z;YvWPH^Uscq$JMHI$FGj=Obm|uu)@vTbvA(kTS_wD72u#oO(fXbI;43WdU_taL>=X zld^|jLMPVM`)?>&kHKE=9XW}BUUyD6mxpe|=`wW=4b9#=QU92V@LMSZ1qz(@dy&8H zh^CiQXqjkXOGP?R5i=4H-JQY@_s7lSOO4!+BXT3P%XVl4_)H|>`A^zrA2Bf6`lEcj zGXq~>Mg6!Ps2O7fdp5v+4O1t7%zO&9GX8~%2#!zSkHBCy8PM1D!J!1mHd{_BRi zm2vz;`PGh4T*6BlMMVD=+l-=6vWQ12OjU9-_=gac2Wp>ip){GlHH(#y5UMLZdm>Tm zuyriDFoZ5fQn&K>C$!h#fV77(dzA`Hklq2ofkHi<{lt@L@K*UIs=h$#<>k8KCubU$ z^7rb}a^~)R(f5K-VEeE1-`ju7_Cc!I5UQIJHG)8~wL$-dV+%ge3Bk|U4_C2Ja?CkH zrHa55ox?%N=}zl}@5}voeHJ%~TQkUOT>YUk@_8z^do#5T^RqKvPyi%Lh+S21LN2RX z^S;SZ(KqxIEP6XU%Ob1oOM1+tDSiU|CP!&;hvc-m8^nwLoOVB^oGzr)fySkjHbNl! 
zp=80E!XyoBN7`&}Gio?5v)x6~_KAlHJ^U&qx8E6H)H+lHw1sB1=M=2j@gvE$!=rub`Q^naSeG1?q(i&}_Dm z)(rPlkFdS}U51y4AG*M({cPIBjDkJI;-}xGR+XkYOW~B&D2V zd!J1L4Xv>vzl}*_K6jr)$IObJAs-Z&h-T8>-cc8DdalE_Yk2t+K7~}wpu}ZPk8%5S zAHGT&<5|eU8C|%^kj8F&QZqt+Ufuzif>rnA5GghE zJji_mL*tuP%ojbbX@|=s*Zn1ZbZo-7xzo1Gu~2JtVp8~+`pz##G78FdQq!u$Z3DZa zh2i9$+6+b+?Ql`>b$Kf$8oR?v=&UrHKLB+6d`38Z_v@+npvCF0fl02HL(jhmOOVm zB(v;#PH-8fk&KyHL1cB|!KN<_@(_Q=#9+LmV3;AcVsJtYE6`U8#Gz8dlgz<2C0#ku zm|xT)&qT}8o@ekHP!R{SaQ6z!OK}27UV#w;#qO|39hZ7gu+5mA)cOkIl9NkGar-or zSpFSP!RQBWN(d)=2vFfVD(brM8ldii`)*an-BYP^3Pw&`>{Ls>yWV=2)7s!EA%}}A zM6tbz7m!u2MnBCU9Et~RRg-4-NyXNJU<9QII}WzypIUy0i6 z0Ua*8Ex}8zSEOS(c``K{qK7#v1DzIy#Hc5^MTXt`NI1wSy7aAr0q z3CmVz*iXCM(TIs@&*u5l4_mxvP7hp_c*%3Jy?nH7Xxbu`>g_r~Z`r7qFN?>Qx$KJt zchthF>iRK93_e63KKE(BizbM*i<}BOyqF749L?iCvQo$GocMPo(t;QN#D74(Zx98D|9}P+nwhxKWYE@omogDO+Rc}tLE(xG1d36~ z&nu19R-`~noeM5dmi9A-`X{jcgZG{(L`nqVHdAcnKrC%5N>ZCqMcFy&B&GgT#O-Ej zMFT8>7Muxu6#N%%#W^e0?I63R9;a$?u_9dQ*mgu;_ZT3oCz7O{4(-CnFIxt=tx$O! z{b})B^E~ih<}YOe=4tDanvh0m1&Fxh;as@osnHLl$rYN^ zEL@D0>dHpShtjCesnMK)?tQLX^JxFNqi$t4Zw|yGf1(h!=&E4pN!Wae9pDs^5Lb&W zy>erMSW00{kPs6i+Y{+Ri$_3iyYec0>khyJJ^j&XyOn9@-AD9LIEl?ZTxI`s1o9A~ zMEU9aqxSM!-2I;tWVj3Gh%-Z93iDMbtaJL$H75~O{QB`gcm58UCxXeH2p96UwNZG+ z3P)wYF}{VXDr}@j4-PQ6O(E}_qnTFoh&Oi|m1(UBc zf54L^VsFT2(+ZU!J;uLoe_i+cB&OPwS+P#|gQyWV2N#TQx5L|N`ct)yqsF_O`zqf$ zSP1<8)FQf!PrWYH22g$-GeCKRBBNl!kYg_)cl594lGkG@{dbKhmTKFFf)&eYEnx}X zTI`t}RVeoz+4f>4h=kpWs1E-c?uSfy`|M<$6(7dRDy1Y%a(gCB7Oy;Qz_0rgWVVL~ zg4R({)e64wQUzJAP|Rs9C#9E_q{l=d%P8H72)!IhF8v`;9b47P-(GgDN5|g$5ZTiJ z74}5_(;dR8>y5@?y&bzoq#jEy3SPW>iaMk3SD6_{CR8n<6JF#f5JUjSipBkjS)`z@ zjzOE3^pOyVKM7iv`H)@(S4g{#=v#$9xh9HOceyTkJ3clx-m)k9!Uj4>9o`HC%Vl~( zBJ5g@s&GDyvkynAIL0%{7^+H&05=3L#y14)Wd$ z4EcN5g`{pL7yrJm4E`OtAEFJi)yz!aS$E#<m102ONxF++> zbS31iSBrZ4vQC+jogEzG`&){|$*-lb5!QI=h*Fn!4o7l?yRW@NvoexkDU+X%xb!Nv zW;a5Tc3(dW|9nUjxJ9O-I(SVWT_=!cLdV>)mupl%xjrm*<3tb$*WqID(E?r!~jO9LvQ0$1;(K3=2 zl6D}k`{V@AZ~8}|;s!zk$5%+-NxTI>*1oQG)wxWV_0xJwS{*5^y<8~r-93{4Wk$V^ 
zH-r~`t>;^om%X}%6X*quLfGVZ*jRte&$6^#O)^6=Nr_@bw6h|j3mAaI(I_oS{!rwb zV@>e2aq^tsV}2Q|I{}+IPPSNcyK=!gtL{$OHO30yyT2WE8Nboq?#?mu`+HfkbTO`F z;65aU2O1Z1coD>A75ypJFz!?l8qpm7cg63TubD7>jStFR>m>x$=m7y9-q7@GMI9qE zqN++yk;;F~=k!NvMfnUt%ji0s|lbM`nK6+gy8l zB?`6ZUL!j={3Nq=t|-XYXxah)kzJ4TJG^Kuh)~*pfKvM zV-bu0g6|pRRZ9+~^B#wx;?l=F#%XoP< zYFsFxfQAK|*j0kZUZ;AID72mLV^$FfCP=zyzu!7IXi@lxf^;N;}E*&(s!}C zBhlfwqjY%jgbjIyNXp5Ka)+VWZ9;F}nnYf$bvc5Ci$lc5oF=jAz)3coucftT4Ua#J z==^37oAAvqiaXa)GGfU@M?oWSkfH!mTF586_P7|gKgn2295iIa!%ye5pwynJY zJPr@EfY$)bjjRrhIg8QZU%f$@6McC-kad6SNiRlxE7;w7T|z^{fUApy6fCp(%or|p z@Dc)NiS0(qR}Z#1`-M1bKS%nK5TFy``^HC4=t1C=aZ?^w@-H7ALh zqML=#3%&EvEi4l37;hEYt4u3$j4CwIL(Vh(_LQ;59dW3zbuidkdR*F}_b~6rXWevt zw%@k^yj=%#cz$4Q=eThU9l9?pDnL?J&V`bJzF1m1q!2HioZ@sHAJ^U~X(U8LCy1FC zS-)H__q;dh;ZQXmSGASU@S{7+@WAA7SHFB-l9Ck?j%2_fx0ff+`Sb!V8x5bZdB+jH z;m2Lwx^3py=7YVF*@nP=Nj^Lx6hv$6g9h(E(mNLh6F<)ZQ%Gbh;LfkGUysp$7MN2F2vpNyN7KJ8Zm8Bep`jr<1EV_&jaMet)tAxost_-^PUD&bP{YBU9~z|ZCnXTGl^|t2W)z2 z#$Tgs4;y7Rq@*DsV~~8#CfczrPfE{M)(lo6+^7j-EwCaY0r%(roGt-x_3Y2__7{CC z=XDNe-uIjG2|c4n5fReD3G zKR6kos7*u;uA^>7x3BxMnLnE){YvgsPfqh3l8|tD4cj;UL%V6Zyn}s#!bQ%*k5Nd9 zAAB-q(9Y%>tPZ9Qwj4ipy`J{`Y$HkCzx7AgT{TS~CeR*itelA?8Dw!p*Y6oSkSrx% z7x1vxdihcH*LME4*=q4zq(|hs&yaclQf6Xy_GesLCMijASQ{_RqDLxI`L5!M(n|WC z;C8q6i%0X)s-%lhb+AIX+M{0|C8@*A`z894+5n4lmic)qjcu4lQ4(GoXCd-n`lap4 zWkSk&tLakW;x1lP#V3yd_pTL7G_Z{X5jdj;mr^ZRng)7zgPk(5Ly&MX&KG)meKl&X z+PC@3&hn>(NV>)d=@(pAxy#{?qy6g%D1ATob3b?So^@x@BcfqQP~_nIx_gke`0m^s z%s&bFwRrGr1zbahm_w-Q=!EPWeZt!y+0ePzQBy_LNTELQUO+0E9LT|rJ=sLO9G3T{ z5a5EZ&wT+CY<`LR;B63XAA!Ru%mT{oRek>v^I`rlM7;hwyoAt6(ha4@*jGj zjCRQv4p;}9O%W*ATdbYB=}Gfvcgm@s=D18*Pdr{ua#AKHf~wl~f93D;B_jT*BZ2<( z9jHzZb7;yNRE_zu0wHw$+rM^|i#$kb$SZi+Lc&Tq1Fajdj6#03TjrI9518T-5?jO; zJ^l$B(U7nRY@R3L?D@V8-;KZsnbpWT1w7HeAi)2mfQW^v4FIA-S|BQ9^pHCVaXegQ z`?T^|_5kbi;@-rQ=Zr6H8ve6;6um_X_Tx1X#ovdn|$pfQQ5dy;ux(d2# zhS>)+G)5C}!EqP{9;mY5>nRBzaW}`ijl1*?O7Nr%H@Tdf&)&YgZGuf=ntNWi^ zwk9p+P|5=ZX5rzfv0%n4zvh5RJCdQ3m*o9_guP`{9AT8Mi@Uo^;}#&ng44J)5C{+m 
z&`5v~TpM?H2@u>ZB)GdnfZ*;DJa}-q#mt#=&dgnR-G4M|byfAJ_3i!c_d#Qsa>32< z*udv$0nr+=4v>HC(1IUnNe&tqi0+=qCx^K&AEL!#OIvwSpv1Ib0uY3rvckjDx#Y|e zT8fas&-WAcZcbQaFmBg}dfH*bJ3Qlw4RuzcZ&NrJ(wpkU1&l1~?E{{9PP;KT4{g!& z5C7CatrY+Ki>wqNihcQsCvr>mbEcz{+*N1axx&J+Y%wz6E@fuaScL#b8RW6i2 z59ZMjLziRMbD#yx#hdmA8U>70Jw@EU;Kz`O)7R-a@uZ1UD4|m!J#6y9iSV*A;aIqY z?yZ-elz)2jkae-c!%N9T&uHR3kG9;}`*+4V=4`_H>A0GeF{sj_YPXS8d|jvl0uU?h z0oR^jzYhX=Ijoo)qgKYJe=}~cU>YV!31gR#Q6oSvQaGceF} zZ}f!nn%*EprHLYq>I)9&S8(^mBkrS};sj`da4X+PSM7NlWe`SPv5go!dwOgndx~r< zRm_>LoS#etA@O_@?Q}jT5gJA zdpL#ehI1Ta)A1Pcr=~;3%o-_SOOwax7>0z30)1N6fpWit3cy-eg>%uRlD4^rzRE*z zjnY$-B6zptHWw+=p|{nv;z}^MLMoMh%q>G{U2Dd3 zwLkQYKLdExVmhlBSk5+Y=6Kz^>{C^JQpL|dEjxR%V8IGl+E-5b+jX0f$67_npH|^v z9!D}VL9&twOpGjyrkhTtcr^OBc5O{WrS~xmgWOuF6_p6#G)Zs~7?}8rPTPKnLS@Z1de5}glLLew;*3TFF4GrZp9Hc0__lVaHaOXq8nTaW1 zCbaai;X}c3Cqu<8fF9Qhj`LK%ih?APk=`c~j9WRJ@k5a%NhAtF@a3;;dzrYH(_aNtXd^yAL2BfsWh`QQ6k^> z)DC2m(&G`opY7Z85zIe{JHEM{TRT0*JPhjG3Q|Q|eZ8Xj9$Ox_IL~HRyU|X@Zo}co z0w;(`r!PTgwJW|L5e>VXLv%>^x&`H0+&Sfxtk5gPFrs77zle<;;V}@(QMvVFZdDGV zEgn-q-4&KNgb>)*vJ6hXxKt%bz%`Igcf#Z_v| zumO6pn0+>cx|Sma^WG-<1nsV2$Ss46kTtF;4#Xafp>x%;6W znXnaJ0uD{L@)jv3JXR8-%E*tTXbmZ^cL-T&Vq_Cl6S`Lip$g$5QE(ldksf|q;A@Ac zEOae5V0V1)hI!h0mC^tw*Yuukj#(CB-%-3K%Op(~F}5!V_ZX9-6W;pkAL>TL^Zut^ zJ7yg!qdUkmw#&}?yXaJK{*pS5?aISMJLWJ-s*lo&4S_Ctj4u=?KEBJXfhU@p<8AR}$iiJD>`?9QoC8 z2tXfG((x75{zlTSqip1YxA@l5g2&Nlqp03s?T1eC9+n1&h5g_rIeJHcYf9nn$8Q@Z zXylDq+A?Ka^pE{D3gs#v1CB#`ewmq}*{zQmMRXH?v0vGb0ATlYfC;X_~%d5^6k$J=i3xH&HRqL!t zFJ~bkA%}vVBwsVPJ&;idSceJjv_&S$E5cn6|E((3CYpc4uzkqZ?K^$Q;K< z`A#$OwBzhCSiRb{PiOvW>jVu0C%~=*Ui!2Dl^M!T)Q4By0S2(FX(vCinstXDB;*Yj z41XAgf`}v1il=KAb)e^ZF>S+UpsZ28yNdFW>U?ymQx;fj%f9sqD5_X+BJ1+uXKkL} z_tIlRt~A7TmCP3L+!`2r>Xbk64cRLhoci<19IElS;#)xcfm6tFTL=N+0gr$J^t9JZ zc6Mm;bL*AUBH{=W5LJkeUQla|sFbTx5Fv~0N<_(d#0ECLgt4=!h7M4XF~gDaO5l~Z z-bPm!;_2LMCd~7gT}DO_Z@5j^_ik&P({X-9qM1~9b(-eO+M3^wbRX%*=6`$4jNnfi z$KN+JQP=OjR1C;RiuYWv*~o6q>mEhN4kV?>(G_W!wmvRe9W6A`#{IBd=A_7MSqT&V 
zXVo<~hANj&j(W|q^|$Y&TOOSt3z+~NH!Y$ziEpA;xb92$+L#V3;lTIk7nq8U8#gdJ zh*vX|;lk{dd9*>;;9!5;r%<#Dxod-JWxb@6#a-rQi5nb%+*zUfbUqLk5l&Jk^zRkjd2^)njdEx-?l8Jp-&-!Apg#DHku!A!=C%;pc>nVr%9G0T zc(%|zEbqY1>PWO{pY|$Yl7yQ2_>OwrFk*a5>|JWh)Ac{Oh9^M`+)D1XTYS}?^e(x# zoo1?b6;tRlxuAgr|6&Uvb}=!&qj10fA=y?#;+raeV=Rac|5ypMkryAhr4lD?sxSI|33 z$qH12=Jtfp@ruiC7v`yF6Q#vRW!z~7-6efG8%IQk24=UD4#d**d^~R?ykJ6y4v~p! zWE)8sUNO}_-k>6U_W5Bv2_RuQ*UuXInm(f_si^d%TA=1BPBFtgC+-#IQv$|wpo5U} z2l^95mQ6y<@%x{)jmjgrhJB-ja zxnm5%0Te+=nt@T6ZUeWq5Ph1x zuoGZY`9fj~mT{KeAXdVJLcTu155!POExoegfjZk|IBuJsTnT+#Dx-!_4H0Dx<4N@ZjsUouP5ajTop;SIMB0#1AY*XLu?bQe#5FX>=;=+b7 zsP9&$NRvU71cV9pWjT1kvhJ2n282OK)BT%Gw5D@c9TRryq{|-qg4R!$7GBi{`a!a? zbPUfzMI#hk=X4O-COCYJ2@}zACql*Wp}nbV^Glxsky*RtAhopzoQa7l1(m3QzizH_ zYoWGsj#d~1ZiPLOJMR=jq%~!YNV3>_k2~CYG-NLF@V_+;;7=6^gh)KG5^PQJ-0uta zo?s72VMck__O(7x$q0P*^EX8SMg{o=3zxKt76X3$hq0UiNHgG8z10Jqy0gd(EwgR?*Z362k@5MVn>QQIBQy;P+XNF@bWB}bezMTB?~(D<_hg)| zm=r7Hc+zs%a^R1z4Jn?B_JX7WPoxJ{ZK}-wvU|0hyQyRg;%M;V-$46V<{OXYtH`{D z#cUi-0=;DnQ;9FcpT~Zu-*ofveV#>ACT$CBYr&!8DM5L+6${-tjAh`fMIjmUq-BZS z0eoB{k>3#GZ0kWxx}EZ&T-BztC3E9EAPN+65YlGP)=4+EU~Qkpa7X{x+H=TyoMime zPSqJMsOJwnupV-WO8#ybN(cz*l~GB576O9!e`#yACh}TE6!ifLxg#Wj+5@q1zJ!Vi6nou z@;ObiaaM|-DH$C1nrbqrjV9@}D3-B4cZUt*SX_0-}RnRopN{NDkuROLJjG1Se2Hoh^iEn*G%K9Kgks#H5MmPZFqd-PXOhIoK_d&bF z=q}iB!gIG_m5U`0F4ztPNf<8XGUP?GKr&<^i*z<%Kt-#nFJd&C*SA76{+_^zyXIsQ zdb&fiSM^>E*y4sh9&$V>XyCuh^d~W%9(e=aLr0YsU<;z8ry4nm4H5e^tvdD zlg;m6e|`2-rzf{>2ul`am^RnX{yC!uzgQiwki}g7iE>rWWA9elrv*Ur3VAq{P`#eO zm$sLlOTO9Onz?bEsjws>j0r>sXZ1QEl>;u z1Oy(7D`k)G)NhEhI=Xm~ZSfEwnghNJFQ10C(K&HSv?`cgTm1KffIm?qV zvRrGT_L6ReHi(?=j)}y(fKbg7Cz$-0OveYM_NM<@m4B!KYEEmBjq&m0m{9vlgQbxj zxfamuv?+V!B_d2q<}P#(Z`QRJ2_8So#9bA%m6nIsqaCprF3Urn1h2*feZJ z9r;P>Hb^}>B}XclMTMW>&CJZ8zxO_c#b8U@ z`Up_S%=F|8Jcrg!_oRvdQjGz*c(vlwg96`LA=&!OG*_8vx&r#~A{&m&V^35sReG4q zDy2}GE0wcAuX*(^@bBPSL?EplzrHBG!*I~rEPdRD2VG%1CZ zbqTjv*MdX)>O=IJ?;T>@msx~Q(8Nz~HVFxMubSvB7pjr9U){mAJghK4q4C{#yN1x!IPg_)g^YTP8* 
z!Lxu$YeG~dQw(P!nxX1urXjBS9*y$~ya6v@HciJ!l|x0-3@2cJ+Ge?SK~a9Yt!aI? z=T_TVk4!`DB%u`=!i1?~{4C))eiUMlMR|*M?s&oDG9BRSa`&Dq4LX-4bV)l;EOx&f zLF;>ieA3)FL_Ml~ATkb=L@qPDI@BSwxh^6`wQTvni!$0U9p z7YAF))trQed=B@oZ6kd4kB_k@b9S=PX}PE0jk{0L8T~$O23L8+P#C~DDQjA2$Id6D zOm-E(RD3=so#MevU%AIJMg*7^i3QC5O*7dNlvvN7o6T2LA?#)9&7}8Z`ij@NY{4>y z8fXswXhmgvw1V+Wc@FoqSdXY|_@F>^hIuaacYHBVudjN+H<}cq%|a6JyFih+<)o-R zK>E%^n=X+SI_>S-y%sBq|FH^B*?4e*w!HYL>9n*EO)V*ka3S?y@At4Uo2aB4L^8f7 z@mdym>jl!A&yN+B>fBfZ)wubJ_pDenXK1b`%Yk!69iGk1NtBT!X+xBUlE2_DX(y3a zUZfIV1h%$!B>mD~w4r8Ya^2vn%L6+yM5M6CoB5%hIoAw^Qz)$U}1+1_e{|*ww zKhng6Boecs_hkub{hSLF%rkoSxv~1|;nnU~GZ_A=m;1Xn>|3Ns&r6QVVvfsFyH>i~t_}uvz#6`_M(NUx9RZiX7>m$rff@y zY9p4A=3(Y5Xjia(#Cz^r#0JX54+4w_F>Xzd>k)*EF5n@dV%O)n)tV`)p3jSc0{h`Ex_-r)_EOvyZ{9x^XuR0)nXh*jSv9)h4LD5W z_1@uwp}HHN<$ap3a*+TX2=7)y8*Ni|rK1bg34j$d5SGWm+P8jijSh0^LCI;G~6O99gEiJjk$^^$%5jY z*PMld#bsMQZM}B~qTQH&_|S!XL~EF)0S)vT`ye>F8xcrjgri}h^^WY1b(gY(j@HLn zXu7NN2hKJ%?dV>!E5QVlF9Qm{@h~aU3c92rLzhSJL8$WqESJ2LLs0~)9_i#X$s4nb$02?dC415ZwsJWPA8uD4})ZD6+ISx!sHok?>LOz@8B zOkFPi2j0Ns>)+6`%MtmB>Y*9khsAeP^TnZkT(u-!g40p>gtT%V9>RuHHQSfGjLmV^ zBz)Mwdy(V@+xMH};0ETrL6aUQ)15;BDk?v?4!S{c_7GJsJQTvp(_2A%GvB9ur{IBE zV%KjZzG&3VhSBh9EB=jvs8!G-&%BX0m;*UJfgU$EE8La0kw)qj@w5Qp2s^NWyvF>u zlsR~U+~)H#JIU=^WFB^L1yv=b-AjGL^x zSb#HL+n(Sa^JQxts(|+crufqhrA%L~EcF(40=6$`? 
zv%h!M=bqS`2UU+8ZmFH00FUyc z>WMS|PcM%8zu~pziu#H_Ay~W!X|My(wO*t(@6T{{b}~~WT?jjS2`{Uqf)d$lce)yi z0`N^kzyf5IZgy1GBy_h>imT(7bPwG}7Z>H!hfgK_2+Pxjh}CZ{P7jmUHooAd-#W zb+`WW$>F$L^CX=lnB?`Gr6MvX+34DM9Iw-7p@N7+1>PqZvpLhT(jkqe*GWD+3AH}; zFO?7*_iGYn%Rm))eC|GToL_J@?lcRO(BcOKEQ2|HUHD(en0E~;lNgUY1R_AC@P8go z%E6c(>K#NLqNG62zwWpNP4;BYiJIZRr+9~}0`+)$E>_`G{0z*ZQHlHCfIT9el+BM# zC^gCNDbOSD#Gow>#)B_X{~Yb{?+^qM#^OH=UT?wHmqF|8=Z$ut+kne*|BR4FDT{%d zQy^A~c7-b@JqY4vKSy9NW>OpO)YRV2$Z&y~fbXO_1ma$&~5Y;pa8$ zWiUDO*j;PM6=np&`anp)b!&b@1?!t5>Y_^{P1C-t!g6*P!{^ZPgEZ|DN(0*TIlC?v zJ8uT-i(Dz(9EV7IdM3K0Uwi~vgAFe(_YX4Q8U2moR=88*8 zi1@&;3J%QzLPCQE>N)HAg0pnS7VKcJLldly!$NLe-wG3bPV{B zedi3Edj;bO&ff@88GLxTb<&mBC$M?7&qD4(gz2+(a-)lN<7F?E~+T{F=N z>usj8PkBQ?Vr_7g>w<~x|D^>0YBs!4+`4{<+4hB~HEdkW9OA9ep@fGLo_0?T+3We2 z=9?eV_u0^Qt?}vZ7)IXB_+s~~0U{_@owGkWHdoxU zi7}xUEDWqkNR`O;+aj`L)RBNz6+#}psF4d6)o;7OJh}m{`(>Kwf+(B{UGa?UdYR|r zOwn7MW5wR5Hq=}zNUik*h3u{Ch_~@i%O3C8E9r);%=mSM1SKzz zVlVfdEKQA8l<#LGkSVCr!jtNC>~1$)O=alvku_N~hF+a5`WhX#?lwa0$uwkshg-rX zu!10HIZE?p_oPiI!Ii8*ViMW=xDk7S#j*17n!0jBd?SMX^Vvw~`PPkBRer*VzRU04$uCZ@b)CR{IHq#p(* zg4IoqLCRr}yzAnVHNJoV8$Eu9%}R(N?pjpK-F44d&$pZV*h~OM4x}bzhRwO+zh;HM z9UGI^(qiPpaA5ncpsLcBN)EW;%#2pqo%n1Q>&K@X`mOoi60)&{@&tjc5^|b1Da+k< zwYS%r0Vm+37YBPKY^Lmzq?b|+1YNdo1lb|z87V;FqBWgL4h4Rrx=%XCD7@TmaVzw4 zy0T0|i1PhWgua_RVN;WRIFuX-5cr#6){s4&f>ppYK*t!%q%SE z#p&V2SH)*aNSjJiz&Rf1P$sdIJvH=SV+j#7Vj_7xqc-?j9C&fqH-A$)PB+rC^V|>R zV6Z*7n^0IdOB=u5sWz56HH3`#4bD-1hKSwPKDjAAHFfwW<>amLE>n9Un=oq)Qe12= zN!W(E0{`N%_xda6y;+h%M`*7gF5;KR(-TL@zICU+8Kl5-=YCFCr~H->gHlD zQ}^GjjwR6w8Y&^K$-^5{d~(8=h7?X|y>5-0d!&cqAVs8nb<=N6Qev%`Hkq@YduK zCA8NP4xO zBYrn5hmEIoq2ZwQZ%mu&UhQ~-h(O^Am$kkGf3li8SbjDakq1~Ib(;xnt+&enTJU7c z?(x#=2-#^H{in{l9~FS!S25`LVJp@Ew)>Uel8896pg_jCBcIT=`Hvh@2nyWmKP6i6 z=^y-z1ndUpvVY2(nKhy%nm6dvdy1TmpTb*`(S!uGnMqvW(U7CazLPQhE*(0_2!b#gPKL!v0iL$o%r(A`>`wT>o5!C~jz2@}8Rr7pDmqa6U+VbH+dM z#A1#kC1WNeuKUEBfKve4l6lp9)r{|beCY4%`nUW1EiFU&X9FzqM~V11pBFezx}kzS 
zGW6)U1Nq)#-zToyvv|_56~yhyE{Zs1RT%#)YRICASF^-pcCpcma3fA|b&uD{vrKbStkNBJ{lrTs z4MQiv;in$R?!lH+D`>Rs7xVt^xO$TP|&k%fGp`-*qwwf%kBovOMlMmtdz4jJ5q%@^I^zHVY2*oCIJw)A@ zh7s9#M?1ei2BFHYu{oao@P1YXQr)EHb_}fusxapx1_g=uaQ8%)icKlg6PaQN_1Vz9 z(bllqzl@|;O{!tGX8A*Mlf1~8zf4eW@G4J z#l??4*xI3?#(&3v*Dn5~9!ccY=*Yi$MeVmR%zCb>L95Gz$)`xXrB*avlYmRG_3rHa zO+}uumNb=;aF8MHasW|<9bY}=T@?Q%X)UE=jklf3T54#AI;u!9pZym8`~dBsY>DRdC1RI_ljw1xPzTjPHIel8`y#w7B2#4&mE2$%sn6Z9TDgx! zUCH5JM2|e_hWgVh{%})lo^Vt{*M}=yajz3o8TlZ@2+++EbubT1anEJWrn_Z$ZizND zA;x5O`xbP$ooYcN3xUv>N9pru9nU+(~EP`$b{}-@C1}cb6Vop zxBLTe3*duVm*sdp&m}m(I;tjG#J8ip^F*6dc3=sXaYNX8*UW*hl#YpOly# zJaS1jZAXEWxPTO$H}OGn{Eea3V^HTGzRn_8{vZ*jsC%o;K92|+7Ad;2h0#Bdn&)qm zgu!pyaa?iOjG`1l@}U~%lTJ0{<)99tBMhzc%N%6&QS`|MhfFEX;6 zP=xbDZ{~NENxuRjF%=8l8YrtAq84S&d|P}*TnPRYEjay4MfYZ~a+TYh6yF#8;G?LI zj3>W|Xxt+VP=1`55oO|KSzvb`bP0Ci-rgcBgiBy}l8m!9S5;E$@i4bkCSE6g2$u21 z5>fG!4gr^iCpsE4ZRJWKj@Q|VSTEM1)3#nQ<>luG>2Bg|$oX2bFc(25FIn)HKvW#b z6}@AJ&vN-C%pmU6B`}jm@n=wM^3)!l`_jniY#@V@HrWU>eLyOfVO*)99zI__dq4(- z%__KLJc@+ZhZ8v|KP-rF4qP_`-_nDVKP8``^Z3l;ujN}aMaev*Tsx5a!I+mw@*LDm zVK#%du*ubG(R&lO?A|HE&S5Of81N}`MEbF)i|3n){UWiDHQy#9Su_DrzEU*}A4VZ+ z@vC?0d#^*`XF(ihvcW@^A#DmIpgO7K4L@wxKxDZgqzbUngiqvKT8J#6Y4{Ns^pp@$ zP#_&<`w`a=#c2Bvaktr}tVy@j;FDxx@;~GAGdU?AdIDE!`)59#IfZ^&9Ay2*ROW+^TtRWV#}sG{E_8K2P^Dch&h9_%z2sb=4@L3!Nnz ze`ZGsPwf%VoF{HnemC)Xqkv+mjfQL1EHH}6GZm-;5yD^$3PIU8D@LUk6d@&)83w!bqUZO<-y`MRf$8bGSS1x0b`OI&21;f$z>g z>>3^>eM^3;@VF}k07x`Nx=!4;P3}Ucp;=4i4rZFaa!^Cf0FqAuHoSJT#9kBLXPVshf@qTc$arH}akt<&yBMiG)LETu=^MF&8W+US=??|ME* zyyt|7aNT(+`|A(AQGP?RUu_+qqRYvdStuhTW*p6KV|1@pSz*x$D?gho zFlZQNthhkLyeac+%4xYOXs~4e|fzt4X zc6vLja^yx5X{Zlu{O#aU|8ZrPRZZ9SU+nxYq(pG!N`fJ3SzR5T0A)DJ>sIf_UK4S5 zI#_z^zhF^Cqmud$k(XZFfcf!N(3@|qGGbJ&>3BP>QQKT)+z|w%>7x9MQ!7O@GyS^=extKm9(1c_>XK?| z?a)`k>e*?G`FQy>*-sK5y4<;vn1{w6IuNw_R|^@lmu!mqZ?1*$Nr`1wz+|Ek%$ z!^ljBQ7Ujhy_}ZQSz&Fp$_xC0c{##bP0I1 zK{g2wJJoLpcm^*LMOT*HPa*nI+iCI&dxQe2^aPna>h5V+bW)ns<9kl@(o?skZ4qP@ ze3`Iss`ge8-5&z5Z{GC7C>uf_Y!p 
zuRp)I)fw5f$$aF$)AMhbP_FIzxezpv#kjvUS-Oxbte_H_YUdm>?E;w}v{n4*sC1gQ zWnk!H#==J$;TVvDlHL;mQqH{@892hM@!#Q~7@me42(oY{gNXDX;U4L#m-YxVQY33r zu)HSCktIlT{rgjX_TEMGP=g+}VYcPjV$_ERud1^{Rd7y0J-Sqk=$^t>u6J5~rhtyo zVJz|Z5uWpJme6EXj)bDqI=dd<`?6Fdl-phADDxAD1 z6TtMxpa8wkN}}+hI9}Q*Z`*GaVpG^OS{TscYlNnMjgoQKmQ~>Oz9whT0|OG>pNT-r zZFfc&1vXWm>4lB=IoxGsm{aH3l6-#;6WO&O%~4xhJw@l+Ot0whFMmfn`QUimnnBtw z;(!kD!+yU|n(8tCzZb#6SAM0ndN?JQp+9uVa^gbn2)|Py#7B=4qG?IMk1+W(A@S%n z76uEyFI;}LJt~WtyB#m1_U#a}5>f@-_n6EeDa*JN<3dSH{bM*-*atqh9sfhAhewDR zUq-{M;xx?RT+ZQ~*q&Ue2P-%7iND09Yrt_MatR-7Ak`5>Hr7@=)4Lsi61jet^;`P7 zk#+S_?97+`X^)aC!SJpqVJy>7VjN_08C5UTZspa?;*};``8SfSaNSDwhh;T_M3@ zJBezC+9o-lU=W~wp7>bQXA$j)8$S?r%+Ej)GIZQ`gLnB)9N(Cn7;svRc)W{uF=piE zb-=E6QoL@*4@iO!KP%0n3Jw(Iavc}4=c1c(OM0VniCecY)5W-_F3wAUs726$1r*>^ zdx~)rqT_%h3zYuN1!KM@qxUQE@SVyN2*zjDkThzXbxPddiinvyRAQJ|!8CbgA1%`G zC-cj~f=z>m%fBktI~}Ly#e8nZ$S0@p&%FzoTZ){W-<#=F{<;bca?98Sj*Is)=Axoa zqXvA(-4{+%KFw zKYbw}^17+NRV67bA`abq=W&IJ=*AI03!8V|=@`gH8*E0}3O0Mk>H4Ns?Rk2)PYsV> zo7D@ulm(`?43T{7PE=X_6hJ$pN~#uQFz!3O_R7ZuQXr6>rR>7Y$7mJ^>SmiH!Cz4C z;AWMZ5T|zx_YBa?*1>j;dd1$%B}Lc6Q*kAc=f3i=6&B zMVXn}?=ScGQ(x45Kf%7d#@_u~ylQ+~`0X$gPRl^}pyk_HmVm$1D3be1RX0G&ngP33C}j*`5#k1B8^qf-Cx^V?lAu3C;T(+QB8v zM}CWWWAI383b?v)eO36@r^iSLW@;@a6pM-Dfa(0+<>!iUOHe0~$f` zvZEI%tlTvvKJ%FLGc(BpB=J^zYCA7V!J6OaCxL>rdPT~B)7wa>Np+M|cK2Ngoq zk#*$Q2$B*2Z;krvkFJG?aGcs*$mG{V`n8uZ^Ho*klf9e75I4z%Wj0WGf_Qj-#H=j{ zU*0IVjCo)y^~d@o+lkwOG*aO1thpjt_uMga@lorpDY^fd>vj+Gs83wYc{U-tF;=L> z^{(#UA<0}@ztS)mh39%TsW6yTG=D6$vr~CD@XF(zv4e^ zQMvbJOclBxPcC93Yrb5p6+Vw%Zpu>`^l@c;b=~WUtdOnxm$4v7x;x=&e(&;S+*9-o z2X$u4YFHTH7x`#R`5Iy#VgK2dRaB#eH$@};+8`fFHm{cY>o`ktL8dY2;hZsw9#=cC(ou`P)7OU-V@eba7Y0>heE zvhr_crm_7ji&x3>pSgu~Uj0aTEH^qF`LQkPDiHfzb^(n8okFXWOX`i-9*5iM}V9J{jwLVNTbp3s+vHqTD}8n}`TYUZ6_fe~O(n z{Vn=n7>i>9_#DCMHQ4_3uzdF{5`qLuAN6nZyvDRUfMnW&BS}X4+-iS+QfD1YL0Ev| z`C@)#MNJLU261eXd8qBWMH*k`pgpb>%kvq-k2w7Z`O=T{>ILwa@;fRPitsf8PK-%K zZ95_uYfkg~^DRr`VYuNVo0d1B+Exn8N?Y{bp-0&rP@3@6%Zmy9`b(kA{?qpgIPGAO 
zI;}Kq$6PJah`mx@4;{xRk&AbCnukQm(G3yIITGIcS0re$mV=739L>r%cRm9=>KyU}5ciUd;ma2nK!S}{FVJbdbKbqrGh1@@3FSZ+VnW=dg3b&! ziq@8L`ng*Uz&_SJafHSXyielj{Xft5(kQPYU%DeW8kb!fftbrr<2TP>0$SuAAmt#T z$77qDZ<^iObJ9yCEtP|rBBAMllHt<1@lq_YfS zFq~_q7Ys6!SG3c;c@exJ}3^ z>v`x&p>WJ#dW3bnFwzySkCj;HS^(gk!T)Jr;?Ny<;Q#7+IH0xw_@G4ue+BoQ_1tUa z8Kl07kO~@(`-F@Vh;Z$>Uyc{H>7+>Thjopot@!n2`%SiCh`=4BwfTC?W2R#ZFH968 zRZ{kaXARoFsQV`Q7SDcwus!ZZ_Ps;%CQggdO18#N?(3r6hdeJMh-De`?7#Ojbrq$> z02t>j(3&{ZBPl}M{t?8TmLE&V0-uvuGDzoRO}g)JU$di`vDlO$J6t~lq+LL^&*J5e*4RKeym#so6-m`= zeUiD%|24J$tAAg;45Ob z-S5=xG3E^2#6Wo_cqAmz!Y)=@7cO5aucp)tvY<3LvEK*&`*cZ_{0eYkb-(WaVxPi$ zL3w_E-9LhTW(nNZ-TB)7PpafTL8CfNrSfn1etJCRhQJ%^LP!n>ndjH_*YuRYk*JT( zjirm?l10zOpg0N2>ZXy8Q=pe?#rw~{nXxHh{vi5A9a*|?%CwzAD&_K zGXq-ePp6T3QF=5_q7mgzmFYLK5=;ywV<>R|XhYv4v`-L-{(jTe(#$QQw z9gKMlw9UcspI_O5l6@FZ3c9y(DMn%asz1AcH)=dzs6gaeeQNBSHCk{@0c^K$fyTkfxt z?wHUB&O>8@F+v0lZD)-LFq6_HyJDP~Vk`GkaY}Agb7ne6+ioVS_dTErA>I{cPGPHVZsGH(f3wL&p~l>Ceq zi`M(#p4?_b>SCjPFIf^y0qf*en^?%lcnQuU!%rsq#)%+!Bdg^G3ni5(qYX}z>8t%M6$EDNR@(+cHsOpq%#&8Z|Ds2ZF* zi02G^8Ai;xg{ZU{sYLxg*>Ov0X~~qpzWG_8K_O=5=9_o*f5!_CfAD;3)zD3i?F|alZ89MT)(&ezueblb zU)8B!W-ztqO3l&;0@*Iyg*E1%#rMQ7)BbDLwkwbSGhHVQ)S6=E+shb-D2%VGzaNZ1 zH_awPOrbF zm(mT=UDDlM(%sUXl7gflN_Qi$0qI6k8l+q4@?HBm=iGZ9&%N*aeSgW`dpXyfV~+fd zG3OvL#K(CyI5Z}^Wj$}YP%{;slr&J!MNh>jfS}Cq#^lIIPyVeN&!2tCOTvK{m51kQ z_mvw?0d|Qgm^r2<8pv4{SIK z5bFbYVloIpPbNSj*7h*>5pg|H@T8xu%b|OdkAPZ0Al5x7JsbZl;!lMM1S^h^Z_sGx z(;%m%UPyNVq3&AgGb6eZd~Ws0^q_#?|iSFTZd~p2Lp5crB+Q8nfiLoS2!}KRCh! 
zxjGJ_rhA#($JPWj2Y(~jFw_m-PRwERfdy@|1QX-qw2!j|zleQ>WP!=vWJ0fM1>JtD zwiMDH@VF9m`{NH33(Udn3^^$pLc$qY$su2IGzw9HgW2dNYxTliIkT<)EysUTTfm1u z%*IOy>v!E2O57F@ummGmA(1p>O$4ayMjCGlWZ;LyVnZ(GRLR-p-XrC&&(KZ=Vf|Kc z0?eS++ArTfMdmrw0X7Kx4DfzDJiAK0e3i;9EkM=TBONDvTz-ZS)NNS6>vrJRN)VGq zGHtujdI<8{Rfsc~J#+K_8!dnjCA3iaqb1WyDARinB3k6zzINLM;lAuLeA%tM3sdyP zM6*Br`qdA(RNC58VEgp}=a_mlg17C{{fA4`^zBn%b}a$WMTrQCq#$bwKo`}asHaTgzg=jIsk&VcqdJc#7+xz+BoXsaYZ5G7P9-LS1Us zL}0%N2V!3R>1L@mLI-KQZ{}=DgpR>jwBnY_L;?IiMw5KkgslIr(N0eThDdMNYWe;j zyTk}Qu7J;wUxD`z`KcrS3oWpcCMA(x$Da4O({oxv_;zQ?*@89UwTuKdQ z$7w=CfTxW_PJWdkwaPA^5fM!%Vgp;@MoKWBhnxe&H+hliRr`=tK`L6aQcQX<<(s)< zWA}eqrRRU{Nel3d+w5$-QVc^dG zai^&SWxuh&ZEcPacbtFPsuL=iRYQ-!jq`1f<8S^~Fc$U|V$jDZ13J0?9FklTm`l<= zN>y~6m=*z~BqmKmF(`c^DSQdq^FSchg<0-R9zgO#=SR9PmWeDT#;f zY=7|W#h*N;i2{9DtW%kGI*^G3+&ZhXr_n!r$^QItj;Osa%<~~xDOe@qhF020f4;yi2 z3QWS+WmofCS^D4XL<-&WSde^%^q|CfD{YKhgUbJ6EXqq`-0OwIfej;{#x_K!SJehD zpNF&222NeR$k!Rtx%MAE-(G#r%aI#5tRBerT~lk#J4pE!Qj+tjEC;TlLLg(pUkzL( z1ycmnL`IQev~Lww_5H^*O+k4Iz*0JlXqoRtJV%Cc`yU6?L zirgL72_OS^By`PRps-zqf)^8CEHWP80LNE;5WsCwhgH-9>Uoc5iK&|Zq-zMH7{rnj zr8=YT``!oLw#a|f)r2FgWcCo4?ns~u=F@Xls(4|@f=|}JVju-4BwT(zV7C@H z%eX?Aup!Z;kJvW#i^~48+BO;g=~gi^!2WdL$Wr{=+mJ5J1bF+XWm@++%)zc0HD!() z%K|DuXqEZ-0z4mvoQ4eyJB-Af-=vNOOg^6Eb@S4PGP`11Hl37zbBzDNWG=iRbr?Z% z^A-SHd7=(5AP}Aue@A;~19}JWam~nw{sHz71%-79UkE>Vkfl3df&CD`o5@7B!WxY^!Vv*C(q4bJm%Aa*dPRoGVaiWSb z@laN>q8fGg&jnDoziG)bHrSs+*uO22fnq8rO2B(vR@KQut%cSir*l|4Max2AvZD4V zrU9@5v?{rHBz0ov7U*3F^Xq}Y9sN)J^l zL3Lo-N`JaF=K`51+(2rCC%`6TE922ud`T_^>H?o3llX(j>C zZAa8qn#52^LEHFQ`kyQMhl^#%2tAi;`4hk_i13VHXq?$|=z?Q70b^&UmtrYubMG$+dE&kBPfL#9J9F?!Syk(VOetvQHtyuge&R zRXb_XZB7I=5lfySX=mErzdHFqa<96Twnla!Yi0dNRK>`GS}9OZLTH$>l~XGN7+0&wzST@|eGNR!2&VRpPUa)b%@=AIG*R>k zsYN8pHiL%-+M$BMY!iQHv`n!^KxOMT?(L{MS^{I9xgOYwQGI79$vg&=pR3S)6p)ueKT`C2v42hMLn4TmV>c>b*IUTZ3_?L4eC*f zF{xlYLbc!OIOd5vw_3_{HqRh@HM04pRtYcNV(DJ!iBM351ET*Ssx;ain-b8Ih}e`M zsbC~5SZXnFGz|1Z!8Y<+n7NVfa#W&W@4sbmR$ zJ*#*m02pp)ylYwBVnz{3^W 
z#NG`!axg_ireuza8vf5xVUSG|7uhtKs7?7mw84w$bXGGM(B)wWCX^}ubHV>LL_3Wb zNVom4j<=c}*_1Z8@s{!`SIMCwUH z*rSV2asZ|;9ZoBcF{4n1@}GMd0+anfI0NFgLmzJ{nOOZI2UE{Cm6%TfU7zQJnlg(A z7@*5b2{u)-XfY;$BP!CxG&nFMA;uu0dk!1kZ223v%)H{*f8&q8$Tf)vOCSEa@jI7a zqwPUGp9=nLdzReAIif`x&Qpi959y@c{l zVjal^kHNCPbN1_4O>E73$%F&Zf94v+1i^-9w|+T9KeFiAyG&FEh)18vzht4~_@?2p zRUpBT(yN6^3EfQ<`Xbu_)yC*xVdRAXlvEgNm9jJ)e@atK*imYIQx1dglJ2F6f9G~! zH6cORrmo4Ebh@vcfK8`eNz7`dK{8c@n9oNkV6{XSTc1eE!yIP9CF*+);Q;)da_nb! zQw+GIXL!t#6kJQ;N}n2YKlVA{-zDlF6eCBjiFuQ&JYQG^9FMY3 zqA7AbJcY+F6h4^XYu zt@=r#Eb!&PA#NG=pS)cORWNfl%5-_zVzNMdG?I0wX2q7nL5u94uQbYqsnCE8OFiiZ z<-?avX$g3W5ur!;N{i`|>f)s+u!0SEp+K<7AQn==lGRAQja zFb}X+7*YcsY_{mer>|_o;yCPTyPxEfzqr~dKdr=GsW;^H&4&f^?l`o~}3 zO7H%pz(NG@i&wW?)Gp#G(=VX$rSfnOR? ztQmiRkoXJ#Wjxx0^MKP&kXk4m7ak=WX+UFu(a^2OR!2C7ZvAn~uabkmPkSAQ8d9O+ zV63NKxCo30)*7yV3G5%B>&J*iFZ8HgI)P7uChBR4NDUg$cYaGOvRjG}TS7KPcal_b zj6Qy|T`uDI_d$b^4BNoEM6d*$luUxnBE#@w38Skx~K89C9}cKg=IuQBWimKqximHMbxkaL*YTH_xNX z%E~u3d(4gbf?&x2ODRasI{SW61IA*@faUplmjg$aauVz(vwxbXj69|IdJ6+IcK=o} z%Xw!F(!Xp}X@f@|phM#r#6Syu^+hT;AzpyY)_BcZX>M5K*fPP!8`(QFegZDs@(^J8 z&FP4pg+))pz<{`@2Ok}evTHJ#-IZJQkz@V^4qGbkpH|^nOfXvNQ2088GM_~mM>1oay zzcG@*lu@0}i@fyuFETQJ6Exz*vFxKA>FU$_)c&$DCjS1+p3Puof*`5jrEl|hv4YUQ zbEjA~u4?D*yNoo`AL%;4ec7!189rz_Xk@Xug5K0DED1rMyRN?5t7&WZOLJhdseJi0 zejgnZLkn9mcAvy(>I^U$AQYz@(znaE&372(Amijl5Yw$lE^;kJ+F*227fzq`mXSd5fGx&vf*?>t7HslnjR#6Iscq^n!&IBHZ-J)cJpQXqL+ z(ycjh9Qb@ZG20F3(iHNinHz0dfM1w;J{3lb8DN_d3gCj?e;?;VAgYic#BEw2^rW|3 z1hdlzEG7bGJeEJ#`8AY=TAFD^{?|@EHyg4olO6+P*E4*Vk%yl*#@kQCex6&7cASd) z__ovVKY!aUc7F9!Mp>D4F0ct>DtY5M>&qCC4u1;>B*ewtQ`%J?+v}kN7r231HeW=T zqaa8W4=-n3G;wQlQ`JyC)Vb>s!+2E$OA|ED6|?uRS^&Z>>lF{-o*oG`Ev*Qnb}x&y zr(RQug6kEFF6$lBKYSrm~7~i65f(3X^#_|L=OnkP>#Av@U*n z8YjNNeFDxkS`LP(TDB4e*H2Co^(9%BS5{OF4MVS2O^|hT zbeJ z#(ex}aan)Vq3Mj@=J!KA=|tBJ3eW*^V!fx9$3J)Y4<{i}7P7PVrJ;eSsG20^>@7R9 z!IelMH8qt)c!jwW5uSZ&;WTx8rysY3qy*5D93QZr#@^})M`BQKdM8F&36x3Z_pXi- zlqd=HuJ%#Okg;t!K$fs`=C&^^5E&-tKntPps6J@y^+Odc@aa;vJo=iNn(I*%fsoz& 
zpqSk}f1EJHtATeWRlQLZZxZK?eKCanFE=Id>X+a1yHYnfEQaoy1d#EKX2vV$3Ro;9 zc72#a(rsB6KM}b2oF6C-T8j~`yKCC&F?_QN()%x5%pXapG$*WUc5v9q^38~vu@c37RtpyKxK z^Qgqw$O!3L;I*)Jg>K*0`$cAjrQN{~`hQo)u=hCp*)RlzxTtZX%n*`crdkx!ZbKOV zd@%dH7yR}PB|s}0z~DwwYZZk-J-aY(cAg|8ZhBK^!st5MU&#KvQ>;6gHD?xw#yD2dg+Q3waKduwpON2f`}NaUHefkpfx86A z!=ibqUFboMG{LaU2)grCxwU(0bO;MUCz&JV*N|mF4&R@a&-i_il9=^J*9=-8jGXox zx2r|Xu%+3%n1wJTj*{*qQTrJNz*0dA=xc=2LL_hC(1P9!fBk=##l z=y#`JE8V<`sH2#R3;W&mVH7@-j`^cE>CNqI`RbPWY{Q|AJ8Xr2a~06s@&)h115w zaS0)$N21nY-#w_11$MLOA6=1DfV6FyxfB70#GUNrkN30y5V1h(3 z0fez75;5)|IqbY6U=zAVL%<}D297UdyjnAgS*)+xb;9sIT}5ZGVer^rCXT|O=usgJ z>=X803*G1#j(FJY6A#BQImMK!sTaf4D+et7*d?-{zgSsu2JIsx8u}4;o zx|c4_2S$L714#@7Z2gYSt39IaUdOvPmd66ixA`xZ;Wy_Tmt43vhzyMk1!wpR`~Qw^ zgzVt(1NV%gBKOIbYg6&lrrC^1v5V+o6f}GU>lpE%gna0x4dx*fdb4TDyC1NQB$SvHlrQ8tY8HA!nU&50W^!k#e0At@0uB$I&ZC8TH6X6 zD$)#`Yme(EM0U$12!-olW_fsm70?+N2>F5gz4q3rm;*e3Ha$6KKQvSdZBC6#c!&zV z9jd7rM^1?T2Ce;WK|cLb4;F6o958V1il7L(Bc6|^=rv*_7xY5=wIdwAoBxL75%FtL zT^)#CgDfE6O>$aVd;}_?*+Yw3rm#P+P=`>fzm$rKiskN)&6VrbT_zJMq4fw33Qzjh--?Wj-nG*DWp(O^jL+Y#bj)oX^KgYiZ&0Iy^+Q zi@vptL6zY%6b%~~y}N8)tM$JYxkx%qm@2B?Jk`?HHXq5TTvmUD$Fet7Bp8WFIv&BX zLw*kFhP%01LMIZmrX?89j{&X(l?J&c~S+%5gxB7T%@JM&Q~S!N!|# z%qf)JT)s_C9!s(?r>-PW8@|1`VD3zcMTyd{44JYOGd31~11Bi~+W7I~YnK#PcSS+X zORKywVk{%G^I}`3rhx?A*~U?c1qCJUV4^anGt!?MdIiFG~#U0M3m2^|Nl+9fXXD8KGU?h{}xv zISNhYtfhgK#e>Y)m1D^en0g((9*#w7U;(!Ldxj_gGwMUZ8CmC(}SsdIO$$j~EH-)#Msr}E}Vhh$so--R2Hbg@@Z5R7hLeU%XE zM#jQ=aco73NP=0RxDBB~{4{QemkbmMm^0HfYR9ov0WC!E=ERsF=g~a(zkozOz(d`ahPZ9|@gjZ@OED&tKuN*LhKth&*^aANYEAB32!M8dM5O`ArF* z!NnU=Hr;bfO_5F}!`CFPde4abNvk6$l6YVHG3Q7Uk_RFpU9%I9`E3<<>*xF3h~_22mco=9U!Au$%bNrt10lGUb^h6&PE4!AOTo<^G6u^Ax_;V=EThBFZ|{4 zsN`{I)k4EHl2{5OK3jkyY_YqOD*fftsA8#^38vbbXHNA35sI+0d7WkS^j@f|Yiw*D znX_r8sUSjgEQp7yb{KyWj1UTb#r4Qg*olJ@DT6s+M_+9nG9ae}nbT{>G_IjlX-e5stck|bEb&3t@;ha`~p{D~0EFGDXeZ2HF zwbww*kHbM{YXzMKrwe1^pD&35z=KRluMs+<7dVjT2n%#?U{$va5}+IT7uf448O#j% z9x0f9Km>4(dwgJl{@=Y10JLs`@|3}o=3Uoc{soZ{p;Q46)Jr2tD@CJTrvx7zgOc@P 
zU3H?EPY9bqsXn7@PvlGWHzs&+qvC$Qrz;SEoP>3kM)}{pM;L7WbYLFb)Xx&2h&hox zfrjEDstuZa57Z%aP)>ltZ~D-XhGihbjUI#nm6)hQoHA8ZsP7&Dt8w)wb2Xqs=@18; z%4U>@!fo$nfqN{0y zG8=o`0@vy9UrU96Wqjv?&cXfEbF@tm0EP+h)D*C0mLsVAD{(X*JbUv~A{~#OtujirUNOW_1Lk8euqbIU3Q|lHyx3`(dg^i?|7XGr0 zF>oTjlWplArpM(V=XqS8MbB+>CMO-)z|T(Y?AU&khXDsRBv6AL4`CCy5t0O-4wDes!EnZJ^pDhQy|<1=pj3epz$|m0Zc1IKCx1fM z;oGknx&_rYuJ@64*3j_O3>uyqK$*RAFh8g@k z7u3fIWua(6<9EjJWcY2MSS^Z;+Lva|pi>;OK5WP~6gmpAtfV^VH%g*?1H=b~Rl%2HGu zyTPHdv(?sChPRm&Q^+yt{GQ?ojX@_bp;v8=0Bm(=Y_nLEbJUxD@vR5l$4o(ir6dh6 zW<9cL9(rH1;~JHNdLx#EwcUUUqsoSmSrgyxD$>JO_nLbqtqVx!IEO1YJn;=&N!1zI zlg_O<21x6#7n>?!tlURR3_W+hr~N{2Z0wJC3;)zvq24cPaM@pa4HT>Iq^m8S(2cd8 z=I;A?nHBZwr{7`nbZRhZHJ%H<5#rWzwl_FjlPKm$^@mh4_rJ(m8-lWhcQ&g#H8U zhNGhT8v%M?Uj(eX)XSqG#K_$Ei19$+2idm;Ka+;B==@q5r?D^2GvrxBE*xYjuU_BW`RDI=cKnOmsP}*dy&2) zGkO4wr_I+*;;q`y^onI9C%S{rSpTttG`myV{p2WJ;VsorhD@FE7P3ILrTHm|kzycY z35ikx(*vG%$65gEKWqw9kn^@3_$`-eFt;irZb;A4JSEbVW-f{(Km z_tM4?Jv=#yg#^QJhi^F89o!gJ+U|`7z4NE~VIacNZo-==?RSOqd&PLWe)!Ey-qK?K zQrC#7PGqBZB}IxJ#6u4~y<&TaE}a;Mu$$z-Is4&tj!7^6ie^`S&?{1twIhwtiqQA- z3A3+$q@F3X61qu0huuh53LbU(8Ng}s#k94^^_4Hn%_at&bIkl0Z({nP?8|Su(Ju`; zwC3)udUGZA!{W^B{S_92a&lW495+KNk8KVqiX9(m-_wN?`K8O=E zbqI8@IZiP3KYGO6Y}(Cye{^we6uQ)C3N-bcKeG`CigcqNzO-3lwYL0T6iN6#>exT# z^3@UXX_o_c-IGnNXWEm_rp&iFUZ@TrJ<&W^L|*v+lF@Ov3G_(3DhHBYhd>SyT5_Nt z#+fb-eikmKA<7Gws<(-Sb_)r?MM$RB_3*g@o*_xqY?yThMn)lDEs(M3vJh7r9UV5G zvk0To$RhfO*kX@awMQv_U+j4^#N0tn!U$CgjT{qvax-Peg*f{lJi4Z9S(V3p6Y9EUdeFO_kqImDvZ2r z_T0p8-)R6zFclx)TbC0(;;aV?4vs19^YZhrsniz@RjLqczw{yh><+ChFFj@g;J{MP zw7xvJdWnrZJnVB>>)XfhJh-W&1e#j2kd2D@~ZSlg-TRPWC!Ro}k{9$d8ibjq#805V0+aJcWS)TDfUiIR~rk zYW8UgKYceM`>+WargK_5nhx8avS{GwA{f;4ETjw-!CeHbiVE+{l9>m`M+J)^gL}`% zHzX{%GsGnyEu#sz%F4@bq%z901!snV}UH4!btd^q7$b%9gt7 zINdikwopW!gZ#b5hy@f&>p5jCOW>C>1+`%6S42k(kzeZFp8fO+t4^ZmqFBnqPM+F$ zST)N(?4}=-fAH;_lSTrOwhwz_;Zs1+y+nBBFVl9@DGTb_Kz$KbNEoQ$^{ZK;-^RVa zK1YRj!zLnu(P7ZB7j3xE*j>KHvXW15Q>yRKpUesI4vT&>TByqbt@lN*Fi4!68jv`IQAGw1OwR4*JF+p!Jmsge#0SOF12}ODFdjB&X9tk;^ 
zAvbmUxS^2|DoXf#_+I3IMAE<>2}L5)QT=yHTB_g?T6OSvfj>JV0)8e5qj4_ZY?J2; zo7?6gUHO3kp26+>T(;blcV4`xNtvWnQ|{FWB*(zHH=0TIJf=6cPCel9@|~Qrvc%gc zM8AmeZt6IG&9|tMMr3{nQN6uma4#58RSk`X7*NuYpIfDL6;?dUQ`DVi>`^yx8nqjf z&cMbez(`Pzx~O2*B(cLLCW*xIR>8v~q$>UbF4RVC)CzcQ{Dv4FR;C`-OzkQ9@L618 z56;o|Mns?+9dH6FYRI-fIZT!^J1fr?-(6E|V@N_%ix?XhcX1O})$k0c*^RcgwAt=v zn?xmdGXAVsp+JD$3A?&CBZxpp*T>(&09(acwTHi!eu{Z49QobO;>~ZqsBBJQ21yz? z<&=0^`mYC!HY4I!X5!$|4X_by9J-xP&kZ5kdTqY#64e5|8j?|Bu~i& zOKEc-ve5_hUB#~BzL$?u-#Gdax_U0WadZ^h>yN9dVYU9)M~|2VpQu(5F=xKY9N4w{1pbyGod_QiSw$t+PwCa7g9e)0=C~7@3Rs z)8SKRSxnjwH3e9%Mn%BQ(S6mwdJz*LV?$w!WDBJooM4{pLHxIFLS)Iw*s+wo13p`K zL+nKMQQmEnR*4mA)!XjeQ9^qjX)f>F|1R|=I{vBo~2x2 z{DB+I(`p+jK^Rg(fo@0cX@7sM9w!aoFR+C97wn}M2n1?U)3ugq+dz_$e$=kWcryN7v4R^KJqv_6?>$TW66~FULhp z3e4YeIMeTUKIo3Jlk3~y4=cPX_{J|;?QLg?JxoCH{5nZs3aQ#u$K!Yrf4dKVq03p+ z&@gA}v$IJ{OWP)-GmKqx*2pC&rz4M(@Y2(dr7VGmooMvTMYX+H(}9FL|aem=4(hsdC}Y;6PH(sq6V(}~*M&t1!@i-9UqImv_E_)d`2QZu;kIk!<0J2oI*UF=XY^Dj~R(Vb9E|; z#KpWvx)!y@gh9a{fACY^7JRi*xxF-V;UmNoF>TDF&Z*AfI0p`wLWIx~n^xf_XQf3+ z-Y53-_N9I{Tf-bKS8&~K?8U;`8TqECh?8>lnTCljD(ST>!5wRwlE0h^NY{U__*Ho# zw0^+rb%Kwb<3BxqjaI1w^uG!SZhzWE1DCd!wbYyXH>n$?EJ{TqHR3au{?B z{iB9#`8d^af?ichExeCf|2pGNcx1wq+x*+k*mEW(X$3pR-{MN7*!;M#vRDv@R64dV zHzdFfXlENy6a{T%&#Y6=@kVHxYJ|CHP}#bgzz zrKdxM!#BpJ%OAs1Bc+G?i~gflq)P`zYI+(RaaEBMyt?lf%~KZ5(`p?&2~Y#5On)xa zx_G(ws$}zRB!yeFc=vOop(OQp7RlCLV3!0R4T{(Jk) z+uYm)xOQNBXJ45y!IcsdXS%KME4F$HrF=^{-wd@vN<#hmn6w-N6BJaTe@Bdi`Pqe| zpp~>b8D%n4`lf6q)8kUtDWsV&uKHiK01alpCKj^Z24{a(>R*o@=YRV)Smo&nZd@GJ z@L@WJGk{yb<8Ya{SSwvlPAOz~F(xUupW{IHY4m4A5<|JIb=*{+-@JCG+bS=Mb+NN2 zSxJkLCiCn^0bD=nKDW5iu+pKDGA>Sca2{rJKt)Nvkp%>3AL^y?rco<%q$ZyayMyma3w zX!(fe{G|qOiUWHt=00-O+NqfHTiy9Xb{ufIccd0YKzJALQTI{1wdqCp2EP|GcD2w| zo@bBte(K$ZZGDJEhV5mot%G-%%zWS@dN1!!$NA6#pD$ZkbpkwYO4i#x2Q3YKx#|K7 z^i;%XjnT2_1D;$A09TMPt=`~q)J|!Sywz}*Q6h1Nn?Fv! 
zksyJ*tj1YS(3*s%0}*)9TDrSRwwQ$(VLn2cvHpU1U(e?#CgSYOqqhuGG_()K$?Egh zi@F}QAvu+io`>J#(rY7r8%b-wA@7^Jdd_O$Tn{_=sQC`Tc>!lU;QL7GhHSP^tNh5y z9|qYShIm0?PE%^faQ&3%q(klz_ne{u)eSKP>!RpnvhE z#Jhgb_`7-2i;Ap0p0n7_mhC5Epw{bad7L&iFYl+-bc3zk(&@c^m4SXDhewM>7{?j= zCJ`%U%{qiq;ukHK<5@tR3t`}c6WrCQFcO8eueEzFGwP(A0Ie0B*>VFDyZiX1oXU82 zebSf6NmgUs8W-4y2T)DGBFZz-6a+T!9lu5L?C`oe9JIDCqB16eDEHSC{e-)ZHQ?G|edv~u*P*AYlKU;cQh`3c*iMfG2U-${MjXcZc0rIau);ONX z+AEUFUlGfV41R6ImECGHpkJ?MF{t(0=Zj9*AGIvjkYdk1G(JozxI2{I*Rl5Ww`1rlGDW4&+tNn zo`k|N*9_zKTI7(Z{X0uvzPsP~jo8icgk>lV+@ZhTe7RJ6^6eBGx3_}hwY?G)@S}HR zJPh~?Z!N)Bi}lqtb3$vq>?&vpaJtc$K4TC`vVx#lOnea~MC8?h{51Pz^Jmd65vU2V ztOLxxGasrn$8HuU{3+|L6)t2UY{_d>ikUT79%ms%YX>LxO=KyAe&bA zEG>~%f4SqdvoBlU*(q$XsP7kNj~k81B1a_@i9IN@V-5^}r7kLLIV7@PneG+Ey2d0$ z4qJ?V_w+sbA}2sd(3758vgfR;qrFym14-yhW?xwRL9Bw|Y6nBrz95W$(zBj6N&gSO zt|N@q#m62_bQy}Z9EwHb+UZ7?JILHe`U;!aKg8Ut=nS#P5M(Y81~wuvCM#L%0m$8F z?-W=AA>i8RZrfaKf>8o13B&T13v&XaaeCBHI^y{RVmLlFwt)86G*2QK;Rc^n`lD-< z7uw%4eP#SNm2 zj`im~Y{-K9fdkO3fJ;u zak1HtmJgYp*W5>o^@vQy4w!JCE01 z;2Ze$t>(Fnn-3nVzsFW?gpGLbaR3 zGNhyJv{A}oNe6a!%~KNK>2;Og^OJl#Ax`?(K>LN;nJ$1Px5o7R5$N#HE$bWfe$!cMz+4&o(4$0>=;>?l#y?5aRU6tUhjUp4;rww8?_Ct8^oNydgfj zMykk%Sb18UX9I@rY&2y6ix@W+Q&czNSd{FmlDAUN>^!=pxde{!8z1~lbBn_P%D9Mu zwtjPt9?Ocb?0P{P%DQtqcgYfUE=f$Ha`eeEp2__NRSwo8C34XurO1Pp?IdO&n{$Ym%q(>A94HJ zoA1tW6(6We6%={1#ehaOkLkW+oYYf6Ep!RVp6n%ll=1F4{$*WM_XUTzlz3Tzou8J7 z)yeN@%^l2SatlgMcAYHEve&fk{>V&V&MKe9{Px7PhQ7y2~2ska(nVo=z{}m;1I1syo6CaBvMhMmaz4Ba>yrD=Em3kO!V% zc$}^PogEhkCtiSmp%IglAYk&T1k~Me;r$@&?dv1k-Y$;bCkbianGo03#xORl!E@!v z_3iL+kWG(Rv!Sd#l$6QUjIw7)j9Rqp@m>o+bylIc`WVJ6KUu;pL>9!0LapdYA1%|` z*}?R<1! 
zA1jRIi$*J5Y$E}>?;K@?%|NV2uzVb>qM=XoNJ#2A_8u44cM~x7Gk;OIMO~1Rwo8di z=uwD^Q-|wv86T4AJA(F+VxkunbxlUhhJiT%xQ_x9S`)Az#UIFy-kAqW$bB;LB$P}U zOFyH}Si83fuPPoF;Vl%gcc!fRIuon!WFa>CYk!5b1>{ZAS|TJ^tRm2#%=w1Zj@ zem~SAehAr^JB%`-I*8Pg2?#J*5RMP}WqNHN(?@3EICFOI<7tH7;K#yEl6y)1$#BU| zt@o6TU`Cw4!TVK+yVZ$)`l&Tz=W`A|QOtKL2ce72W8K|kFLZD3?44CqQngZ7W!T2E?KSq-Qed6>o7vc&_e$rBy5(Vh-HO5+bAPK^;yFn3kxgwAr&}N$%pA} z1aliLd~ zXivt?l_`eR^3%WUS?x)qUT>c^-EoYY^|SEUd%UrEo!mv=Z;58=pL7h zI%7SoehXu(WNjO#z%j!F!>5!Py(R7Ib9`96@najd-WXGdkFT74kUo-5GYzj>x?ED% zls#!oOIIC*(3x${G*F2h6;hXi)6!PoBC35Ts3)Q2OV=!ed!gu33sKjJ>-l2-h5I#0 zAx%-uW^=wPB@-M;2Zp}4H_?TDCTi^2#Z4(-J|V-oOD-3C*vlfO>{ZdHQNj^=H|tB6 zE|_srWQ1)ucytOQN2w~35>UGN`@ zayWv{JAlroJE2zU9j1$vGvj*f_&R7E&(^9}B{y`hMbF7K1A0c`A$NxQ~? z3LER~^O)_LJ<1k=_VpW%KH-Jn5aHyU6f_k~=aN~08u@16AV&`Kb8*vXO!GqO+Fz$| zLXMh>v>uEMP8@tYksmT$w=|Cyy|9?pZqf|CaG;_}!)ScZ&dW<=SZ$?Hn@YZUc)W~- z;q~V3S0Re}NVD(rddmM2lxfaBXzuc2z9OlbFbQ!$D)v`G%vTu%12mU9YKjR~-*^MS zdmDd;jf;(#u5X7$1D}OzikjC(+QAyPMPQoW`NS0&e;Ad$t1NA1&5@Vij3=-+<*mr< zl+A}q{36Te^%IJ5a|>)>=9^t~B#d~7cm88cg2`j(c&>nZqC?yX3FvpQSkB;ttg;=| zL#Xh^$-xkdqON~t`uorMq>QoUQ>C*12OmM;z7#y&J+b$nL)?nc$J>uj^lQ~BSY$I7 zRvT~3VnY^{csRn@|50_sd=6y%;n z_aPCu`{7%Vm6?M#7p$&N9}^kH1tm6I>dbY>%|3BTUNM~_+4=$qR&H$1MAqcA6rQeA zj|lj<`C`{DWqd}*#3DB%8$(CMW7dL+NJ>h?8w)-|Tx=vN4V5^VUxvs|5nSA5A`W{l zGLe=)BYJg$PN&1seF?ly(UFl{V6lZWt25OhFEnUyAPi;`pMA1G|qRBr*v3 zSp``1!dz}qv-15_NI#li?|P95V0zIBN3#@Y(9G5NCbfDXCv{`)e+M-T%yN9`A7Lru|s`&L`a3Q#6~9m94~+>lxn#PEO9)`}aY991|H1 z0UD%cWnjYOF}U`&5!n34W_;O-xL^o?RS zH6$ET?oXZKV)^yJR`V2<6rrlL8Z1_tR;9R^tb+-88Ch_3b3$;Svd|+nJ%@kx`F9q< z>Ho)beR%GR4P!1m0xGS9wDep&{QO+Rb&bLDr7Mw{kjMLrEjD06m`>8;SXLs2U(*}6 z-+epsGs^Ja`Oolto_=!?qT{0Q(Tb0;`_KId3uJDXm2qq^)gm`n8OKqP;no;o4Z`dB z^e211R+N{bxTK;!`QRb(I8ktdvtuqDd3-}jLRyA0emg}+@%Pww7!!;4mz9fKlP3M)pN_PMnT8<1Qc0-HHDJSI|u>C7QmR>D~ZRHVOyO^gjVm0 z`STZ|s~yGpcH9aRMSLowP`=D8&z(Ho@P+3&SoS$|p27{I(f_yDPWqs;;a+@5mU8 z?9m5#S(*6quir6s@&tsrd!slfAFt`7TCkK^Yf2cY-&;epW2#ibY(+Xa_* 
z>j@_(C&&VGk5z|l$ByEUv}Aa?dmzFhW5$@VaQE@X*BgJruZhPncg8e~43ERELx-?@ z_YP=-eBmM)QCTCytJ6>nAAdbmT0L@(?Z@VioF^zz^#-xCEQ_?Ln3xqo$?? zHx3$th`>PnvS$}EiwdFEYk?XAZX7w9kH7UhxA7I4lP|j*p=uq%1U;OzTI8@;G-?5| zN{vH#*;xC}HfSs&o|!ZQA^w3_x#KS!K6DuWxojwg_V0(i1=(1!{!4g=MW7BU#WGp% zgJDzehOd7R$}oxBo#Loi)cdymR1=U$dv$4QjbX_-X46eItJn#RPA6kjv2oKDE z>K+6J2IGS_zrc}w$KmVC;&Cy1LOs&b6LHJJDHt|v2)!g&>$r} z4UfMz7hw^h`0(vdk&{*cckU)rxegQoEC(;Vy%-U(VOX_t75@5RFXr4e6<1Fmilax5 z;gtoe_%&Q@EW+*g+=6Oz4GK$6V({RBe7sjyRAB3`f8*z`w<0Q%U1yb;dc#D-bcunU zSxT4{Ysce>saW>nKBIX)Sh4hRxJhf~-pRir~-dFhbr)`*d@70(%WdafpXX5!smT*fMrk1mE10#Gf z>85dTad$y_LLRqJV1j~*`^jv2<5HPQgY>i%-1xu*Ts3MWep$aA>p%S#(J@gC(x?EW zr)J`jryoG?%VO}`Z@*&A>l-n2Tz|~De>w`Yi}Cc_H=uzAeMiUP&WCPAR%$Mue`p!P zV*QbuR)m4q#^LUJ?nd(A9Bx6vw(D=2g8oB$!pU8$Sl){wm|(QwyG=+qkOprbZ@72~ zm^phI3QiXCanrk3A80jNl#~|Z*NuPT(9YxV^9tl{3nODgF@4r#sB~;n+rTYd*RR$TrPWmY32Hw&Yt4#BX|L*e1hT%D>=TU~{}HgCnR-~Ela=uTYh zPf1V4^><7|w_cs_>#rLzeE3j=g@y8!-tRJ-KRh0 z0yr0;Bmr7Uhs3NzTz&mO%)0$Xq$lR%+4---zq1P-UGxY7`~&d*J1dd6FBN`X%@e>B z@k?=S$jWD9$E2>sD~)~>_J>~e1LVTkLg(JU4j?>$xd+rBIWrMcW{<_7E8_VW zW@R1Sl?)#bUx>^S%xZmjt~;5{9rwMD+=9@EApEm)7e-t;94;PCs56>z_@4xo?Un#pfVJDMql(*sfFlZEZ@>K(47F9zFgFX_0!&7Abq!9QEJF95 z{qXQ3k0LiG8&5v52(jE+Ly40Y;^cyfJ^R3DF=O4)eJCg{!p)bBK!|?;{z^(fYQYKa z%5Y$pZisVsN06XFkKj<`7MwsJT<~Ib@8<{BmTa72xBgYtdy;D6X421;%PKe)x7HYD$d=3J$=~QT-um4G8NJ z3J(ttB<(*ALDa!wQXw@V3rYLZk#{l|*GwLP$v2EcrJ(}Hl8*AJ!0ugp!rjRm|NCS; zwr<`HHxE4?T6`zMB17@TC*NVqhF$10vJ39G^LA90R`S?mc_}5hVb(PGM!G|Gu0!|! zG04fzMR9f&Tm(;47S&?QZ`)8^UV|QeyJFacfym9t#hN$2ZUxR@`VW`MEfuhn9F1*A> z$0DVo3`>672s30{9vFgahhK@}+8TWJ^;a;6CXBuEN(}AX4Tp{$!`Hia!pqMWS9I=% zAXz|nZ(sO&df`A$CMub$U9|?uB`5J`W(v1>oZc%QzMkILaP$yzvU4yxt_yl~?Tn=2 z0&M!{Z@BpQz@iekRixP^7!&S!4uK(&C{6qu%b%KyNM?ZqQ}079qjG=-Ra#jvhRQCC{!#co$F1ee8B< z-89&-eK+F!#Un7l4@V9kMrCy+E*o$K{@l0|U#|QK!JR#E--GwUC)fu^5)PxZs0865 zVd&DSGxqL0h!wAYhUmCx?hbI;t&=hSx{>&O^B?&9-5(Ge8>hIld!Nx_bsf zf`jn;58Lqf*1Z@v{c;Q*G8mbuC$Q$db&9|xt58x}gf9JJFz3M;aPxG-@qIaXY2gw? 
z#YJ#;U~J>=3z_A^SvNL4qlQtefvB&<<4-(>no0{6-~S93?ljyDp3DWjw3KwrzH268 zx(8$T?tk#}=Q|O9MNeEkWf)2e%JJ(b+fjJ35WUC7;?8-uA^o`GzAqvs1U1Ggctz;& z;DY;MtdQ{Pb4yU1U5!Wn`yhNm-I0AF10`j}aCLP?ub$l@n>G0Oy>&=TI*yng{+RpV zom?c&$;rpb6Qyu*bwSTQU67wuh-V&r8JLNp3k`urLQ zW+yIySyV>Ix;orD?+*9|d2zv~=wu0Z_tq&U76*16#=Eb5!mT%$93lxK($cbS_&rJ zI2z+7k4BZT3dfTZVXQGAEGQCswI}|!=1Zg|WZ=qC12F61sW^Jf@iVcgJBaOr>R{ z;y;hiM!!KlQF5{f8bRRqcr~;L^a;hEzih>!T}Mz-R)NJ!?n6LuAeO(j8f695aMn93 zu03T9%4>=d+biA7U;T~) zyN}?;yC$Ja*Dg4;D+%AO`w`)hA#nE8VdkxqVUmm}C@6-}Sc}jwKXmWh6G!%>he+V%evH%B(IIgv#KB%@>yR1g6H6~&99AW;bhjEI6r zFcFQQsGwv7^h#1hKm;W3?rhFE&dyHy-m0Dfj4b#1`TFWKzjfW&-tMlao~rJCPMvf3 z_M5|0RT!D`(rlVGjpOyjZ;*Z}kHWGduIt;B2~)?DdMb^1Ghd_ajZNv)<$BE2(5yv* zl&3v^CK-**6O~2Jv2TAQ(~yH+=Z+ygohKGQL`YZ&%NDMY{pBh8OFoyVFuE=Kk?w{tjj5_FZIM$i>IaAGOMj^J%BJd+Jbn4(Nv60vq1n#OI&v zm%x-oWw}fa_jhJ6m73I41CK3yn5L}~$<4?kH!oNEp0>WGHU6IdeDKZ&4t)6omil^L zTs<30y_q=^UZH7BbIxa+<(_-)pvT~DY}&Y)&1=4-c~o<>=s1&gl1^6u!1OkqWWEi! zs}n#rLc0T%$)&`7!V`?Dh80>E4V*q@bauJin}Bj5tS z{@MNSYW+Vf0)MmwqF{yzMwlqzE52VgEDk>~r}vl31MDA`WPh>d{uKc*A%SSDsY2)U z$I@ttii*L_&4V`AwIeC1i3Fe4y#MxPGH0N(*|Aqv$^xs|%M-OmgSw^`bDasDuP;`u zMoQ~dmy}XgQOT$_?HJmz3*Q_)!lLgFp?A}u_wXbzAb`5cD%wWJb63}%oXa$@;H%B} z2REXH#)n~jZlbWdh7UhmPo<@ryKldpPT_H!IC+Y>U+yFj=+mSb{rmOjR8bL|wr?lW z*M~d$_s0)d`OaIMa^yiPw2U410Ox9}ShHm#fsGnbYpj)wQ~`q7HCkC{p}GbSFE32) zZkVy6v6(2UwiDL6AN_`nRRFLbzG2&|kJ2(uuyquW=E|Gm2U2ILCNHy?fM8$z{QR)m z%w!qzQMp58<4AOBtz2oIk*-LB&06>XQISzRJaW#(Q;1@kx4SP!O9j)Ox=*qR9=T`X zrAuf8ue|ycC3$7cd-OFTqC!x+*E8wSiNczP*;8I%&VtDV`UJD;)itD_%OWf!TuQl# ztxb{@JFx|bpB!jEoXhdCChnuITWO$j?V5zI4JUgFT!y>us)*+-87xLU^ zpGbhGTBpG;EEIc16}@8<8PvHehLkkk{CpF&!X^%w(6J{i+PCGCQ%Bi%>NuKu3pZYS z9fR6-BJ+P_G7XujbnDZT^pYZ0eY_sO*l5fuwUmyms;=h6L4!$*i(}c2E&SVHz)PpX z&DR$%e}8nS(28+VZ${kc)W*SxSOW&&3SjpN93j#qV;ei zD<^|_%O02fxY^@gAT}mm`Z`G+F#$QNZ4{N|Q(a<`l4pYHWHi^3n^{6&s1JTV0T=yp zs;Vo9Y1Wug4-Uc0$BUGc*}U?^GGY^=B!JNHO(mI5X(^f9|L7<>bZIXcI=j|?O;TK< z^yCpKBKo4rSV>7qIgP@7@zi-q|I?iG0(=7WMEb{*Uz|f?mvHW%^e@tr3z#=;kz{3= 
zZM9TV#$!)R#z!B>n~PUdQDh>hQ7Av2aL%mC755(1nma~}V(-qwe7IsGacu*bG<5=6 zjgHq}UCxE?^5uRr=HE}VCe3;GwNE&EJedH0Kg<>r=?y-~E&MC*u!ySvtj3G7SKL)@QJ`toF z(s}T)F?8tCR?3`~l$4U6SHSTjr`falkd)UG08U{^K2_yLqObL1`m+yXNKNC_M?SoG zjnUDO=yi%;U5%+m0)S$BY1w1co3uPN^?9P>h1{Kv`g$|CS(x7r=fP& zF!P>E5_yEdf|sA9BG9NB_k@oARkk;P5J@~ww}Tmbw>Nj zjMiTp8<#t}35iyt$%vP~jw!RpQC?cf)03A-hMNGuoM4;PZjm`}s)CrfNVHll71d=F z=T#6M8H|UUmwb2oH~s$DZa_~1kSz5bBguYIp1CO-Tyh6_17KYM}v?8*5v zNl1!j*{dr^JDE$Jxt1nfA{akuoLu}Oq?02PLdi~5^62@+)A024;F0l{`ZqE<6us61 zwN=P8rqFNr^$Z_9jPDPfX8C;4U&6ah%xEeE)^4^L$uQ*8D9jIUFU5bcq_l+UvN|H; z!lf*;+O9>d(vxgB%g710(!FHk#puyP>3LgM zwr$zY<~3i@Dzdqh^E{Pxiq5|afN62FyKy@G6a((+%;2Ge*|+gK=>e7)C!~Iz9%Bu= z$2NS z>w+TL^7$?fRTQE3^uV*Ej8V7UPMzL^PuG3I^-)pW+Or3hWhEH$@~Cq6kUS0Vx_Y8x zV<^*WcyHSljKX_HEsQYi3j0lXLx}6KTIJ6I)KXlSteEV2te%bW503+l7Gr5XmVy*K zHD)Z*1MKPwB@!S|2^lkmp)14-?Tik`)WqP*qwdJwil*rZB&N zww;?ZcG_)ZzsxEXb!yO-SJBfygxmY{lcQKq6^?TJrBbKE_=u~igOmTN$#F(aIgu3TdLX&+c^ z7Ds;xAWOX1+GO5o)fz3TJ|HOM~-=z&Ujft}f9-kZUr2lDlSgS@loQz@+_{Dy=ip6qSMIfS2){84Z)F3WwZy^hSZeCEIT1RAZL zM;~~axTrYNveLM9Tt9BQX#l%E_&1wA+b)ad8HOBg?B9i`m?*q`y)a`Ws!0gpf{C(Z zH5bkscxv9Gl$MqTG@4#&gGL!STa_o=LW?Ja@>LS)BORhJRn zES#~Aj+F9=i>AIuobb)FD;`|pO90QJtrmM7m34*8d3Fx9b#|Vb{2Va}QRr1V2}0e_ zuM;C48pOHt=U6&xEwS+lsO(lbM;q&nGRN|>a_QRVdWKKBnUw5vH1Y|-N9WJ$FE8i9 z(E^&p#8XyPLSlz7?tSQP3i69dK4-u~?Im-g+E&T6ZQ7u>da-o=8`$gAWTxkE&&=EC z*sY_K@!Ys_JF$r|XzgNAh82xq45`#uR3;2*Sv>#xEPVZZdHKaf*s3(R>%8S6kBi&I zdxFxxilq6a``&*Ge*M~r5ChI2=Ca3z)8YE-`Sjf#?A)@K_;?}T=iJ1#DD$5(H!1$7 z%FoJJ6%wO@v6qpSN#DWU2?}Y1kFO^dt5GsTgBpjka@pJb@Wokr4eieG`v!3M(D%GE z_cP)Xn_);fCw&sT^k~n082laFXc9yb&F*Dq<@ieI+F#ngk)xERxNp-l(Pmb=6YIR-;BE_ z4&~_4ldOF9Q}EL8{F13y>#aO9aS4gBtw>2f#oc!gqvwz=Z2D|7TRzywHIXglGu5fA zAFl!c>%!on!UN0+fWTB@{Tscq{Ao>b0bqYxs9h&&U<6zMm(?1`eRoF{%=LV1%UnE>Mz&T{`V31>j7W^ELpk;V@)MG!S1jNY(PguL^QsB zfpqNDg^-Y7cJJKAh7F$(AFud)Tn>UMQE>tr3UK56uTts)V?`z1nzmqg+YWqt?0eRp zJ&r0Q2))%N{i@6w6}F-h?!2Z09b30%>+$c{{Pj1yJnT*>ceix+7Am!NO8>_g{k~x% zXcid8mYuseU0Z?5O-Ef$7Nc(;fnRV0TR-@iPGRBn>(~*KkaIKDQ>)cVR*lwb#iCJ@ 
zXRhOmBi~V4Z^EV(#jl?NG@>vp3)MeqmrAmnsw%6nS*>_`dr4-3BKP*+=Z12=OVN~I zm_~~sIavZ=;bCDcU$}&NMm zJXUYshQJbB|BYg)-ZU4&(1-wVH_z+gWj z6T-NzYYXyn3V3hHdc3rLsN|}6J2ttxUyZ=S(=yVTyL=`dp6+7EXJMd#8@1YxfSUcxyRF4_@He zS7uOMR?W+gF5{)Qrs3=7Pkv#AEEr1%YcIh zcdk;f7U1RhIU^+i_0g(Le6{1Cl$TThb_MKIur&ebE5NZV@Ypp{nl-mHgC?C704(!D zu>`;(<723+uR`l@WaeYjvDB$px_CJ`r-~VL*Fbs<=!~1224j^8qp=E&dp!~15ga>s zp4aETOJav0CQf{a!u(=hoVARY_(U=cDU6vulx|%+vwrn94(}-4T3QBA zES-e6mp89HzY?v*T{1fARW(d~>H$3c-K3q$@jZ#=-9^OgVwAYHH}f$$_hnzT%$cfXzzgx$G$Hy^LuPSb=W%FBz0ZP}Q6W{luM zih(85-X|_z$OhZwAQN37qzMJkDgdyunmnGF`!q(QjX6`ECpIw(wMB&?J(FR>Z=&Dm zZtU5AfOU&^5+B$6e|SL_B{v=8DmyKc&VAZ3Y~lbcZYJp=>8F*s4v}rJ;r|`%#50cO_j>S{bhgc+jD?5 zOSh1es7Sdga=Bt5jF#F{Xm-T`3?&Sn9>+4ODaVwZ-!9hWCoI3s3oO0T~H;p*~ zuvrTpA~7+6)vvC>a5f!_*-WeUO&BxzcFvwX&!QP?QF+6&i>G2S+jwrmtHj5(z>s!^ zy95JlNN2Wf-ND8+duSDTje`MpoX)qj;LiI-|Aq(HE4V*q@qku9N*^Bj5tS{;^%}YWP1f0#_=6Dlfp!7hxqr;xn-#(YZ-# zwtRV0KL5D{qF8+`K66PR{u49ouh-^39{~H6Lc7Cv;e~k%2nhHo0H#PGKy|f|!omVN zbnd~!6DN``R^LDMxWfZX6loh$m=c9qrMNEqgl=fwiV@egF9<)QZ=DzdR;bJ!dVs6R5}WJyp0U@6RbWfBqL;VgUQJ<5xWXmxH9 z0Lw|wp!dLT+&ZogKb$(oil;s!Fxm~H*(3|oSghF1YW%&!P^s z2DR0VPuFZ_@3w<9Pe_ss5wU7KBQ1ko1G{nC1N}I3_%QFy+d@KoGwjlL>S~MhN*+{H zR8m}ACPNez6@gx_yT}IkBjY9jusV$zwE)2UL%6MXZ_bw$uwvr|JR_s26&IsbQEkZJ z@mq$`EG&Xgzu3uz+-%gI?y_|P8Zz3fSiQWc*J>m%W-KUV#Pyx&)~qF3i=DLcA{OrW z7``>iGfc9$ zv~~>18QeGZ4!ZZ~#D662d-lTLBYaUu8+j^~x%47!kMwqBGt6}u%SCSkKF zJ$tpAC`^~t6cf=jh=(2?PgY7UFFyY2MTs+|V6DhfD$|_<{E`5e@Bq@1Q<_OqrwAUL zJf6(tV&+X-NK9fJWfg_QvOk z`|p2%lSfl{{keBZ=+KBslO|G}U&h=SuMwBfoV3)l+&AkEI(BHshE?17Zr?FNgd{0; z%$7PbvvPRy)k*mH_^{-e*!0bmC$aZ+S2|qeTg;HhF5aiPs zl}+(J6M2=Mn#IeVBw5aG>K25qN13X7UA6e$Z%2&29{1=Lu`EfMF}B^McV9W&|p{9Q)m6AUwfx$GMUpWIey`C3lF2>(I2#r=W z)Cp=`Ih&mTO?6!*1v!e|VsPU?DeNDj5yiXkPFsn3j~d8?+pv+_O898D$I zKkPC9?DwusCFk8yV@A+#NH-20Kg3(_ekkW}b0$oAZVcD{kH6ruH{RxmNukF803ZNK zL_t)@&Xe@Ky%)po>&tiF9OC_Xn@NakN^079hK|04-b1>w`>W5{ynG+?mp_Ttrst6f zPZ1dtE+rPT40$}eXd2-mVJum+lrzV2dG_U*L?=YCY{^Qpe=Mf7qKH;~VtHWleGUo4 
z6~re-%0)Rsigx~zIr5og@`T5Uj%tif>+X;tRNjl6TY6dsb5}efefeIPwG>~sM$$V> zjw8q2$tsmy6(+jaehOx(B?r6uKZpUB8C+}%9n^)y8O(0N<|%FSLdfuy8F zRxe#c>bVRo^(I=hP2}Eb!#Q>O3`=Hyh)1A`$L3GPY_{?Igr&sAH6bnS0(ahZn~blJ zMcuS^FD)Wl%Q^gX)(N`Y+JezzM|1q(c~-r=hS-E?$#@m5(%3b)+4Y=GKjV-e^XU-IRnfdH5|me^N#T1dYOs{;^oe0~uGvx?%bP41_FVwZjvE8oR_ zqCg~MUgXb??>4Jl$~jpqR>ybC#qZ*H#=1Io?cPmUSt-dWDR_E#{vYTG*CqaMMBvW_ zz%DO7Un;$emw914{=&TZ1Ox`Kc<~}kbv0-;iVnC_s>^Hvy_+{vr#(VtMHx>#_6YIu z365e^Ltgw9i7kaSRZX`hEf~}GdcHmW1ItexL)|zOPm4varWQxj+N{*%PzeA8`dH_dHHz^9C$Nr+FUQ^=RIHT zwQW z-hy4nj76lhDl zgzKoa)RLW>!%J^Ii{7qf`U6i88y72?C8(|p=m^kNXm%A^9(PoeuB#KO6iL?{l&2)B{t=?S5|W7kU{#~h&iyd ztO#R;jhNU7G-`#tCpxaSt{SfZH>S)UPf1}J&rVxRLQGTaY73|oNy^JX&R?G;?U+n; z1V;wYe|Qfmr-r zmwkJ_=Dk-o5#A(-ybHw)d7vw|48EBk4xi`sXWu2EQzIr!c!-kxa^}r^o!I#1q#Mq1 z&&=U;>D+CEPQ$eZZ=g-lfG3_60GNe2ljliBp$;w8m1QKfjAG2x5g0PknLlkc z(XkQeRa)sCn30o9LTnsGh55AW-ii^E265`#Y2J8rJ)QcuVa&J@loyooN3;Pxc~7{bneiBb;~!gclXys$431M zEGvp_I#N?FF!X^w3>-9oe{Vg+y44$qi;s3f2Uncm1^6Z86-z1$Y1uiE&OJIv*5c8F z$2qj;qy(&#d88rt>dVFhHcHBiXnt)X_dYzD3uiB|_{n!jNQk@mZt;8br(H+^r(`gD z?xQqm5yyg;7jW*|3_{}rDKM1s#Nx@s#>cU0#d{pyahw}&>&dWjy-7ZIj@KSpOF~i% zsmU2keBxf(b!gAK@4d_Z&yF%@(R3Pzhp}k>V#@MrQ0q06Sjt%N^3&KVY|NPWyu7Sg z&pbr)>sqp6>06xIlT4MpnjS;jaOeFaIe#LBg|k;niA2f5HDvPAlBe+SaA(T+$BB)I zkltT5wUF5r0GM*G5Dcx<)KrFy>&d{I2eD_{cdT2zUd}rLn3clClFsgwK>XzlrU2%8 zlGD#|^XOi*>vSywx8JjKKPksE@b&b^D(1qg@Bn*a!9)_05?Qr$Eve_yu~vzfWKOU0U)-0$}2O zP54ZT@6Iph&*HmUtwtw&w1phGSimQNE;U-M_)IXi#g|I?oeKc_yQG+_9j*vCp3PLU z{_@=1xjgs6OC%*F{X`AH30D7BxGr3{@QYums>+HRQmJ!wfhz*82>b~V`2PxkIlN&S z1b~V4+Lr-hE&%LL=zP~HB)}TfC5g6$2BjRAjoPYmI>jY%ce`$QYIK|~FJ{%2jnoJDV$-Q8%gN%w z+lJC9B$n*F0$%xi6BX4pjO#go)-77HFXbG&_wU2qW@GrEK_o>+aP-F$Z1{Q~HGsE| zKi6Dyt(5mUdFm7%o=@EBnDoTmbm(|JyS9AA+SQxnjxn*}a+P;VN*a$mH;!g4o3L}+SA6=; z&Wi^cHe#^!H`%%K3o6R1nE2Ru+Fsv=T|0L2$?DB?AKaF)_l(7GE`zy`FC;ELR<5`= z*47dl8_2&Nxr5@;5|+<@p8`WUfsK6e_w#d9j~iB~cgh@%fAB$i^%{W1V&S88Z?kLX z4kDw(O6x0DqOU4&f{iB_Tkd8Hkro)`2K zqs_*S1N-?tEd`akI}s5PQr;}LFprYlJVJGzjOafQAG@0ETeoqfFq8X-3?m^Xiv7oq 
zvhBoi^dZ6Wyw>6p?r7bX&TX#a>+|RN{NUH9-1KNX+@;*6xIW?eC6HmK|J$!MQ0!7~ zv0(8GW8eeN&^R)V(zD;N?CA-_#K)n*d{uF2@XnIiE^!d`0Puze7KtjW{;#>_wH={>CmX?*u0`FZ@hT`SpMe+|h%zJdnWsI!`_ts@Q+R4bspi9s8-1*=zPMS%X&l*UIehf~CnQESrD?M!wCUK4g2E!+S@sd< zf5@PH?JGR8~}>6SB5yyYyDNZ}LcP=+}iWzxX#>KKNV$ zf-`*8R6-E4Ssw|rr( ztEdc*--^nrW7n=PX%rkndU7)N z&%A@KojUX3%1wN;=MZ5LPS(`rIVApHgUMD)K~*j+;$6Pq{UhCn^x%#M`eUrE zX2+T@*|YgO659v!@Z^VOT;{*DkiwKQMn5!^KK;9M?C3FGpSwmLZ}f~?>C>whhj$-k z`CtiipIAjqLL?%70swn@-b~_}M6>?m z4QyY(SMD$U=OjJS!Q?_sy{(qQ+I;4|I0tK;n#U(TOLTk`I+gYkqsf8f|JqZYkWAI! z&V{to+%;tgy#{o{W>K->{Y~s#e}Ja3O|Yme4Pse}dr3;FfhQMCCNVLQ6-(cjj7qZr zz^;vD?9>sQJ9C~zvy}GFTrh!zq{fqQCaY+H8JJJK}3ayv*pZjHXPWCUt_Tf97=Ms7=QaP z+5|MFwxpbTH@#eu8|dLBeVo?p|B7mjG_k->tzmfg9z+KQ$wH=JaB6gVRNkKan485% z+qdB#5^_c0N_bwWs3*6)p6Kgu;g(V3(Ykwb;mCeIePs$Q<03Jk#wPWx{iuH zTS{cpU`C7@N~3@PYO9S{ts1`BcZ3ZeY$Yx-8t+CrhTlDu;HY5g>W$RZ*5mEzjfcA% zXHO^d$}`It`L8?ZH|R!A96!#|=T;J#(3tGhT*gfs#f>+1g6>W{Q z|5UxbXn}$KjuNuS&t{9Fg+dj^#$CLt_^Pv=fSb`GY|<=m&?Ilo+VF8rBxz+;N=aF~ z&RTUcUyi72iKrq!FHj9Rtq_-`gw}lr!BOcfBdWP1WGyFQ3wJRcVZ|>NVaEAr{!u zZm*?x4HLzs?r)G=g=-yp#Fc?*7QZ6d2a=fp=?_19j1?JmT_^ib^ zr_v9=ko|)b5fuyCE1Ea|Q>KD%U`qO5V{1$x@@o-^zN#nqooSDwf@Vb?!Z%2;rA`^1 zCt2)(mKye>SkvqG36TLQGSPnjgWRXicv93m5s-RI116vN-Z88yt)`TLDrJu6PdIJe z9LFC&(vo;l>-9ZIR~wJBMp`4D{-{)dB)dqX6sWLzJ;qEeZjCuGfz3=gLTNHZOh4L2O3NE^!+e!5l`8Aq zc&p@ku|mvjJY<9#5e?U$U)|3=AQPHbS&PxM7&K;xYPJ34KY@U~!R^74-Jv5^bzH-R z%2oA){^w6Bsd5jm7cphEg`Hv*45zdSmr~yI4Q2zDorFAf%c_-AW$RC481lI6zG*pe z&ri=z5nW6)OnaTMrRbMs2Iou5F>30v3U-jKCdkHsz)UWCZ<2Hz?iN4hr`iI0!u7>L1Cl%D=*YDw34QLe>wXW882znuMaNAC*u&x2K`sbU6eg> zDT%#0%#xXQ5$a~!)uAvu8?s^*EVnpZ%A!+{hzO|v@p$w8*#pM@RhTJCS6$RQ5%N2x zVmgb}cbyLBoy`M+n}_ZzSU7|>>AC9?!ZaP5L}^NJ_|2IP>P$9kle3H(^Nbn1s7YrL zLScqT7FSx#IePoMmCNt#UAb6`)}QX=>&xdveAT0V17N6o0!C_1M7+tDE!yKcPqByh zKabp9F1ZItMJ386w%4s!%Bt9rkxs`!)UDmKD*+1!IM1qo3uO%H6T%|Gb>6G%cxygc zx-CxzD=V2JqcX$Wv`7`Bfh8@tmt1nLc55XY6{rQ26?D6K7Df`*7*ZAz>B>3R2)D%J z@%^DKYzEbJ298-zIbotGC>X*HvTa#r2voPN#fz+1vSzpEm4iTVQ&t4ZMLgSIb5Xu6 
z4}w)NPT5cu5|fzgk51RUL?H7^d`AsS5+xSWM8?EW2cukDYt5dJDyyD+32)R&evMjt z1+W`41C)m79JGGUA?aVDSKym<>kHoRpC#U(qq-RbY=76lz@!7@ShX1nZ#Vje#l@xn z#fX5K&dvT*SEnp3vmXSAT_zcw52jb_v)*@g;r5IE6fkL0P*e|8Ehs5$Od2SbuwHMB zlCk0^=PSm>Nk(r}1uA|2-r>i<`{p`e%Phrte2>7{dHe2IX8H%6#>$H~w2|gAlP5!V zb6wG{y5CgAh$Hha(1Y3>CwIg{2wvz4=l8aPj`6l%-6c|P)uybs_c?*zx6iO&v!I3> zA?{E54QouW!&|U7gp!i8xlhsm+FSh!rkq(pAgLDxf<>o;L4Hn%uQ#x?z73>q1VIQ! zCZ-IKr-Hn$YG2E?Z_p%}xHYgBU?6fHfoHRLwL~q^HCk0STPV^{|Q%LETA^2irJo80DC^N zko4GT`iKw9@pGzqP=v*Ulhn%U;(PT8)|$Fh>p|uV=q0h)#4p0oe82dVvByb>ZJ$5b zQIYXObqkUeYXU9o-&ZJa9mm=ZQncXUZ%l`q@v*4S6@6>B6k|peCua zJ&{TwD@cRt=I!=EaXo9DK9;@T|9Vo|xRHWMyPIL3SsgFe&}()?()X+c$sUQ=_|Q7g z0xY)jDb3`xd*coNY?%T`0P(o5u0zw)xJD-U$SP10`qdNXRI$M6{rkJy*gCk8XV(WVN2M`b7HP z5yc3NP>EwI5!&zSMuNpP@s9Yv#M6AjcdC;fUW*-B9L}M&qJ2MWiI{9$DeKNrj|EC} zZLY7c{}6UXKM>dw6VW~LHzzB=f8x*;mdqH5bBo4dPnHp5x<3q}4@gP0>p|!n`eHb> zf}zBuXm$#294|E#-2Y_BoZLS+Ow3g2t?R-xN81St>ma8Rco7my`P+L8V@JxxrY!kf zWL9{0v18ZvQmi$~o^*Rf-wh~q0P}ghFDmbhjpJVtR{CC(LECOjTB+TmqEV62`n62O~rK#H6?syb`>FJF2w(Ts!Uv9LBZHcSNI*Nox zc)1B<-Y-ra`$S=mR4o{^Y>+pq9xAUX8ArsAh>T9!vfcJ|Neav|gtPwq1xAKp0y`va zY>ia4-aN3pttN?`F&SxnRP5G6LPZDJr4uJQ73O-vo1B#)bL6``)98T$ay(Iwas6@m z6&u)z;_QhCifUvG3u+Kh7@6oDVDyVzC-Z5!8Us01Z`XG>1&zhQk!zNraoy?h*ZR&} zh)s(w@7J3aOiE5OD=aMV4<%09kNXGcwI`dKBqU@CCNxC~r35~&5BY>O{`78_B1Ni7 zin@RCji8Y+Frsx!5~c6I3t@mDeNfp*q~@Wpu`-x$vpUh}>ECAo8hVnXz0#`hi6O<1 zUG{4D+U8J!oJ@Z490mCGKpF`)!a?`vdM0GAvL+oC-SrG}L2LJ!i{jx^Oh`qoO2KktGkybwNTN~85}Lq&n$NAl|3bVUk64C?rI6oJPgUUc6cdh zEA|C^d&8bNB72o;T}mUXLV^47L(pj1!TBq0PEVm^nM#*2 zu!-8e4Fi>`Yj9StiHFnSuLzj*{ShhjSxuWN)r^XX3}@9E`Rr~6*&O@*HT$+wk^&hx ze`Zq9$H!+o9vH5}GLm3_|4PRjSTg$JN+rg7P-k179Sr;(JZS?p8ClN$JsNb(xy1Y- zNcu1#Cqu?k66b>3_20^DqH*#5-ri2wq$O~`G0!zEJwx^=Q*%CF1kkPPyKjtE@3Yqf zhiDlsIk6l_SfZK;H2kw%g)eT66PH@@lk)`1`*sT>gOtRPjy1%>>DQk{#EAXF{f5`U zpLNTpbhwymUa2k@Uw0rj27{XH`{MscwbY{}*IB-M`kzyv*JDM63Mi{;30bX7({A~b zAr&&8dj4}4`Pm7a0XbT#n!cWc9HEhFZjLFP#RA^pCW6UuPz0({@2l>fTY>H1$}&cD z7ZrHC>_!+v3@Bpn7dzwow{Vj9{1OQTq{k 
ztRP=Yqh5Y`R!TWiAfyd2^4t(MRY0b{+z2P*!D3H_pg}u2w7&v@AmVouknaJWgj|6v z;-xJP1P)Yy?iIzhI6Hze8}32TU|Mw3LS~UCB44*IDCp-c9pf2CZ5wK9(-BeT=M`_2 zlW_|r)ZD_sl5q5j-EelKx%H?ryvilcy}X(X3wF_vn3f#s@TlvD^LOih{xA)b!BA|N zYw&fbA2}X6-ry0Jl1=VGh=cE!IU-WasaWWPnU!|+40zYE)^6mTMVpxWt;!)P(b^8WEnz2}pd&1PNXYv=#;0=x$g1Y<$TKYtzP6Y+Y} zU|3pOmew#Evf+}dqoAS0s_YlQvrwTwG&NyWR4~9nry(wQvfAe4Jij0`Co1Z@&7WB; zl+F&$61{F}s);!$APX1(hAtI%mA&bBhJptSG(0%T65$#asWf6g17eIbLm&->-1#JR zEyS5Ae8`&%pWQVEI@=wu7x(|bEATXzStl*0I?Pvnn9p~Ue)4r^ap&~qV{Rp(nDxmW z0zj%pz=_h{AwHh#UiV0+a`&4yC`3&ndI78^cp=I5&XIv&b?baP7OIoB?(>tmn#gvVpCF7 z^U>rZ!yj=_?3{ly$+rQ-q%@p*$A0#FYj|%$Us%qVg|iI*^UTGx!pR@iqtMI94eIRI zu!yYu&YpWMHHdZJCe>(ErChp0nkjJbx#z&rB&4mKhoG{Ek8j z1(+E+Tc$+w=8KzQHTnnnVcvbCUWZulUa?c62O&oD{(+g2iV1*>OmIx(^PY3Jgfaei z0{r)~ljuyoNUNHz6j}n`sT*Sc-My=~sI-swZ*YZLf8%Zcm9djcupMCH{;aiq!PrGH zQjiec_hX|nu7cZlv|SW}*FWv7*AlgNPwuVJL7Kr9 zhkbMx?av=~SE?$S;!5>!c4{qkXTq2Cv5jRw86}xj_TDK}EflK@E7}2u?gl@A3D1ok z{9hE53_KF$p=JWHVRzTptU_b=Ar4#BI_2BA+!!vkl*fSU$(Qe}6%%`fB^3poJkLIk z7}Q?%onv%+=tC+`oFpmcNH{GSo1`RLl#Dw|33m=}1__yAQxx+W1IMF_u7bNdtj9|T zXj`$j^EiJ;ZLEn)S_Gw>noh?iI6?s*XI52Evy?Y$gFN|1=xrDPh5|Dp9fqYebY}B zd%jI|VQmskOnOH_QPix0m-`u)%ZLT(B_?8Gf-WLL%3`yG(xoe9V-o`@lhmis56*CM zOWIX`?if<5onVcsx@TEQh6+bSq`l6Og@LT#ZpE|mISKw~T#^reEn+4FpX6+73^1$u$6Q%GNi~fI7}ca~}NC*l)8|d=vw@a_<5qnPgb%Y(w7{BBY;S?r8wKwFgULXm76y=~MNC|6#g1>~adSuzfZFv{Fw zgKBbf@=c4em-U@i=Ie`r3WpTb@TUG0rx6fuc9`V2p8MO|a$Fx}7siFgNLWV_`UPOh z4>+gglje)ID^;#x#^p>uVU=s9v_oV|2A4aK7gd^=!m0;QRH_9d3LxgV2Vau1lIeGL zbB!6ock7&JqH;ks%bRpv>Y+dlLFN^-dHdNC)k^aPs?)O&>0FA#C~wR?No=wDf0j)W zgbRRAU4I%w*?M)nN9R<`;N-w6u$t2%Bfy(kepRg+IO3l?<}b5oTxT0v8ls(v~{a($LrJsj}4brNQ& z2~zlY%4(?%+<}kZ>0eN=WOwho4>}gI3jWJDPEg>VzNHg5G-PI#)jYEZ&8EiQ6h>q} z3hK)hC8s?~eP%3@(yw|}g{RErm5j>!n8oR@5{f1NdZPKmwK@O!yC9Hgav1U~mq}SrEFL5zu$0TFwXc&x1LIjZNHeG5|hm>9TSGZT= z1f9c$<}j}|v;a0e;4Aqq{ov!bfdaJ$W%`R{1(V;X6Qz{1v6pcRylK(OqNs}xr7!rW zd`;1od4kiE|pmh_t8V>L-rn@Cw9;hWenvJL6gYTjUbak+j4jk z`jgXvFEyVch887)P}VZ-Av(5_!#2j-*t;r*+lYs(Mu5|*ciuU@STLScA8}>$@qqj# 
zt{?ruB6{ch9H4U{ptUT-&sqqp2QjLb--jL(>``vozeo2?=RY>o+nGtxS>$Kz|72Z; zbzPc9(F3-_|I4}*4e`@Le653}IZZ@B#L{4n1jZ;5E|ua#jpm|dCG};OO2M{~+&po0 zU2`c!rm0cM>QL2EiAZRq$#ae|=C>9sjS0$|cyRJoTS-k9lBHS3;7LpDWX|*N{`was zBS}k@BViIE65VRTfb0u?{`r);k5y3%??C`=O{sQDVNL->qyYA1A+QQAs*U{aXj+Ua z>JVr$mFuU;@zRBU>n9vbZ6B%Vj_M)>r&y+=`VT)2! zPkvi74p8Q?m#9uHwo ztol;_Yj$f2&~t1%%LVQjJ~I-HGFpQ|eJet=iL9&;-KdFA7*1RONk_Iv+G&Z$YFiZh zf+{bM!HT~pIO^aKDYZ&Io`{0f{wm|IkIAf<^Ty9PwwR?xnIYL6xEZ)=*hj+?)M$I` zY^yGEv#g+<tI^xJXkJG@Dhh;HP)^JA)}zwJ~) z^?s!aME_S@*Bw}5zWdsS~{3{H|Nr^kk~-2pJtKIsvW7LT(foAK?#yWiicSpl`18LO(rQ_pGBqcqk8tt+^u?J zK{xly{j*lnuHYy=rwMHaCEh)B5FCsb@~YErp8A+3dCwpa;jMb4&yakOC84mj2&zFA zhd|(KyrJ-$#tLjeiTzU0-_rO~p!*D!li-LrP;YLBK7RyFtl9oD>P{pokOEa9eTh}j zvMr6j9zVoY^_AS%rta!tO zT$M<+4>k=hJn-OFHsWo9@fwC|t#4*_72C=HTn~?iD9~2GoR1ARkdfrlpy|Uz%~DIj;SV=@yM3Yp|~5 zz|0bPdz5vB41ED+W>ORLWS{HMQ0UTVnHAR;3!*HsaWRq$cbF}~46;$&DxGHmP>I-) zvB~e1i1HyLiVD4yVcEG!6&2A~lRPVsj#RVJE=KMkxyg)a*^4Ne#ZJ_+yQy%Mf;Um0 z&>y8&JBg}m<_SzlfF$)3oO#fFhZ@T+F^4LMop;An$Zf;B{{v7KAdlr zWnWo^>KF$c>~C&d_{bz_1Ji=)qfJ`W*Ujqmbwp*4Z7OYgA06Y9lB$_x?DUj(691IH z+Sl$En;zxrlpKF$+;g<9Afw@p+-zv4|5~1%f~qRAJIdca3>lvA13Y9;4fKK3L>y|1DrEkZK0gJXKbWLqGN8G30_QMk%0U=yYymwS#fkyr( zdO#xba|;|pg;C$>28yhh$*(21i|jESs!gP(*q9!*tMWXd0UQD4BXgAvnkm!kQbu`o zMd@PbEbq?RL!1%H$_(#YKPyuVvCSze-4F8cAvQt-ri$ZZESXz{!9idk-$r{OPd zE^SlD^p=C+{X;{lm%lMvVZ(Y%oZPB}v?RhwHoxYoB{;{Q;BQD_83dp;AmA#&5#h;= ziF(oS=qCb-{*hV!I-#VaOJVgAZl|EkjDoQA5nCTcWDB=U+|S*GR?g={>r&2drit~7 z`ZjTTpVYMP3}7ny5%K!bEJ~3r{!iVDbiIvY@4W<9A*;X7%qp8-HLN*bEJd7Cs)b~S z$ZencPI0JE9q;gn+m{j_I|d*`QMiQ@%_-iUdBlN;9nnUD~;jaB}^qf1Qc5@lav9Z? 
zNen<~pKLZhSF1+Ja%8lO;($b433=2&ORAOn1mkS5QbSzdDmY_NHNt}?XByIhBx z&CYMLfk_kuYgWTqKq>PXCgQS1(Y zqL0DbxMoD^EOXov6BuN&7yB$@7Gqo^w?z4@AJ($mhe@k6o!~U2iZpLlLGAUa4woJkqsEnu_yYCPlu0$!Bl0j- zfgdoQ0G5bLIDkW&??P+U6#XYP;ce+_jB{>4gGpX}xgJ$-nnZ{dYIm`E=k%El6H*!C z+%X||4Ycs2!u?rGt_K0V##N9K_LLo(CXtwJPU4#2K5#L=G@)qJb1WVyH!fbKQ5~VDk2bAwr z{G$8Krq~{YQ>eIWXLyMZIs~_~Uxhci>5=Z`4wS@oGo0k#&-U+deI0Mbiermo3hpT%6p`d8TriJ#L@ey-WcS`d<{aeNGdy@XC~rUR6B} zVvxFO`LZuBU6PU>Y)d+(Bb@$PVxH%cQAO<-Jdc)-Ab=yyB~3t zbR|D?dvvSg?~wzk5czBTmS2?>`?)nEzMqwp{2JX(Z!jC0D<{P$nS(n!jyOAAwQ=hI z%-tNzYE{hT*?O&h4}I(;%6}O4XinZWv{B;d$XUt_Ay-T@>v>hobzpi`<*i*qw)(1a zE&TK@wR4hPP&@DuvRGPPOI$7qwel50tchy1bN_0;kSS>?by4l$%Q)f=`ow;7yLFcf zG22uPzMDUquW?P)y!w>7r~t)Zi8Zw$b$S>GX(V-kqPn#&(th&F${!Sv+xur}mohSk z+J8S*J3(<;3At4eIld#}fKKOZ>URETVKavK$Gu$ON_py&P;s9;TrIUS?x)x0=M!A* zIHuDHbknnz@np<+ONHe$9Rg%!tW!mh!eZLEAdMxtGUWdrQ^?@SVBBJV7Un-0A?d^* zq=owzpupN&ZL@J5aJ-`x!Z zpXomRY8gwVAH2K}3dl4~kl@Rrt&4Fg>VJ>sln}az z0;y{L{v89>0r{s~MwjtFYzm2zEGuvLZL9)lta;FDqK*}ue2u6p6%Otq*7u< zc|T|lDiKQf6DHCAl1aKleF`1L|7-4l0AQt%Y30zVxDaa0;AFKJJqbx_Rrjgc?%~rR zM!G3D3O~`oLCY=_G#MQx^TkjVNIL%ie%ozg#9D#Wf1sAkpALkG!{B56;an%C1l zR2C~L^80^{e?8&@xvl}ISE1E@CXd(Ii?cw z-$PY)m0_IVkdgLNmnxIp7%Zf-G9Z;)8J6%*We24wP?{`2)B3+7AS&{|o0K^ghv*#wd(bUof=>WXVN`OW_h~<*XQ{Q?2TPT%ir!AunBJmC{3PtY`VzQxR@Js}_hD z<-r-!Boq-2n{2@)n-s&*@~6~4KM>_b#a$RyGv+djh{K~1vFV@UaWALaB^vqCWWUm0 zO&wCG>E!5%w5)Blt*@SVF5P1jic^&SLmqtPp4MGNO#VnaJ7;$2-zKf1m8Ab@iV20P)JktMqfp5-=uPbP)O8d%!=j{-!m`{U1AQd`t8{89j-5*7R$-yBo zd5NtQ{D|~}An#*Y=S1;NZ{C^HMl367ye(Wg6_J?E7FH(d)*=*OV(Uxu6vz(Tz{+TN zs|tp4f;QnOqDi2F+lOG#9FdIqgtCl-lY95-h}sY75N|rKvJp6Q{cr6}C)y!LBE`%~ z@4lk4Gdk;hWBySpUQe?j#(J!f(Owi~qL7G^6x++oD=;WXHvQ|%+NT|DJ1U(2BneHb zO?_Nsf7?Qw;f5w5(e=>neOVt ztx1cgI!z|hN2QjZRe~r-2AHUHz)MV=?1her2~1U!HdiNP<09&3o~#KSm}z@dp70a+ zA=Tl46R51ODX;!pK!EXYUck!wS{^%=xD;<_rWkQ&XJ;*+-{>#P#^VQ1$#Ka8Z3h?I zV36NE@}9-8A6J*c42omG=_yq`)9F6oFh53h*5-gifh#ow$;fUzD#skG3R?R8)*#h8jP=E;n&={7|OI4xZwAHP&hp4=W4_ 
z8cp|cD1Owv2u=J2)+;(XfyzdYgR{B`!;6mEBRHM_=v}w$n89B;bbZ?T^wzU=MjGy( ztbP5PsENoxB}HQ=5O;iGJ=LkH_q`@sh_v?;ePuB-2RtB3!vDmX%i7u+sb=YG>1kJ` zV$rZ-Ucyl7{Sx^q#y--uWMuxEluO#)-4l&y9G^aj6G z{2~Iyy7J~vRM>Yf$TVy%ZY|02$TEtWf$>c>2ezDlrO5-0(3(v)5d{UPUal9!?CfEt zReP}T@Fc91K?A`gc;|TgwpURH(XZsS{PVn!9`d>>vfAV75J3>D9X|A=B1N|_X(q0m1AoMXL~_~Gp>@h_?9i@pu{ldDy^3M z{L-w=4p##vT#LFCA`$et8{ZSl_-q|ph}6h2_zZSC(ciWF9j+I}g|))5 zRhLM(e2{fOGPvZCk??uo!qOx;K9}ns(HgV$W}B7OH68hl`N25|0Y4Hl%6)ljV-iu} z({Uw&*9^-wH*;!<&MGapk$;J6_*KB?Kq?>7;kkM^c(~pz!v>lR(U?3OHv%==))!4@ zSIv_Z8tTgZ8vbT^9StW*i1+(B#q$cnnGm0c$7>NIzS(Z7{rZAL-m3M+-!<&DZcpHn zl93kk-B5gdTX>xDzg?FctgS<=<;4De@Pc+yKAN~iBOV3^FJ+J<_@8*ZKpwVJ3Asr^in&yfF?L(s@xOo|wEd*d3vub$ zoGc?vL=UL8;9!{9d>5xI9Xv){gCo5T$_eTr(2=uGQCCG&R+KI;k7&#ch}51m%rD^& zhc~f+WjV9FpwEImGOY^G9z_0yPUdoUh0=HwGi@hSvi$Bz?B#ZO zq%N4U@H4U4*ZAONqiu(k{Aq9F{4o8}+DzkTNYKr~8Btmcnf8F^N->CNTy&mzMl|Mm zycGVeMC_!6(4sTyBiLrjrM+USZlNzA0l99e(I+W5C3tL zRbQ#&Y@kP>;C%b=0GFIV|D#AJ%yr^zpSt;@3E%EYi8FV~M;=k>e1Q`xdHRf7hR#^q z%r#s~qDVlFM|$2>fkv-8@Pm8z#u1go9KVauGd?j{qy%?u?(1aBxX|qGmicP*Z#Ub6 z2e!xk!#U~u3&`p9{bL$GxW}QmsfNDujU{H!FrEL-YA&&!;lyM^O-CvPwpe0*_}}T} zL%nu4 zhSg>Za$mkr3`5!U7TFlQ*26>B{lmkXxAsYQujg!FE9#H4$WxHf?uANu`2!F8xk#Vz zeJ`bwSdIogy|C3;KT)9-3vTbw$OF5s7^p26m;F65sz6JqO^XiYh@6b{?y)h|!NuiZ zQmy1qzv^X*3INh2T)eMWoIaz)@^8YoQIf%uU41jVbPt@DF26K(x2Ma1DcPWwIQmd3 z)Or3i2?<-ySl21U!{HNRjYmS>!Kfa8bGO$2(+j|DBdmpQdUOd}J*W2X^kPCN?mOX=JNW$8ds_Gi zL!&wFY1opmq96|HmS;+uX#$OIWALKhm*5j$%goUZcSM}rzY9j@@aV|GVo|xKwyf^B z4H;)`$j9a%K>Nz-vVZE57}VXbzl2CjR({)i#vo@hN>WS0#{Ag-6!Y9WJpo)9l`qx4 zN2%bgp~@AA<|L`4pe$2m9Uq%N+VBqCOz=3n2Ku+g%cn*ajba-9OtKo}p`aN*y2*}9lpy~;|ys;4g zlY1#NR7XR`!`!yziPaaVA@y?CV)Oj^rk(T`Ze2xJLr~lq%;)M#N>C8Kp=62_!1X;w zLKsYw_#caM6r8D@{(hRlt%l8Ew1)MI$4?_&-;5Oe%-Wsdn2Frxq|#FL3DXa#skkVn zP6X@yjivF1P2ol$sNS#wlBzhhm@aAHA5=C8%AU&Gxr;Ypo)VJd`@QT{ZL~N+K!0zS%<;To z^!E8kb>}4QBuKBbKu=;O*Q$LYKJ=-TJ9?dVssI zv0BZWC(n?81PmNvD3KRob_AoD@Gy`E^1-H=88SAury><^9=)g& zB5SeZWoHM>t0gnFTIJB@B0_@ijAyu4WV47Tw*ji~eUeLe7NNUTqyRB`a`MXRx)J$d 
zk3<=q^Ae6O3cW*lI+YLS%JTI_)U=|a;utmYHYo=cSoWxDnUlQMPqARnbWYuPnQ7lm z2WRKQF!Ub%5@eES6W?@J5;Ahf;cv_H71O+xdLOt_ht&WcD)mPhVI=r<{TWpS`4lOQ z=JeF!@59^l@jtgSWi51BWnJkod=fLNCRAdOyYir=O`%X5WY<7P1#40Yqx%Y>A|HG$ zE4?#C1u95&IHEe=?|2Rb$>=KHpId;~OPPbgryL;Uikn`w+0E{vuvqXo1y02fHJdV| zI3<^NHJzVF_L8vVE@O_L59F+a!bHvsrKNFbtSBaia`fO@az0(>Q*LN$TCk(e;62@Bk zF>N*hm?ZsW=IJ<2%idu9Nwrk{9&!yk_{+lbC@xT5C=0EC>T8rf=@lC!&9$^}Tw%hJ zR~1f?cqX&C7zrKz$4uZiEOKUPS=o>|lN5JFrQ3sPoJOV0BGa6(@Ezs$vc?*;({V7n z=snZiYs?A#4({pSOCRn7F06l;{w*ax;b4Gi+)m(+nk8!#@@pV-K2?v$MaLmeDN+$dcumxt_2D{f*)(i?z z&uV&Qk0TA)2iwT1(k*?Gqi0ZNkGW!I$cw(GyTru%X#my+XM#Hi+KHAv^ZQUVUCoF5 ziJlzyj}3~WQP1+~hbVCJqLnMd*j>&hK8TYZ?01z2M)|s5|56AN3#oG$k>W0Y2$%~W zi$tKPvOz4w1GD~~&4Y~ZE=H3^mC7b6>Q;y=>di~pRPtLWjTTmrpWj{?@AfpTKW@MZ zbnykpW$WHMeTnsCbVQVujS#@emdp=c#>^Syt7=lF8C+c-CC4|um>lk8dP`T_ zr4HpoOU+A0qKDq4*;Q$@P4k>tBedd;8w6bK2VSJHvj3B^lU&ehnpXP-LM_*WQR$1i z$X*X(`(`eEzYgd$P%^F4Bv1Ewn(+DxNgsfph8PiXbC0{0y>53k6i3ZgXaC8h5j1rA z9km98V`ckg4&QDb9!$0xN#7stTys2a{G+2^AHr@USx8lE=~6Bt%VF3Z9k-|HCJN6QV}l^ZgjN1YsjY0 z-qF!?xj1Ny15jWRoK2W?8yTCZr!Fkq@EZ>wpR<)$A!nY2=SNqu=qD}S?=WE8SP!?( zvShdIYNMD1HJLgoXBY74gsL)^@vPaW>H}b4?(J&5Cl!^bcyd^ z-sVkfTZhDO!{R83;?rtt1~!*1SV#zIC$l%(PC}h|QGY{dVIhmIw+~ic9nnO_3nk@S z!?p86!jIqC8e!S9Im8w)7Jg!Qa^LZVeSw6%x``h-&wthX0D)=jZ&I$d9O8fMU{wy8 z6z5Kz8&pcv>l@+{6E%g!jXAhtu&ZXH@taACNdS-Q!%c{&Z7Cd`ItdQa#(2<{g+>z? 
zTCpUQx2Ntp2rum}^L#Hj=;$$oXjKY7BqR`B=J^*8$Z;_xI^lOqJByRD?!EJCZj7E= z$t7f^4NgnxO|*8yk(^+%o!8DLRq&*ysy}46=*gsuzm!;hBMr){XUP4%G0fM3+&MoV z$eA2_M@cZm(TMA%88!A%L2PRax$SsdZURtqXGRa;oEjCEwGJJVgCoN_j1qTun`Cx#_ptLZI$uh&u&1wu2~HMHj+~v8m@H_6mb=M4$S&=n1=tN@*^H`*t^Ty~PA^em5!=zMVOS zaD`ZGp^d8QcS9V7SwqCdRLS;IuxLQNc60OecTj@paLUIwT|z=8c{boDuMen?6VGrh zm{V8+qVw0Rrj&mSNxKHcHjU;)J3ZU{MYmPk)9ktg7qD+X!RhAqny_7nAr8?eeBha6 z3;#{xn~&uzQA#-q?<!hufh8`+Aq~tOuUn*s-@WcIYD@*u{Jk#Oxw05gzX89DS`^uai`9AJO`&c0DTK= z&{lA6jUI0n+O0+bYka0(77)N|FdI-fWHx5ipZU6Ta3W|Fgd8hHC{E) zO<2y%Tn`I-OF~`nd&-WRTkO%6Pg>p_?2%3IOPYkVXq=5(dp)ZU2!ZlwhJth+lGxQy z6uO*FYW7<$gRqy+8{6$CW{$6UWnc|)w7UV=#d_1;bPRXe%a)q3Sco_vud_YtJQ@O! z`7H5b{dQN|S?FLKOrj{$PE}uDZ?V4{)SyHhk``}fE+HcYaymi8G*e?(l>*RE-h{t5 zd%T&ywEKBMe1hw?a(NM2lBctRb~-^x8Z~crdyj9nCKqmJ>YQ%h93Eg9W>qyzBBJN+ z;+%)4*Hd}5&^s_uambe81M0Ax6i@3kc`f~9@Y2&cjf*i*FpvXe7kC)cYZq7KwN-^p zZGRETygRr#m~OL~@J2_43({K>XtDvYqD2+w=0M~vB4Xau(&B*Dj<$xqnjlEX*Tm%X z$>Ww^#8hO!X@2eBV+lsBoE^(R?>?Mj@*+o}me>vcT{Grk}Ianlg?BRME zasB#dz33iW8vN125YCO4T5k2m=9l05%aPuMj~(``Lc)V3wk9k?2a_l-_ji6hKfVOW z6#-EhJn6gG+4uK7QH;(WQBi*)K<|L2f{LiCXik3aa?_tbt^NZa*pQ*mBjIWWKku2E zEXZ_;s+k6l6AEsW_fO#>e+-Ixh?B62q*v_3dpJ}@r8?j)`n8ze3Viv|!0cTfjaK7(IrJsy_Yi)^+80e;*mu_Gm$z0-U`Zpal7U_Y z^=8}6tO64<2lwO1^qJDI-Ue`mL&OYIxKnA^6sCemF8yx#Y)>|0zrj2*+1xKTc2lN^ zi9QzPH%7Ky(G<7CT3^^$v3n#ZVI;V9{*Gm&;hDMRDQulv+`9)=57Wx=LG?P!L@?A> zT1ys9tANo-#*_^GB^-S=0BZ%uu;P4ySx zYsP<7nhJ@GQ*}^%ED&`1ETg*B6Z*Fe=w&!7WmA*4<(nEu9X2XuV16u%2p$7mE^SsA zrZ)ddlwl<%`1a-`H_IxnY%&1ex%(#ux8Igw={2`|j(r-U-^?r!g=*<=-x9VC0n zGBrA8Fxhf8Dn3fa-r+wCgIH*hX}(mYH{Gt%BQac2ddK23HPmpo-n{@%q7Fb&44*5hHVr6?iaMklEy1ry%vPQ$5&1>?4Ua8$I+Dx0T3eCsp-^;*X zE`6Y^qTFD*8b^XJ(Z9N?(JmCcp{O*6#d8an!DbgN>lrwKCpD@6lAoM}sVWsn)2@QH z>9sMGW5XRD5kW!$fV~MKC1%Y=mr_+oQ;X=TDm5|zHpO~AUHxTUcI*2q5~0tm+@E9f z_Ts&yy$ir`;&_2&ck2q(x@v`Ts}Xf<$D)?8elg)jz@#m3Xx;t80|fRO#qo)+TWK1Z z*%4>wn|6gh2T4e$zY@(N&yp09+sm{RgYqK3f1FF|uUR$FCMYbK!y%4Jlx=vwv3|#L zpie*1h$eXNw*G30%OT}qN~+=09gExk5)B@#qaR6!d3`GCcc~)rXt$lLIp3;F6^+rJ 
zwz347YE7oUvV!DU2PX@Dfs>73`XFjz5)0|Mr_BN3w|oy#0mK=B#hR`2CXv;2EpCUO zRXp{H!2XW*7RMy9=P?_#QagMn!nI&(T+p*o(VJ3O4z^jDM~B@%^?i z!G3x?6Ihq6GS&iRxrZhv(Sv3Ox_!i+olo%u0fDswQksrjEt|gI*x#?9wRv?+oQ*{^ zVw{21+OPNu$O=F(R0|v!zE`k|8jGMQ)rgG#!oA}>wBqikfC07hN=#PqUtYV&-h#X) zRro&13CU7w2{^d{QHj)W5fVIH?qHML0}4i3u~9X#r;@zI-dcE*whPK>>W%u0*e4pw zOUg5yHcFT6E!>*iiP6fEawCU9Zl)vcg8vTyLqWX0n3ba2x%pW%Z4<-oJ-XrJ<0U^6 zHLO@6N1-rKHV*ZS8h>3_iV=W+s z0JGs}N<6*#-pzHlcEn^?j;p7Wv_s`3dhASYP;JAC#mhN*=!mQl6%idu?+5Nd=OCgb z^yFmbvUL6`GzME4sgRVM!02%!2#)ck#;zDQ4;RV{YcN-7an!hwo{-5qZ+$>)c$6gQ zOgaO}3F*8tWh}mu0GnD%fXP@HnSwHv>uzmF*E?>((@WSK4K)=8w*J10HQ%ly&_9%F zQ#qxY-Y#@Wi3{X;Nz55kM_a-xb%5a*-#qsSIYgxQV zq(BUl4%LeC3Zmm8xuf^(*g4p9VAoOR&isPN=zIU$u-COnPQ)FhC#CT8vriHm7b#N}igS_@l3Dci5_Cp8 zjC!LaR)tK>4mCKtIbi2tr_}s1Ibf{TNE>_T;6a8AdlK(pH%v|yxOp_9^msMqN)667 zPQ)KhV(R4COnhqs1$jkGoAQyI^D@#%da_JXj)R9kL})|=^XAW|RkId!zP1yNuJ&^L z27`fuyh7H0_p_{b=I<*a%S0qd5)Ti0h~_P#`F`DZTzliSgoK5n)7VL(b?L$tq$H=1 zms7~V!S~aqO>1oJbm(j~WM*Wte90;@({pfh^|Xd5oz^~D39vUmd<9Q;H$Iy7G5I-V zWM}3O)Yyp^#y^LlTEmpLK0sSzOKxsHOcx(w;UwEF9@^QAR!fNSfBfP(5>H1tXcg7t0V!|9KE(0qgGE@MG1aE-qPj- z2M0=_H=5YH{|K|E&XM*| zXV%CKBR6;H#7$xz`goudk#^g;4)#}!N0uyMn0bnf1iM<0KLgL}k!_^GrlVuOk}#P~$2ye_w2%Yfncl98Utgdx`T z@HxUG!{mCn{`-v$i0<=R_!?ZKk(*xlxm6p?jWJLo&0w@i~N9I#c@H5@vcz=AIq;)orgAwl%HzbB=YB{KD*jYh|| zKmQ^&t})($z9c0bTEcpCuq9S4_ zD$1knHO;xH>kV?9SDvgiloS`U;isSZW%XvlBYi0=En(18kIKmE{Jdg1U2_dMXl1R| zKYrN6_p8=R6rUVAh?pi3tXQ#obN%b~V#aS>1nFqHtixEiIl zrXp}^1XKb{)eIGZ{}2Ky0rnpfwmMXaz`sL)DX|?Q#Z^*5Dvyoq$MrXNX4&FZtXc32 zkr8p2Wh9YF_Rnl%r00Fzarf|G&9e3E+Ps$sp6o?j<2d$i-On=Npp6Qbb%_Q&`v9G< z??hH=Hc1IdIJ(%E?8$*` zN0|Jkl4ftv;C|eES4X*PJ~O$1tjyy?HI5`CB#eze{vn;}?%vKcYt@7Y9_=He1`h9! 
zm(KaP#<6&N`mt*HdR8u7Nz?FV985Vt-$8fKrN<4jfVtS=oSBk?#-PL3)0eHAw)5?Z zwS)!*OUJgGpA+7}t^@@H@#V}VZ2Nt;+*vIm$r6)N7&&1GE!#9FAt9d3^laSR+-T9d z8L5fsOnP3am37O#o#^*aZ%XouD9kJ5cxEnvK>hn~8Z zo4Q^{ettel@y8@_5gZyM9rbTaco#d3J?(CdN;eE7k1 ziZjb`v2Rox1z1&8MNmWlx8Hj+c{zEknD+z6kLNLX^dtBM_^@u}22vA`$w)NCL6pp6 z&pgb{ceTe@W8m9)KeBS!TB0H&&MPIU5--006#jw!%$z)z!t7EU?VTkVAR>>-t4r~Z@utV!cTkX3#5Z4m zLtaiPPY-_#pFm&MuKtl@3HfAXrOlYVlaZE1X2x-%q9X~6@MYsqzw+fr z-^vKb$R+_i`uIbb%o=v?-X(qSEn2p~*~OJb^OmsTyG;`7?!9iLcmJN`=Hzp5|6xgf z1_g#n9hg7&3tSuo`KysckRW;+`8W~~>`OpkfF$O2{=Sa@pCAef^SQ3eHH5_lVRERV zal3E|^NYwlREV?I9est7UpM?ForJ;(7!c~mwKsLbWLr(sX3fdVE@Q%oiG)UpC^3z6 zE;=;QN_o4wIdfp=QOe6Ih-)5=x0e^Im#tytf?uSrzwqWr;#x;?>{vSa86}jIl+d_! z6t2#VSU7t*d$%3H)5G%&jz1AusiC~0jJOstG;bG&!M1|N&7-NVt|8%YGEQ1|%+)$J zZTyqt*~f*-;pOAWja_fRWLrZ-lVFV1CO&*^7B(g)v^pD#ON+R@M_0UpT%l1Vam`{$ zKAb^$el<=ySB@XcWBuBnWWweqVA9cg_H`v|7HlLsA`X+ZuY%C5#@)9O zJ^K8Qn(7)>E&74P!zm0J-k;FWP=5dMPg%20)HRdDOHvBAcE67MpS}xQM_YFNahPeZ zD2{qT@}J9LjkG(OlE6(p+w$PR2iUXq03W|KuR&^1#eti`<4-?INTff%Y}mlYHCwph z*3R7A<65crb3Ryzr@K4R9fEl5>4zvTE@Z{hRc!w5knD=HhckZOerQcfYR<%jG=@)j zf;R1%5g#8Vb<&T zedC`TJC;F@yZ?uaryWkd8kxfN$et7oWx@~cL`HfB3%^>ydmq1!osBKS2R~0#co>!C z)fkL5xOzI1n2^rIsV_*|J@T<}j2u6hz<@x~)6->inS+z9Bw!4BBeQ4Br7)`kCx=EP zCnoUJ*e7UrT^yO&>FBhMWTa+eiyi*n0sOe`7k*y*D}zQqM90qU$v&1vN@5aD&JHwh z-kie1Vm|s{1~w)~Y<2c^Q6{Glpfv^Jn;*W6r)MLkO`T4Db_Gt(j-(t-Wx|xU0V-DlF@v5(QLURcTodyI9u4wdCdI^Teo!@eA-}-M7EUHPz8y-g+$n z03ZNKL_t)^n&S9eX5`t<5tOto2}wt}YhV|8_PmQf)@^0pC*KflNz*9FWmAo`fxY|m z!qdZx70Xw$d-Fjae4-!GO~Tl@Z71I?{7FWWR~pLEIvROmE?m#*B|r1bE05Blb6X1X@<~lfljL&KrcLqk^X9vy zYgx5GA*}`u>`U`@O*xj8ML}^9T3a2>TQtGJ!Im%Pe$Am>sg&lI^46?zcz87;D?OjQ zo8Op(b?EqHV`B;?#+=j?+__BQho99GInn{h?kdG2i4@svnmd z0xAKf5@45`POAO6IuTF_u&Yz9>Znu%PKm(3N`RToawoFLTW>O&WWIZ&Ng)74odWsZ z@+wlk7>!0*M@e}#)V_-AE32wxbW34j0r3e5I5~>C7wVxRa54h_9swq+AIJy|2R9pr zz3?Q(MWwtpY$_2EjnT-|K}wn$LHK0kq)W$QXrxGKr=_&An7oW)LPG*%?VF;KV#1q* zFyNWqq#jG={n2x42YYj$mOS|2gECFi+-ZyD=Qs3d!-E6+k#sbLrOTEu{MlzH$}8d3 
z;Zq3@4Im{gi$@;sMVH>4ICSI??~I&Z+ul2$PQXsNfvgS&Baw50ha}D=B)SI2Vw=-+fH??i_9dbMW>(+xci+^PK zqVKq&R|_5-ct06w*-RRy)C&>EyYt3URbk-er{2duzz^2|Tb>y+go?^C#y$Ra14U`v zDiTWUIg)&caZ{ecJJ_8mZ%rd}cX=Hqb6>(xX6B_~Z_3XGj~zg(&dr(i@oe_|tc*n* ze!-1!v=?NGQu`$(F_rE;yU_R1o^0N{h1qW}tc{FPYOt8moAn$^&cfbZLy%{fY$vxU zi_(H>!Xksv$vQD2wWUIUJv8_MuDiVzM!kXWzuw52RX<$>0&ECzadFIq+F`aRf}?VI-S$z&0^ zs1OGtf=B4NaMNZa<>2IQhp(59{5-!PpW>oQLc&9^)4GwEw4ZyQ>_(4!x{(^6#cLy` z)|2ZC5|0^ge@WuuG)BJm2(8<+VCh$@SiR^sd0xA2aXdNfang>Y^Tu=2LKgENpizdA1gU2OSZ3=COZsJiZOm_|cNLJCig zd6-UJTCwK)H7uX8L7p4h)`MrC9VMNR6JMT8O(DGS)(GO-MzZdQbu66vy*%&c{;lcP zuOHif*(=GC@TkxRwZ$4Jx@D)oAT`S?i^<7U(nh^CXRJ(t_0~uw+DIHBVS!4$J&gvv zv4*r1C3W2k9}dRd)q`oT%*Mg&Djlav#FjD#uJ6-~2L|+G?XsU){iTuuQ5=FGsI7T( z3IeR6NRN{wz%(39PU3|rL+}svmG!}rwkZ7+$Gh`hq`E}UgyEBE92!NLu@qZ(EhEMa zrl_EliNoI~N^UR^Nimf)UPcVE#*;WiCBE&fDYU^Vi&(5L44Xx?* zST{+4?cIEonQwmCkhnS#)Dzw-1ejJv8!NsUGu@t-ZIr@jiE-SL=z;epKFjzpC{7S4jwmvPS>`Vk)I-3>v&ckJGbv=!^+=@j0mNy zqL|<~Uj`3*g6ym;UKv(5Z$`cI7>#2avvBse?EGsF?jDUux1{EL;hm=m4GZSQ7sgYR zqO3{5LjJaPBqu0y<=&^d(5>4od^Kw+n|@GIy;_gaFAb4Vy0d1?X4~(F=yLP*XmuL4 z@7;!bperLrkDxHOm`Tr0Cc1Sn3A^i3?};|L_HR$$etr1)hu>H{^Lrwqq9pq$V+G+ze1Mf8ye0hm(^lp;5sM7~GHaW2sDeZdPr7y7leI{SWkG)5a~#`Eapx z$Sbvm>Q~~b>wg^qHUSS$S3aCJgZ#`2937oVO-SLzcSjQ2G>XqZox|@dcQI{z3u7Zk^&>0@wn5;dgWC-qQ1A{Gci_*XSE=iKC7&}l)S7${6ai& zwR4woNAZbC3>`a=_BXa-?e}Y0Izu6z#1Rncjh}NMyASQ+vR6NUwnuT9ozHeXAAgc^%nWr>ocFl!O4Ni6J}tqbtWw( zo#$R1L1fcVKL2bE2|JVUboV4FA%j632G^4UOOETW%9W9&Q&?L9WWS zR08a(6zdf|sLPQ6)9MsbO4xewJS^+uYDJpY`u~-4#38HK*{UB)~)+AXxxj5?v-=h%c1@J6~z4 z*Zmy?{w)H`+DB`BiO0wFr9;P#ESvj1>sS3kL}WyR;^rm`87686iNjhPT&GbUx<&L? 
z|HpfAW6$>NKeCsioH9vLiMk+YO?2$kp6s*&UK=q@rr=3Pis!+n@8P;z+RHi==(T)4 z;|nqqa`E#BAYMcsJlT)y?rh8V>waL#$G;E}8%FZcRGxeNX__=|!iUp8q_C)v(PKwa zR+-Pfy?d~ib$Yb2pnKEU=J>eQ+UOm|I+fL@Zw}7yiU`Z~R zO`=G-1~DF4O-G}oxf(K|zpSycVCFJ*ZQh5Mw~tISH}s_kY1_FKJ9g}#qO3yJqY-w+ z#$HF~4%c&ZPYUn6@&PyWZq9&*A7s~-Lwxe)7evGck$g0R7pD%z!_A8sZ_Fn-F`e%H zujAhP?%}TuJ7hhsh}aOB9!lVBs4+-(JaGsA>v8-$P@Koc-#4;m{vR}siI!xhsOe>UNC+Q&G@U)a zC-TzUqlsx6#m67c;OLH28A%n<-jAWrJSpp1O?he78Ht+u?tjs~>HL+Y-5OXZNJ&a%{Pd@Bb#|3eQTApRw6 zud6om&Ae4aMukai>yl9^k+CPHK&&V<$Qoc(73KJby7TCmeiX}j7%x8;_Yl!}O`66? z`!#FIm!u_T;q2-x)4M(~?mjwn=*Y5pYgxDIXCfmbEYm?qK*&tQ28#Jq&%Rpbmoie( zL-7n_#@^9}pm0C5Hd;wEWgW}G&PLSEI=2WgDS1JrYD5j46z=MK8+Si=8=L<4lX>qg zCps#s*1pSGA?se5&yVCiO%NC;4bP zFTXd2fDnH^nmV1l^b%Yh++p_?9}*iGE2E*Uv+_hcS)TxN7X;V`A5)N3CDTC% z?%;pDZ{>%zKeBYz5AwFJeLM=+MjlLi?MqVP9n#ADm8O*0sz9uyxhvUbyxp!b+y7%tP zUw{0;oOf0b9VP0andIg*>)Nj|>qz+j`gE^OA1PC6PMb8Fno=|N_6|za{F&B6F@MGL z&>2(PSuM?}$2rzBl!4Y@M_`^VBd7 z?2KpTJ74kC#D27I*O5h^tz_M|zscvX)c9kJeeZE1!y@@~@~vsqYJ2p(>(G7Yt;l~!3%UF(GYyIOEQ z&rnzDH7WsirAqGV?f2zCfC&OiY?4xH?`q_RRFwd`dUZ}6*?$uOl>qy1%3U4xe+U6l z4@VGC@`r`+YMtS*7HDr!s9iYo6mPCPP9(a-XQF-ax%?q$A;5$quztjs{9Z(WsRY=6 zNc;Z%q5kUxn5eB#RfuPpGtUfvik#FuUVmY#B_cvNW6%B*-Aba3d~q<%ZipcB_%W!o zLua+@_JoUn(wCUK2*>gXotg4)+hYg{+vXr8N0&H{+i27#Iscq6> zs?xA_$y)N0@<__br01huxc#o~{JLf{-+aDY))KKgm+Q1aIFCeau>^*UA3*!|ZTWiU zQg-~g2OnQQ8BH?w-ND4Qh$c&>Q4*VdHS(TDvk7|}C(@5*GyVNfx#8YsJoxYcw*GdI zIq!=|jWAhPXTr4Ma>w!X*XEFzn8twNJ-F$X8(IC$&#e4ry~IRuq$m!vl$2zy>DrD5 zhTTnNO&JwAMr_Rv(upldG8wIB)UjsCkL=yEpGN*J^nc`jVj4#)5qqU26do_+*R{X1 zcl&;PeEel?6RpOEBT4bxIpAh`_36bQKW%63bkWbcnnHi4QiX64>h*fkj%DKFYLBO< zw}t$w8_VCBb*Cr5JmjuyjjaLyU~fFU8_Cp4UVdKuxMl;Z7i=UpJO-nPWYj1+EF4#& zrdhB4chIA6H-6ptD_>6gmMB^0S}%iGPxyNn_ZHPtc}Ab3UInhkbt?#n&@X zl6~TzNc$JwB!Gcq`bq+9>L^k3EE35aHJH^JYD|@s)Z{XL;&>`00XC7aC=n4Q9InTB zecFpOa(ChV$sbTtY{tRfk)(t)#=kw9#!X_FF=Hlwt=r4^x5v=9Nh~uzoW+rysj}VZ z&i)J@{y2&I)0q7HCxk}_qcw@>!;_DO1eoYo1$z5(#*7<9X>kRUMob|zItUxnsUj>y zdcb5ReDn-1E-noSFs(_4gUNyYY5VzK|7#=x^Xs=;Sop;XORXM-{8WyH1emCK_Q8}7 
zDK8KNm=pO0xwP!qnERi)lZxt6%JcQuU@x7a;t+(iq0-Fir9aB3Nk<1q%BqU+40BQJ zYg)ccIj78TL0FyLzSbdSA!*GT3$kBRRuN<8oUDC*s*yppHE!g zfBVCPNNWDbrE3Svuf@}{*S%u0FSEZ+V9r% z9um^CX*B5qDo96)AVs7jMQMr^djmhaq9PV-6hW$V1S!%xf=CbPosizMegFT=?9J|` zkOU9|-1|L#A=$ZeXU?2CbN78uX%f;D2DzS*Xs>-f1CbFCSU7J5E}xBuySoP?h^N0c zl11&TUA7f}Y}kk4PxoYcvTglAY+be;ArWCnj!VFkuMbDZ``TmqPpeT}T8JlJ7>&~M z5|rndAOo-4Mgt_r43kERt?T|oLR7C$xgMrWX!pOpGo!Wi#aP8^iZ2h z@alUrQC_CSJ2O6mDl`fbsh-ziqnxm{TvRJeVz0xjS7)KTM1wb9P!BWh4cLdtE>4@bEHe&DgL+t&F&Zk#CdlF5e zqOf@W8XVev65R)O#=}GU!Og=FMMZhgYRcGo;1lYNn9He{_rg-Z33zw@i;!xKczeb? zG!Jfpizyc{?D64vXmDrzzVUbbzHB!F{es{cD8n-^K8^I0EWH2fXBhls4-6VK7~3}; z!rDb^xxFFckfPY*QZVi9QE1n;4I}FIY&(juFe10>ZeOiwPEENQ8CNo)kN^R}!Jvo@ z;i{ho)V8vB*5@Jtc4w{UoiLk*NPtPDp026gVl)zme0V2UD zjbLq;NPyKQA@TFuO#=Tl0Y>r?YTi6Q`$;r!8HGhZ{(|$zW8v!+P^Y?00!cCB#mQ*h zEfUL?FU8?Cm*C~kBWoynK%*;%gS|6c?Yvlo6{W5kFs>{541N$Y1z5VT@8>OKsnlHT z-B~K4iPJ`+OaHdmuzn-9E7A^^M_ z`5`_&2D3hzie~pj;>Y=maAMabc=|gqfr zQC4&8%NKryPoM|%dMzlUg-NetkzyVmK9ES|%nLp}B^&L!v_Om2P2m^Ph@}uL%qqr@ z-z-9Lb~*F-rnS*Yw9F&?(;xl^#sAl?nEmFva8kH2z@G@Mx?Vg&OvWOya&imVZGM4` z;ArQ>WOF^2EdkcdoC1*|z}#4b2$6pTgyv-xqUD1TnDNp?Cf`qA{0R|Z5oXV93%NnH zxRMeR@WGF-utN18y!Iv2NjZ5`)-piSC#jhLqX@9x{d!{CdLqEKAtYRdWCC*E9EFY@ zJ1{`~&)*N=qlGWBbaQV!ITztUkuWMGh)syW8{a$+UvD4${P8jrWfnsw*CQb<6AwJt z22m{|;T!6WsHRO&lv9EqzWN!ZxmwsO92i(@WGVVA1lXbjok;=<fc#%m-|O`Wg5_!`BT8aUWHAuc`%Z+`e9qMAiu!GZ-iw(}g` z`ScY=;C=Vi_lQ4}293T9kq`J|$}^J@cRm?&U;P20k%5pJswcphEvkSSB`g9=r!2zh&j_$buy@l@tp0`eKTqjd9RWs>Z8=1TjK z25$Y2^zVqy~B|M4Xhlx8C}DGgz&Na(Z%WanhT z&D{~f0V>S@bRi=#pM7Ntw0bp`FJ6t*#B>y;8PI7!Q%rbjG)|v9i=XDKt02IpzdRA` z?r({uizS@Jp2^o zBLN9>@FM0)0T(9^mToXMF##`sHXR}1LHOX!&(N=Le{|~B0jrm;#oi4^m~IW3&>j7U z_Qu{F2eE3w4tNGSz(gs)jXLPm2DmzS!r9T45wK)I<0=TSuGqZgcl^HOAfiH}V2~M^ zL&>XiXE37U=XuL;>7Q75czUrkaYTT1>eKwD5Umqa&-Z*jbsBy zO&E!ukKBt5zpcj~tB#_1NONeV8b&?}7m-vtBc)VL$H(8#hC(63JFmYB4+lRM!6&3G zt=m~f`&9z9u8a{c!qr3-1wBRl)qB6P{8cP6ef*1$pljFBNv-Vw03ZNKL_t)p_;ici7opHF4P=#O74$FcoqFnhs^aB_3R 
zy5$>j`s8_N3xMYL`Qxcs6A>Sqf-h(N%2H?k@7ot4(HZgf^m%9=(hL`qFJai@k1_&m z^M=jX_N%1^+?yY~h~V%by#CHBc=D+UXzb;OuRmS{ZLtwbCB>0Rn14uqNe%*=Ho}aV z)0mUi$8UTCRag*<_Ns}bu3pkoKq5y%yd@23P)Gn2a=Qvgl)CBB-PuYc!0ygBX#0m$SV)u3XJPWa|)2 zojD0vxmU1n-yVz}H5Q4NQ}EdvKOiLBAIb3<7(aD5x)17rg9i^`$){T?(g(i$(Nl(Z&1U-9q!^o0Fqg7+}lV8Cnz!Ua%N*MKeq^0Dr zxrT=YFu*S>Ckw@eYB+l*JA^n~zVP`5EC=YU@y1Lj@x+AD!A8{RZ^Ko-IePa^VKEuG9Ex zCK0Rw_eZDpLEnMBaBTlsEc$!}f+7Qu8k>$+=T1dbWE1@K#Y!C8e-dxce;SQ_8ezdl zOHhzi3aOm}>G>IWZ{8a$)zsTlKSpFoIC66{kXKR;PfrKr<&`r*O?hhs9=Nv)RxMe} z(nUq6B3MM5QKn~Uqn>_q658I|5(|G^gwuyF!rwcH-AD2$DJ_D3L?cX|H3~^d$@ucc zrJ(vG1P)V_l5jEE8A?&C%i$4VY6ISV<|71z`#~X*BPlKwZ+`d^!kY)-`)?QE#I9(B zhWjBY{tEv0)vNIGY=p1B`Wn$ElQH|VS6Ldk1@pc~;`u93>q-%RzaOT|oQ(L3DRm;i ztTjYqEJwMi5U;)cGD=EISgp0da9_xcO6IB0Q<9lyEHN=4_06wmz}d+eU(Wdfb|x1n z6be@VO=+;hg_MivJMw-E9XSMhe?NrPi?$#n%%2m@OeVZPfv|e}lUWE<`Lmiyi5D}> zgp~;fSs7-{nTj%X31(0G7CtIB$m}FA7){8E;lyu9go-lVRixI;FUUocR^gavwy$5j zxv*aLRgjCitc;}%>pip^lu9MyPp4wZ!WBH7TYVWTVvtR8Xi<)|#8iAjwS|ld%$oK# zf~mF;t7UZUNEH^LN~FJ0T~V`(>Jmw$N+c!3qe*u^%zEWHBwR?x$1i`42On*P2@@yc zE}o~lg(73$FO!7H+~$F;qto%~7ethV12(POgsm$Mp+#^DXk}^`tOzi+W2Gh*!8h`WVdy)k z8@6xTi8ViNMMPLrmTHhlqPkw?Wo3-??>_JWDC9~coJq&8OIITzoMcw_ek{+EpiEy3 zxtkuZyf%xSDet`SG21T`w^bK{MzC*WuY*GTSZdKTN8|XI3A+wU~(jYZlTJmy)9~{PAJvGq^K0F#?Qgcr`_AQZxn(>y2UK z9>$TA2Ur@-gy>||De<~uIgyZK0ix7V{4PLH3b&{{; zY?ZK#1rw)DLPT?asMY1zv2h>Lk|{FSuC6L{*R~T0u)B8Y8|=hI0!$>p8mw}P1G}Rn zAQE7A)N+cG5hZYK3DlN`s=g6mtab}aeP}YLK{WFEU-WlY_ly5dX%EE|h@y7>7ZPX) z1Q<{KBSTC=9A5eONvOhA_-X!fB*$EVtCMRjH7$7RAfwKNGD9I=m@^&rE()w!z5z#m zKgUEG+R_L2cIbf2^emh^bqR$zC7Auqi>y}ElJA$}?|spj@WN1Z?cNpp|2%}HU$0~L zjd^kidJlU5S-DrS^5^w9vp*4?1~tKi$0s5ywFq-w{D`%iG;1vG>)Hl~4;{qP&o);q z~G$I?r`z8!}GJAfljZ*(j`lAc7GE4ta+ypv~SfBiRUwL=;&c| z8`=t^#*fCa{TCSlMyUlU`t7ZGGvVRxiElq#1a*lP85vo4fBviR3Gl|ojT^CT*hk=t zc5g+SS1D~{*gg1p!7sSOY2qw}Cv7(MY(TusZzd(X{d;~P73FnaZS5Pu*42j9H+ zEBo8{=ZB(epDs9d@B|iqyc*u#9(b_leb5>-*t(9_ETN0iY&K)-wG@~BNq~pD7YfVr zkaLBTom1Z!iT0h^;@htm;Ow6~^;~eMKV(ulThAh0E?)iMSwJSiFW;`g<>M)=jY<{F 
zQ)RJ86@DLGAN>3h1C!sM`8E69-c12@p$>yb48owX-Eb-996o>f7xvk}$z3pb@KD5@ zOTlMveZ~Iv&ZjT46rV(ZrJm13nYI)WodYTeu-Z}}T2`IQ-zd=|DLx(V&3hFdzOGod zY&ni^yU0EZRRys$rKBHMb23p{$VtQ3znKDOCl|aq>5Gc)B2;0F1k23JK=b>;@x=6r zY!6aI5?wyNZg6+-QCdng9;<}73>yeOSv{V%DR@Jqs&8N<+{Zx?Layd4`B z?L>H3GpfW^Bj8W-lA5F_EXzlDt3Y(=@c_=8JcnaPPoU_E4i0|66VHx8o7Qcy_xBT6 z{@riziFC)yuf4$PYAss05NCELK-I1hrad^wDp)zk3@@cz!q%QsVIGvrAYX_x6jxuu+5I<>!VIhfZVR zdmC&bz=!}_w&+)!-ggmx-a(8^VQEP9M(CyGcbZUL2yVQI(BXgrOXMNSMJ63^*h*@$4(!Oo&!4K z)TtBr?p#-!STU7+^eR$UfugZU1G$c14rQD>dth%q@)DFQGS^8;wa|m z^2@g?p)UaxQb*(#=AgyB;do-oL}X>;;O%EWWqoxU)(#`b4?}Kh0p5OgF8kZa>HW}W zaChWq6=TWIOL5^~Dmo8piBS`WBReA(|9gh--6vjtjMbt#y#E9i&s|e7*M}eL20J@P z9Nl-4=@mI3NOft$;Q8(gBAO!mico4&9^oP>H4u9{D!PoEofUxjT7MWMym`yf576F`!8LvH# zwjEkw<R8Pdk$Ka=LmLeyilr9X-cn4NGYk(r@BlKBa`4sX-yrL93Hp!kj3FZ)#-($ynD@qF zM70gW)K^Bc2)}oq{DzGwypu0RPaKZWCXI3K?`V8wCcxhP=|x&B%%1TTOKEvI=`z!$ zUPC%#^TsXseHo=fZUU`I!wAzk3tnLFSQ4!qYnN@n?oG!K6&4OX)kqWUuhfLAIakoS zLvxIqJ^~ldU&1%DX-}wZ>@$6Tyk^QtsfnE{EYJ zeV5%K5@2`Pg5L(yy^{$r62IV7$fNnIo^$kf0_N-QY;_wft~f+d0u4w4A_3Nb7DF70 zD1m=l0wMwSZ%bJ0zoAK>K@eaj2?K~@lH$;9U>l5|JPv11MPvR4REs6l46N1+`d9RpAStXK=3Mh~T22{#uv#9WTYM{mysr5(DQ7=sZ{^v8gK1MtV{-8=%!hL|xi&;7~M z9!KjA_dtVE*t^Q$>gtT#%tA=?3P2;nZ@;Z$z}mpkz0jwBPblpaC@Ls{R3d|uqa$P{ zJFHu_89R6WiC)7xV%&srIJoC;Ec$8*R1qp9#wFr^^Iw6drzhsUzW~}&J@Sik(X4e8 zhK_w0UX46alwZc8*qmG)k(QK+4`2HN0U?1XM?M=LrI9KvE``3_z#>5$-5n5fDGr~$ z%>koVXFrc7_e7zfFdtgI7D{^=T%Fx;{?tXx`{)O#!h;|&QH?ha`BCadszo;T*#YR; zqZf9p*^f0VH`mWo-AsT@oH+sA2DD{GAS7>2YMEFH z-9l{+UjASv)LISRo5AG}tO{XY*w`G|UnGE3kHok%Oqls7tGl*o-BxT`zLTY6qi73( zFsp1UhyYW-0nv%)@aVKY=-IzBveWb7=IjBDM#o4wwYCiFR&Bwdt)~zi5{9huBs}}l zbVN0YM1Ec#OGD=A;eoV_bbSByPf%!`kd~NX z+6ycyifZGydMhE5$x)D{hCQ6vxPD#yE3dmlsR$EO@yh?6M$=|ZQC3n0sX>O+#4Gsd zgU=BW5(bluR^Q0hK;~e=6Enx6v7ZkNr4ks`GHhJ46@TvDgGQlFY#&BOMxwN&40^Qz za=8M|?oNn}PQb@+eTA?v71ui}``XpYzJ6Ut`zq)m^^=g8h~E7l#E7RJhQd*fGl!z_ z-q>V9xyTkHJ34Umw$iF`q(Vv-Ffc4#}xu=aH%K2}> z!fF1mWHO#-IAJnj$KyjWA07*w0kQ#9BCL1|gRC&5=eFU(JWP|F>rqr3Y1B;703~ig 
z9AmWoIafg*-n&ylH$qHIOlFH8RM1qb<5%@|3rMz_pAt1gq5Z`Y&c?X>ujWW#gLuL7 zj(~AFLp6DON;mtBa9=zHQo1f08G(#!?C3Lx7>Uj&`T}}pmT;IzT`*1!PF<;qA)Y$_ zfZ1JQ%I}b$QRD>S@_g$>U8=z)J2k^+MJKx(W8tW5p~IncSEoe>&_zlnj6fUbfpX4% zAS5EuF&sI&ukbpx9#^XilW|E)wtq-#aPBM+iK!Cw&m2<2NfOgD+COxrvp=J=kNvgO z^goQ)dAIns6t&f3rhc^Uy*$Ay7U7eXAquAT5F{ogLc3!rm>Z1Sr&h|;`z@K{Z+~$H zfd?zuPWB(-prK+cgKK1mcV^Qf5m&-qj)`B2HI628E#E@Ek?6BXh-p0Jl=p~v(#{+Kf&Q|IHAh$ z8;p~hRoJfs`|M1G#a%Ex{rc%|1ER%crn^ZRm;n)f^WE!3ebn<(VJ|~JWxbprxcO!Y zz1S7?3(c;rkl?6nwK_36pD%^9DsahieIUBu-NMy1;`a9e?VX(sY>6AKwxdrN5;uZ% zZs`w`*!Vnt`0xU6hz|ZYaMVjXqb?Ot=?F6d&%DMALkH3B{(TQz@daud>SOYZ0ntf# z(c}+LPonnvNU8MtkyVkywo{|MKN0jgUBf61!tb2f(dC^@u}n#c7qKc}HE!nacbH-Q z8-bb5BUBgvm56AVI4K7WrU_MyU>?a9kPRemtJ@m=nNuibf@aKo_u50Z^vm3baNFmc zETkexvzU(xi9StGzlX)07AzS2cHu-OG37V11oVkE!2qonGf#TgYd^f@98ET=VXt2& zAr*2?V4Fix*ppguf2;&B8{hoB`3MexWl99;yx1%a20 za7PriyZ$fzE`roFas#jGe*CwYws&u~HJ-?ixc^GC(;LJ zcG-_j-Yx9ceEJpAV;}tKFBifmB>B^YHspzk=!U~GjF<=#<-+o2lrK=f)2&V{Wff&D z-&!Y?FlH3n;qxPyr&vM51HNFQDyLu90ujZy3IOm$ZuWVVTBd}!`L6LJ=Tlco zQ*+&6gHeV2E=|OeA*7>oijU zp2bQtC&44P4*{}qNhpXg172^OG{zxTWbPe7h0W(a zU!ciS#6sEh!0LN>Cb>bET*2{DNxVi)2Hu_L~PcJFshsBf(r){ zU&7|n43{ZL)GH~wK=;_Nc!G#}7z|T5oCh!P`GvaO#mL?QhDD}?rK$$1^4 zrYum}Bg5o|gHiM_NQk>2)?!OPkUmioEt@qC9ytwyI~3zJ@(R|8McJ0RpTwl3XtY~8 zq2+X{G=E*~gwHg#C{foj9>t@8ino|#ZlQ%9gv%Fn2}Ywq%NG^rXVKEd*778*~Z$lm2yQ1X;@Q_f@@*(vaf%8*0^^944L zwY0P*c?h6rHu3oiR?ha8Rtai0+Qi8W9Mt0{i#bUdCEi+*Bc=Hf+m=|t2w%Wp;xwU|NxkH_skGr%+&0T-%OVD{B7q*8eA@NnXCh$ySAe`=UOTsD`cf*3?ghweuc%)hrfWSnh1XIS zlr5=2-B_U#?^eyPWGzeibh(Ml=x`l%ZuQ95t+~Y>2?2W>>EW}C^2@XS$CqXk5uUnj z5Us516n6mK8y1vhtQic_nWr*peToUC5%wKDc3Vm+=whj$@a_)C{yEbKs-5 z&kfP-RK*n2RC$9Dz@)my;RNm-7X<}It^$b)=`)lJTAo=sx~!+zSVTUn2MFQ~AwJ!BSL0%DOCZ_&=da=4(j_ zOnW7~^2Pp^Dz5%jQrC}58dW$L`=cB*RvVHRu-&d9voyo%bYQG?z{2a^@p>Pac`yN55P2m}HU ziH89c>?C}A;NeI_;(7etiU}$qJE``$^BLWPL*zsG*X?oG9M05nSNy@l_v$2cydk~s+L7z)Mg{K)ZrD6d2-CUk5p@E( zs*Dog+B=6Kro7z7Fu38~LRAaw8Zzs|dpc#gPTt z2`b`c+*jo5^f2Z`XMiW88v6fOfOkX3{h($N>1_`QC=oO-V*>r4G9zK65B2REZfY?v0}8NF2A 
z2~l=&Ko#$Tz)%`^9VJo#T6s=JF8DVH(ed!KN}N^zwb$9$WJl0%W7pVdx$PP=}P7>LzFEi#;2k$&n9WpIw_&5?Q4 z^)=X`ObD?fNJaMhQ4!>4Fwjyc_ImR*aa~>)*xK6UxHtwEWp3780ncT}W~(izEWfSD zCMw2(;jNMp^oSS`*7XDBJII*yZ7e>*^Dwy|-+fOzNa```8yoP1wS-Je*zxlcL8xk0 zk!KvN_sCWdJfVD5Wg^9b86D^uR=h0*S6;W0oE4Glpg0-}O4s7QIiD2q_JBQa@Ut*Y~GZY0NK0ZSri<+?_)LY6eEeHgTsLsn**s_R3<$SBE&H%?7e z)X;y;GMA-HxRo_HkO3h^f>w<{aFg#`_e}6=mvwlo@1xz}RD^YZ&pdXmE8%+5L8w-V z^j{gLM`;cp@UFc>LANUI$opr|V``G47Ur_9pGY-2EyE5~%lnt(_dqUb<$^exwOu#+ zGlgg~Qr{Qfd&5gnEDUSed?(?XOhVHl1ST+@;)ggOB#LgouXuS|AUbb0sDU;}%2foG z8+-Z*vi1+`c4{_xb$74UBE)8JV!Vzm_m1}#XR$nQN#+3w(Phf_?y?9nj@VTO>d#W6p?@PG+rpzn%|^r83nR9&vHN$-dzlRHi00vr&braGJp1CcINDdHo{ zvi)`miFx$7^+A+R&)z3*f5E1f#G)vG&MKb)9TIH|nK-kr)(J%5RJ-9s(%aWS;F5w; zCOIod;d_jI&Gnw~K)M)Lyu`N|S#<0oP=DBejq_K%WTk`ME?cL*uqlDH0#IO?l*mM3 zH}?Q2cd#qseu>!UI^Z#+D5oq;jnRV6UrCZo(ksFS`9rvd?(ISBH)rhRU@=sKEDdb- z47u_Mwu|U^nof@$%iSnLa(xjcPXI;+236aAyUJjHY7C<15xEG3fe;!KU|ug#t<{3d zqfTU0QHW4ongpxN`jZTW6=T-{vfch)UWr*_oNFW4Q!O)66m2<@qIn-SXg;xnx3y98 zEvbb9X=5QNm%-xt=lLlvJ>u8!A_DZ;b;*FAx>ZikYtm z8UaaNL^f8d_DI$;7wWYP!DY~5lsKAvOdGCMf5QqNT)PO;)Z{Mo(t2R3*b6mv#^#;4 zxW{lnj)>hu5n2TSvAy-pAf6i0D1Hhkm*{TPjLVJ5-k3xVBvz#KNuXw#q#mAL1LY~g zWhHM0>;}q7_->TY{~?N$SfTPyM7D0<@AOu?+%&|;*&`3yYtns$v z>(x`cg?AKeq+QJBm(?uVZS>;Ts7oD%m}mQeB%GmpW!KF&>V>dfc(DG zQYS%P(@&Aak$OBW0%zWB*k+Q*P&}GT$d&c<&0R4)~ zTAL?}-X!BHxv8t8a1aa!TPX%YjbK*;P}TB2)+r4%z z`v#zjy+o|YSGhqwwb@bDVd_2Zio#{+cPRczBgJlRppY<8HRw)L>fiH>merpZ_X$^- zQTO%s@EW(|#{835I#~BFl?MK3(YvVIHKBYqz=!f_(y;Pz{Q+3NxffN0bi+k8vb9%t zHJkKJAC;K~U(AI;+T#o6QxN_ItuRi-I$prGb|5_O8Iw6j#7A^)WMc68GIf|dC7iy4 zSILD;&(YlI+sLl~841_%qc7XE4KYTMWE3i@XVJ>Ct7T=qh_D%oy;#2Q!J<^qh;cA} z=QAWI#`)*kA3fH}MtfW#Z{eB3RvPWpb0p<+1uw=*-(GdUpmAAV745n683LmzT3e{x^frKTyS$LYil)P=Cg+ zvJ1&@@lsH#*QMaIp1xGY`*3B*ZX|D6nkAjLPPn`L0M;I|GD*uI3IYDR+XcUf} zlDoacePd(^OK7OD^&4Vx|5lwi=)hv4GX(8pfmYwmV-(4>b#8qp=JW|5Wr`t-W(|9(aa?x37Tf>f27k4|~@tRk1pvIy2KWE53}wDjVjygy2%^*P_Ylr8E* zP&6uCV!Z{l3%XM5Nl`THRY|#D0bvT4 zqt-T8nr&F&Pxu%%UW<**UNx9s{XLf3Bw*rya#!G4?qSt`!5#u|;2NX<>$@mQUQQXB 
zn7!S-J(L%0J{qjA*o+GjGr4-LTToI{%W34m)Q6F3IpMc8K}4V*KCx%6tuSnHo1DCT z{;Yq8k?Gdt6Z2)Yu$9?{vn5I-*REOj5uepkCNMMRL9#YtIwNl2Bw->4355(DlbzW= z#HtGPI5??5@vK<3)YO&^&7MD3lepZH#nCe`5GH?4P8%&$`>U?mWC~t5GdwUHN?Q9} z^EYIFpEQIWzQ3F*Oc)ulka5UxVco8t^N$Ekfn;rFQDUf)lC+$2aSwe8(0lMie= zEfcP9ZnXS2ZPg-Xk`bQaf90w%qc_WUmB2_}U0Fr-M)^Zp+Mu8=JTByH)ls#E786b? z&`9I0yjo>a>rLU}w(UP8FuCH_;v^vviO`&SIp2^vgN$2weh~H_A<|I?D zs#;RM+@E`053`t_-m+1b?~hQ|JzqV6n5mE=@TE_{bQB&{U-dyNvs0fbUjUK?L8@ zf2}lihr~$EkH7z&e+^Kpz17CX&xH`_z^5gv3ZVq%-u~ZnLozUQ@>)q293=1i$`WfL zPcA%>(e%QN5?35M;@ujgC*A+bTH|ak`^xH570I&s92q=?PeSzy4)olL_R?~HRpQ2y z@nwnPXi;Qn9yAk|eiWWV*97lSq0ubFin!5!m{wKq5$H9+O+rg??quh!U5$qNUKwopp-ycS!66NpXONL|C`jt zR|8VzTN>@DR%KN0c=W&u^|!MzYVp@d7Au+-`w!lGI@)8hSiu^wl)wrha5EujH#D5S zIh!#$dTux0T$&nnFk*Fl(z<$ZI7eK8}Ne7Op^clPtTENL)w6tIdU{MfD@4o$a%PWjLxAID0Z8`r< zK&uXmQy(Wau%Oz6Y52(g(_V3C|EdrI7_OKD$`L$vymlWr(9(bE{&#y{Vz3eDo4wSV zI6Cul?UQ<=F07 zz~8IBi;5Op*lL9ydW`~<%LZ=%b&asGROF-HNTmgn`)ZN`PSag3OzHWhl0nh}Bm*Kd zXJ`L|VFA0ev%i$_XL4x%EB&&rsAZ>62Yv$`MX_V)sCwrt1LiD4R?|EH*ao*j`Bi?0@BiCbNT0>fw=?zkABn*j$v<7S(!xx8{zdlz2CdsH`}_ zU!C9SUbxmde7o8ixZgkzlugooP*II|n*ZBHfksZ>X<4KuS%no1I*^-^V zZBAU(Qsh+fzoo!3>F}tvF|)-yT4EQKXARk(sJBT^vu`x`e;1>>H9t;Q`%YOmHX$B` zRQf`-(p!7wHQ1@Sd?5Gs(pE5Zg|Fsjr@FD%T(>B+>Xcx#NHw&gC#kg*zj?AJzb<}$ z9*@ukx9HaLxy#6W@h{Dit7V0KJm*CN`O>{m`!JQcWU49ZWeWpKOBizA1Ht&W#@_SfpuvV}9DZ zl<;hW0w;a=-~RA1hOa+}cs>~fJ$w!^!Q|3$zB}!z-l$C;XA8ufQ@?7CY(G6+e^<#q zsC^EYB`Wxe>$KQCI-^h{+R&qrj~!804^9z;oB8-0K_Korq7v6=dKQ>HdRbIifZ3VD z-^_8U!d|qp6sY(|suP5YIWH@wvCoRz*y3n?=6SmOtB+AxmGQCAQeV5nH_~8^Y$(Wj zj+OEnG}M=SAM;90F_Ql4wLZ~8D`t>WUD@W%DN)(-XbdDrPj)&xu!BFh@I5d4d>O=E zYtM8Oa_QL0ZarH)k_(xAesHc7{PUupoOmn7sO~ztJSrZxSw^l5W`!=AP_x*=F0nnt zb_a}}r|OHfljj(Dz*MjHJ+iS;;&Y@Elxfa=5Y?gpn^Xt42IG8f`Y{;CnFMk$-`E2YOF^z? 
zynivi#|VEa=5IlUrntBs^1$dbJu;rl7&tTpYYAU(<(W?(K({$4Q1))WnAPo;-&i`X zv%Ca0x||Y(`me@{=nEcM2OLBo8JbT^Ex{67o2GDN~VdYEqc-R zvMjEhI@qA@yoH=hHt(vB^dp%Q2N!;A07D-lO#{IFyUmdr1`Lx8;7L7?$8{krY!k>Y z!M!`*%rUV6?#ZgOq5nI~LeVXJNB_5Dua;m?QA+oHJpRS^^Zh~E2R!W-JHjXfb$|}K z38>?J%S0(6FRf{IqA9MIOR*CPgKnw7;Yq_p>{=nxch~eheJ~3L!H8BMsj-9?M~H5C>Iy8{@yLTJ9)V=r z)sa)!7*2D4%?KN68oxlys(S>{e~f*5u~OL9@U&jo;W<$Xn(6igA*n*np7#cEI)}4U z$Kb*+MRVxrz!M5ed|PD90R?xKqkH`L)WY`dG}_O<8lhgQ=4BtfpJH2UbL;DujY0KW zw<I5UMn2wUg65khVE3J7_MSd{4Eyx5U1>edv<+*^jDqHjUD) z4uqkD0+*wE>vUvOC(nN;z57W=bW=|>`F{*tv1oevmM92&KG2cgFr5JSzZy<5ztV5i zws9d)`&jx-G>K`?;R7`#WKkszx8hOw28GJuB;yNYAx6}$t!x4p><%_;jrepFPO?g( zc$Z>fN8p&#i(;!s9getkG5q%W(&tP;P#==f^z(6Q*~LNS(biGW$26wE{-}2MvbVHN;M*npiUcV((=UsUlU4BZO;IQ>(!cez69Fn9+@Xm#=|XpZN~2>_0~Bs> zFO_N&) zhcdFW7mu>vW-OjLwh~+BHx8|p9r3c0Um3(n6Lm?=wbQL?W$Y{bb=JY7HE5S>C?drM zvMcRdZ_lK@)e@RoaRc|EE^76Xn9x|TrpL^8e#MkR-yVnbZFMPVt#>X<>Mq}MpD4B^ zuo$HQYveStl(+)1!?FR)S!VJ9lj|^|$&cLd_k}Z-1~`8LS(W0u1jLKgCTD>c3>Xmi zy_^hW-ot2nQSBX+11;~x`;TbZ6ZIGVl$`wI19<(L>a|`l*q~B(^#Q1_3A4f7b#s;Z z+p*SWVDFo-V?X9|v#~@PQdMu2SYLdO2%K0Ht;zNCY8n~T-;Z`3y=+7^?E8vGG_z(t z`0O;??u@#5e=%Jz6)VN`L?@l!FC|D# zuD#IrUj=NvDLI@H4FQS_JYDOP@vSq^?`{WHu!KXrDDZQ`kWJWOwkZ#S1NQ;ytMj#N zPVf^ae+6$+;1NThqSqN%=5qTd>gPvje6YWJy1Tk-N{>`-B>CT=gAl-sBSdHdm6Utt z;5C8q0u3oxp$I}r_T%(#?~0D{7K){ca%OyCp)$lvm>`3Sa236nwKbG*P%v>C+O1+5 zmNA&0=CC1@?8O5vd6|BLRz!fH0FX_zodqi?FJ0kO5F|1HG+ylEe|~-r?G6SXBtF~P zG-V>@qQQ8YmVSAAan6qp+~I8fhrr7TyS-;BAuA~>)pFL{(94K_dBrs%?!sQ=p+=$& zHc}k(!7}cO5Sxr`1`@an!0BAUB`r9iq%7_GJBXM|n7i33MIBj;`Ug^r`Uky;qasZ8 zYCA5K47Z}HYD{&tZAfUvYN5(^Q0x(BCUKffLRwmU*QW7yt^9>%gZZpim5EyM$zdg_66?a!=wEA@}cS!!4PT4;E-ea`Vvq$oHU#@kKgQ5 zBE(AAgcON_HFLy#^hG0qiQ8)%YZ)g7O}`~$#L3A^fJIu+SX_4F8D58YZR@)hX#LO9oeN2n5113Cdir2&I-V!s4rJ-F5DxG) z;nz^C$#$G>7Y+y?9v+jaVobvOZGh~8`N;KEKuJkypzK}`UWNqPYD>Z1xxi&09^C&C zIxxh9Yzw74MT9D*EF&Xk0+FUjQ~@I)>7(|EKP!+>?^pY|#?v=ifLcrm3&$QxP0bKe zm&OS0z1EZLyc@Vdga$zSvjrP>6S^om^e6c&L=fQegW>Y9qXw~e%NK6sL){=Iuw0uMiQWvtSwtw*ZNo?G&m^m0BRYU$HW#Nw2~~UGA{igYZ{Lm 
z`3gPTu(f+qG#Y{$NdILnUp_6IHLaXBebX+58atZAAGD!-8AXE6DcK6_IZ?w zynk>Il8}&alagt}m%eXMTPj6)=y@lEG(_l#_*|p`o$*LvOXVtYO^DIsF&Z zP{xd+S1C|{ql>pI!_Mq|&d0LR-9jAOfCrQ|HVHR?HatVF z2iv}5%U*tt!GL$uk&JhZ4yg8D5`$3!a2i%fkia;E~q4IXak5tj8GL1 zjIJQcPfs(Yg8<1~cXt2}FAq?t`?tAVw$lugIQdb#-Wz_E6+VTh%ARK!U-)LcrV3@W z7-c>x21IE|Sr%-HUzjk5c2`eX$HXb)#7WD`t(Bj94~-BFu&|CUE`e!noc=3p8-L+A zQMuDfoGJgt#2`RGa|Q|TP4bmlm{lfg~L5^rCwBM zh6C>e-{ZX!YJ4WZ1c*aP%_+U@&wsfPhk>BeOCiKL8E{Liif&ndp;=HbGms=C1(1L{J2&lO?MAyjw*(sm2n1u+78?w&^)luY z*Cho01O584+@=GAgM8MeN*JL&YAHbA2A|G(g+^7PUEQF;cErKLLxzU7fD&9=8=oyS zgNGW?`{@rVo&%u_8#wgQSTn9_a1#k)oL}CK5*PQ>{@wq+TL)*lFf}zfJluo@Y%5_V zJM&=2W`mf_BqKA;?<8$37s+V25c%HS!YZw( z$(?C_qp3KapYAxSbvRzz{{i6EK)r^poz6dkLWGC1kkYV|-~c@(!!*bQIG|cuv6HCe zf`m*4HWf}t=-84$b0I3>^}noZa4RJ-R8h104iXi0xi@hJLLP=xhej#;rl)JgNkhs(h^fL5dS1wQWB1niPq|6NwSq) zG!6-}e8wxp^dG256k0V=e>7N885bVxot~OvG&tEsYoZbuLY2mP@d1)lm>dlmL)c)$ zf|J0dWhN4HGJ@Mrf3_kYu`nQmi=B*>B#eXRsE@)(7&#ck6CBfwTN+3zX=cV_5|GX{ zX&`5FJ41DTts)Baj#TS_%3|h{1XL6UADue`wN;Uxke+^e9YTPU!FFL@VxbC&GGSq! 
z6o_R>KAdeppH-~A*nh>tL_Q3gkcAB{Ou^$V+X;(nQ(j);*ILl3{UiCDKW~O%*;qOK zJ2xi#Hw5GSPwoF>0lEkD>-jOBEg!YpJv;MvX8D<{5my_$uoy;$jNT?;VU$7*zo$iB z-s=_|U0n`Bn4A)r^6SZwMZ%E9Rg++-F{Dph#BJB>A#WQZ9e)w$Fm3#S>sQm{uVQmd zEx2E|=dEnywt%`wC$`_1ntgCx1-+j>usS1G9vY@HoR67{h>992-DxFdry)Q>ipvf_ z&Fch0S9>qG7$~xtBd5PvC$6urV7sf^{8E$Sa z&58Yr_7bD`J%xev!c-gqj|;~BAK_~I6LL217xD-%GdjIC+h_$E)_Fk1-z9B zIV-&UQ4HcV8Xv|rKZ=pX1+4?Ry9pk+$=+uQIbF#VxI*Qo5xv;#P$9n+1w1_3MN~?K zw4HIEu6Q84OIY9IrwwBZ5RTTBo3CdvNeYA#GG!&BWCm*O!t+^@_PK+R>~sHF&hx_c zfa=$~D*`<#K$x)(96g?;LaJ0hc*(YdOr?>W0M7Oc`(N?|dxqBm_a}^`oSa15IY|FW zv&A8fDr5&pF$QkRj=>Ib(o1<};L>Z073kD?C;rY33x)R+0dm@b;)KYXTY3#&Z8hy) z6wq#N(oLop??}pOo@0a@c5~=opkFU14_4?H=ow_8HDiQi`3qHhb7L4H;fP_{H6zFL zI!kg1oyFA7l+-8TzX<7?%DHI)@l1dv!@vdzZWRttQ8(BBYTeEJGv1 zCi`e;C`;rZ-`TIwHmMFGqtM)jvkAFl(~|>H!+5*@&JmN6N$aY>rdAjmcuwSj1y7gd zJNb-AU^x_GHK*|JJ}++Tf3+5)y7-^mN?@F7|236VhH2c~@PH98njtv=40>YXFA4xB zMq(66*xH)5=hF~X71fCDZ3K)MO|vj~lQ5mhguIKl8z?g%j?Z_4RocUgWVg}UAIVqN z@N#klgRJ4Cpb2FwgN;d9+(hbN4W@{#NCVn<&*G#IUV6$?kBqO2FSJXBsvjq9fo_^& z{Q6r>vOPN_aPX~YB_M7#Yr<$zC5n;a0RVQCJe!77?=s5qK(I(4A15y<>MyhRUPCQC{R54|$*k`)WKX@BCm;P{x zEiPH}>I%=07aiJJ1Rp&{b~6q`CM_Ke)ai|AP$4u^qA=*XM=KQ05{5>UZuU%nx`e>w zYI_)EpG{FYOzq0FM3ga=5Ef<;kM%=;em;)~hDI{Fnx~I|vw;I=v#t4=Z6a(e3`Z$X z6wE=Oy&pg2Z#Y^CZdqD*VtpYO#QbvtMt59#c$OJ_uGIwoRMxr_>q#0qTYw-o1)j5S zhbmo6jiRi~@Ggega-9WW9h>Rd%)`ZfFc#QbZ}jN);_;rZniY(t?|~#_~sO zs;(EIBnN+o@dZ@8CgcnwdNMI6ux_TC8%;@Wpx%7R;ivl>U^9oepg4kq6-erbde*Ij z^MZR1@7nN0&*D0lpcT;_5R=Ve$E4S3=?lZ?J(wJacXVY1cf_c`u0t&Tt*ezOl!X50 zH3R$gh0JoEG@Ig>mYy-VyBjL8~DII+}f?I?C4;r;Oy3>lB8cSa$Etnye4%MjIQ{`;;x_S>o=`LNku z)0#nFZzOc7oHt-wpZ~>duTHWn8%~3&ja$5N*_^!Z(#Fw)n2rMMI;xankWE)-e2o6 zN=(s>r+r9_KtAT}w*zm=14xOzuTvOr1TBYAq$f!_agksu^s>M{Z!UoSz^pauI1vva zf*;>|54a*o0)>j~32HWe&N_^ciy2N(LM{Me4_CWCL5vwG28MG!_+$UbpeaeiO=GHFwyb|5gi;Hs$V?8q^C@t9sUnTO!DcbDE*duCCWBbLnKw9s`^aV3)vQ5s!MOx3e?ur~5gP!XC?A_)Cm zp2}BWz)uFUhaJzyw5oL*VGH*96KKEVyPrX#e@rz(D7+8{!E`+nLcoNe5DNmvyswp| zuP5A()UQ{%oA`Km_;xPsXXKxMX=tcfn7gXKB_~UJI-tsHl8}Ic?R9g4LhMcb>qa(Z 
zNhz&rtZbz6d`l)LjdTxtWW3spJq08-#lO~Tz7WO1khEr@k zb`dPt9%o;@ksJ<;b7z<5LJdCiu1S1^z>vB+aQL?ge|~HnoZ9{itojkSWI&Wvm&a@z z6N60V;)%=Quo2?zet!F}PC!`n7q)0Tl~OpG*8=$mY4`0%^vr;H>w)`qSNL*g_sg*V z8!V}bh||*$q%OZwNP9hGeY9OVM=TlJGBO^go#4hNgU`p8#X$S?gvGR8R(($h@4aRB zyvxqDyFc=d0{Otu;m>`QqK3~<#CDC1famW~<{@$NyL&jkidhEvi_G-y$z(se@VT`! zoCTgff~==%asM5V?e8NCV7wNbL;-t}>lwKv+vVt5(sSUTxe)ib{$7DBZsOivs$DsAE z7%W7A0%8!rj{;)JCo{}Mf!C4{U#lS4-2#7qCjMIOg4lOAUF_XSyR`!~jCb$!TZm|F zaov6F<&K!GSIp-1LG308rrY8y6OcrHhmhqWH^=wzrRwVLa=QG4FQ={Phxh-3H!dYr zPH(*%1OsZQTA!P_EJUIqNSRnr)fvwi3de7c%PGXOX%s$JJK7|$30)HpCB`YOsH{7! zcOV4|pNQR!g27p(q~jak>6_*tJKWuYn~bSCpmqfmr7VsnX^hDKm1sEH|ESnmxywZB zcqB_=pRy=?Q5!n16LX@^4RX+o6VA^I7cK@@H4c@v1(y;HKO!+&u!5KGrZ%v#5nbor z8vZOZI12bl2WugwrlAsg(`}~3|!xdDY?NsYLBN^gaLE5L9(== ze#-Gyzo`+BP0be+zp=Zzee>r$ii(O*XFhF_0ZC|%=L>(!+x5p+1h-zd`SlYdDRlD@ z;+{rp(>U_WTtFPYu1JQ$C{?K05aIr@5PW`QZA0w>gmV(;>ORVh9jj>r#$$ z;+0yI7C0?2T}18#M}4r8yHE#JNh2P_pzh7}ueuo$jFdt-9nMmv&t}|iM)RQJqDR_T z*cX<55H|gEzE2}{FhN@H)NwjI7@W0iS-qcR@aTBmyG#W8)$BGvvUC6~;De#w1pa;P z|GG5TaR1~QKB5LsgMQfwR)WaVf}QYtB{`*_{47K2lB*u?pEVJ_?+I@Vn!4oS(6N_S ze_TM|(=x-yzQ6|rXn)VkO>jID2NM^304ls#%=9wh$A`cNc7OlZ%S*5V6AKe;i7e#7 z0~?gM_{&>Y7oTRF>`gCv<=-IQIsz3H72OZ2Fq4^TQ?hYAFOQePnr>LeFCQ#G62S5J z2BVp+S)@b>=!L0VcJTg53x$^>k+2MbHPQA@Galn&!Kt@|@MKyqu-_6GUJF}0( zF_mW4?Kbte9X?^=vmg4tH~5Z(Z+alb6){GZ%wa*ecuQ=j1K_pU2|K()_;R4)IUr$Euv@JWhAmY!Gua7Og`~mg#OxfJOP`j_d6{n<&rahqn@S~hV zXr2125YgjdXxI9}BECPq_62$ZBF12J1zge6W^_d?J*Z`f<QQE)$&Whu4NTR6BY&|N%~7PaqQRBJqnvN9?{cN5YFiT z$(SePv;GiwyjflrZsn@fH8d+64@(?&w^+bD7`7W6eD!tT+SJ@) zcd|jI-SemwB|aa-{Ce_;Wx{f3_7V?e`r=<{SZ5g(E5~VfE`A;SsU4B&tF=)m+;@q` zYKi!zw;j*SR1%S{pzAna*F&_Pq@;Gh`8>DJ(?1%~5mBb5DzUcI^@^Ix)gyiUexMifSyAhcPDOf>QHTJk( zPYj$_nflMQ6l@&q0qE}oR*&|#{F+X*x}ICQ{oh53>Pk^b;dNwe>)Zbfq7rP3xY7Q{!`Q@2`Sa(!}_N%sN_bPD*~{Pehy1Z#u{~b2?N7kG}$? 
z3yw!Z>pD-kb^}4`u?#kl69?j=GFj}o^GP35VE`rXifB_UB)+@6r+Ukx=|0rZWuFL~;*+>!{0d~aa6&%}=LkXa~|P z`QYG#UFwpRS_id9CkcPd;~X-HRiYcEfr+w#}rK3@>-&N9xB@bluB zpRkoa1d$(Y(jvm$S&Y1GKATtJ@NIoYC?F5I2y9Nxul9b{NfbOfT7(sK;$01c-H3Yg z2AUm^9dr@Rqv8S}GIR}vk&%%|I0fKevz(Q5yphAV`2)+#SHltYH$N!c3cE|d>pnh+ z_mjvJm6T!+u60NDyC_!g_qAgl5ci3`j@=Og*38sb#TKiO^z(u3Hx{K(X|hqiJB*xr<{qh9QFjp<&$E61J_b#@%QX_e?>jFTnNPco zhnF;R=Tv{LUU2ZBW`jA6^K$9_a*cJiPZ>;qL155x(gJ97f55DzKSP8p9#sLr#t^@0 zKh6J{VL$|o_#W(uzY`BL7%|V!89x<=3IMyS@xAh0jU^q^ z{CD-YGv&|WIf@7;U)W)_X0?7^d|WHi$Ay4jRK6smq?og23}(6vN^Il1`3WTXt;-(#ZP=*UP|wk38G zY0fFzhyWNOTVFTCH*csv0cb>jP%z7Iphh8bKl|MnR0%zw*|N=P+?%E`TywLJRrAq= z7=9Nubffa*>cz)Y|Jv-*4jrf(hJiuIHmiUkdK5Lr(bgv1%^v(x;QBGqYW>60Wt-lf5*JsG4+I;Z2;|uof z$jSSC2RfOK*zWiRhFMlDbhIC}y$!nAxd0Ly6re$4EB}K|y=fQr0OHL{JTW6FLhgN~ zpt~FI`ul)6Rke_`a2EAzI5^7r$}VRTBti;iH1c3{?kgV;)GsjM2Re2Zwuprb(bvlN z3#++u*Y|dKLKHevdaR^$tO!u1V91!ZFt+eZp3Kzvi0%eX^NIs%)1Ib66<;_9h{f3P3>rA0y^By{n{&iw%3{eT;h=>knaKEnpE>hAGu^=uHXFk1(o{m096=HaLyCW#fAMT#+ z?7cKTrbB0I7w12%mVB_Bh=~n{TrGl6s1_mNA!w{`gMQM;Yin|J8l8>ZaFMD&te%$k zRxHg+Lr|zcl&&rq8yV;8rzQ!Niw2W=9l~M*v3=Keo_Fv5`oo7zuDLZt`?XDC#_pH4 zFeCcx$CpucvlcJ>#V*7p#-sA)J)HjVEW)G1nI}cyv{EHQR9q;7IQnrT+FCk!FZen@ zXzrX04B5rZ{Cnd+zQUl>r|+H;#$C!TT@P8?ZEfv%`NuCJCM^(^6%{!3P60BPrDM}G zD=;!Vj6?r&8Y+b=WFEjX&+UWVY{NTm{|*v|97BCNWNk>ow(Z-|*))K+|M7Q-j1FhX zVqzDO+;CV#2s}L9V71v;HngL)houu^;-aDTQbD4zA$3tQ`n!kG)!NJ8KQW@)n!4b$ z3kA8aI)phSg2`w^Z+|}m$uCJNLQmH)QZu8m=lLxd>KVm9|KA_6f8pzYeGq}c0XTE$ z3aZL)L*@tk;HNJjIKUrwD;se5U(X>SG9DunL+o=H6za!Jt=^6yJooxuB&Nio@OmMN zuU0bfCxD3Uc523AL7=Q6suOKou7?;mq z$F1TfxF|Jw zXC9{w9W5Pr`7aJ4enB{@DsJQ0`xlX!w-nnBuE5CP5I*?FWAN4n!YL;AIV(3Jghz(L z%~OpTy#ejbz3d13hkDk2UA(`mKc5G{zN;hi{l2bm69A)U8p@OOb?)!n_w<$W5&0Vv zutlDl^gZPl@?oU^=g;y4c+t=10kH4)#GJqT|LGzy4}kqoH>C47G#`N{Bk<<{z*ys- z0kU}jY<@oec_Z+L0$>C@kf*^nJ>~!~^2(rsa!YF`UjO-v@bUA-$z!LWA2+a94a-A0 z93Z;~ksR2(b2YOiE}ksHt@3j0e|;ws;$zTI*@VwNJ4*_=x-l_ntY$cGG>Ly6f!O%5ipgxY%!LuPQl&U zyEygkjW66{az->Z>|crT$q@{9jbcgSQWRe(LvcwdB2t2|ZTD7Yik$fLG&-Ax@Y6Sb 
z1ht18!{hz%_4Q#k2^Hjfy9RLa#5E`+s)t>2Pm3pkP$U-C)YgO-e!3HpG2tk+=PD4paDehJ^K%>^eVAJE7XP;qA>7x%m!szf6-uT6jp!HPZvrmtr zv9=GpcCSZ#dNg|*+4aETkB>5dxBtL1u!^iW{PAH7bWGs6=XW9`!H-Ss^}{y&)8D@Z zKk8(Eb~i){!6%I*D2b(~bWt-fbM`RG4mVYr`wA9z@uJAY8a`2^|fC z(71STq428!u#vG5tXP$cw9G^l6kcb!w*C8_fxEjKu3Wo{*49?M^2)31S@X~T{kQN7 z@xX>H8xR&1gc;*B7n5C8S`73Ix9~c;*LN5EmDVtEY=`v#1);i6PjyWh2~NwYXVaj;4lY?#%!4gCv?nXlrf5 z5B}nHghl(~?Afy@zj_D0LBt@ELoSuE>>1?%k{{c=Z3Eo>Gz^5xm13BTCW!13C`4-X zbqwR(-+siW`|(d-M_`ygjvYIW(Y|r+g!c+@!>*k>Fwi-IH-Ggz1c&+|AjA)OYnL&g zL(E2!lN_1oM}SW-?v}OU+=)wAy=fWp@>Zb!b_0$aKFQ23yVDMv$jI}8is_#o{)`Kl zo<45awtqba`v>sOFF!?GL<~#Y(H{xKm@?`Rof3kTt5;#TYaDO?>OF+U`LYL&yN3tP zoH&P}zH!b|-zES?GK0hzeC>@F;p6LrQ^!wXX3PKuc>{_Z><4JG+oAPRW7DoxyaunH zC_zL01HAN;XAlw=jPe_m4CE>lN_hEsV%4U6h?NeA`?zl@i0F!^{-cy1HXxWD=hpJ^>e*dRAg_Zst7=0CPe&HIB^e zbfjmev7Dj7WMtNb(KrP^&j4IHS%mKPKDcUJuw};iTeb?t5w*vc?~@MHLyyJ+-zYp0Zuy=i)85U7{Ui{9Y$PaG^PxbSh;Qm z18)|GNnp0BHO%6Ds_Ss!%q4^dgg_z^u}>ZOPHo(|nwiHv%|m$WS3>^$Y5?q!>lT4R zwD0r}_h3nW3YO2xWU!k7r&SD@NREk-DO^2Y0J~+* zqwlMqR@i6E@tEYMm>7cw1crHIsTZtXJw!vz!;?#-LXt_Ut*MD*lAz=Y1F1m@PyIssce*U$C2#WH9MXZNX zC5OZ0;G8QFDKI`fiEHO>z+rP@YGx8^H?BhTf=JFi4!fOMzhlD_xPJKtW^`uAC5o?P z*m7~%DT2jrf~%(sR&QJdcP}?g>!(A}To)7Ks58wUeJS#U>?WP8V2>c3eJx6}r(W%$W2@ z%}C;B7_n5uvZ!S)lE2ld;@y9j_$vnm5&^9-=>=A@aws`&`~9ub%az*Sb_cU7b*ZOTo;M)SgzOJ+S@v@{)Xb|M-ri^+-#)f0XnryT+x8bw*&mk^4 z4%C-Q?o;HPLfx(YK_n9>MQ`U23^Owb4E5m-+412COzNf?v{Gm!Si5}%cSGO1bsr^# zH{s>$gKb;4K_QVdV`gGv90v{_fLg1@sgq}5&>10@$hZ(rl2=r?ba7F`VznSKH6Bay z7D7KW3B!b${XM8?N=01V$RzI7-^JSP`IwxXM8Vk-?B8*K{czrX^8>`B`eVzEEwEXf zIDPCax*iPSCqI1+K4EU`9aMC!7?aae*tTa2LW09kc=0B>T6$UX>dOj~0stmOV`~#$ zeq$f}1O0IE_!W!~>L8U;G2Z}^+aXB`Dr_lTrP#E4ElgGue*1Sv@Z690AUrYz#U+L4 z?eD>kZF|uEU;rmRI*A=GtwmB|0;WfdxN)@*%@3N9w|Y6!a#GRO)`=r;pF!HvM6BGA z1+&8h!?=z8lh|%eWQEpC!AypW$BN-JNgySUr7yG(i&&0}J#}V1aLBBE&%v#TUl@nN zf$sRP@cW;GvfCnBlJ;1e#MO?^U zvvC!I0)kOdP>t5c4k%g5O!)X}wVNSQ*sycYb`XQ|$R|hPs_=lAD2%g!-cxIfvz|>6 zv7uPLCKJ^)73l5i!}e|4AeBjQ_uf60MSAwY^9(Y)_nX65yL%ZjmZYPjy%Se1UxvkE 
z#;VmTk+d)ocW>9D=yExHJ^i7Z)*)p{A{H-8X4%ZHmOB5oz5eCnl1a>*? z+>FrG+l6Oe-p(NY`IA>LGiicMtYH5gN(cm8S{z1f-L(ns-mVxM9mn0udLCPMPd9ja zcrmE9Br6^1IjLxAYv!CovV<$wEJsRGDgy%4*sy%#V&p8(Lc^W=sHm#Mffx6)Ebd3Y zJA%Arc}Pf%$E6FGG18}lQbuf+FS0!5e6VQU$y|Xg+c$HA#DzOUeQ^@| z_*^?(ih=GCXf$fX#m8c3c#w1Dv~dc7A^upqVKuB~JI)-v$e@r^>V!@|hV&JwNL-kN z(!vUKw01KH%mrkd6#AJd_MFVhS%R+4ZWI*U!06}%7Nw}gN7SWNKn(_;OO ze5loG^gbBIzy9VUL`Mq+^DjT11i)C)z-hy@Ws)TZX^%Pa*(pdJ3P{BQP)>iCiR$|_`qVeV4!;xdXpY;X_3g!TY-U=A-wbU$B2oJg26TexjV3x#>Xnf zxf2&)p0cvc<;soOFqjP}xn9N$8>vEq1q%{kGFZ{t(guZ#0!Eu2zF}@yyKW6LzRsVz zj3ECY2D3nHmRWzJuU}EZKE?YFAJnMRj(yK=K}>QKZdKjJrQ^j|v@j8CcIRSvY!vVR z@;I_`GO&Ej64W)_W?wU@Q;G9O3gIHr%=%iA%&e22A&j<3>^ryzE-osZJaHEKag)$# zudreJ&aKdmPvQNy59661Y(iv26bi4FGb>O{fQa3Wwze)j|LQ(I&xNxWFxEE-7rC1E zP3o|)+3ZlMl?Vv-X7HE*c9KU74E2UWDMxqP0Q;JL)91FYkJ+Oqyiwf{(sB}!wKNNN zEAOMUupB<#J`65Va}EJCPKkq=b55%ReZ760XZ`*C;pXm!{@#8VXUyDaVkMU?#|(VK z+_8Sc8niaFtXhWQp%GlXa0&XU8D!@!#iGm8{~BV0 z;t-b>hZ)N>TH6|7FzX?eOR;6kX7=YJkG9bP9X}6PV1U+Fg|%DrFxWqY!*5+eR75Dx z2g;jviv#Z78m!u~jNgYqyS|!z6a@-;TftZ_Xj- zKP!O5F5`23TTL0?d=CBjUrG6H9sv9EkHh>GJQ;y`0Bqg^Y<^A5N8nEpfq4MzPZ8z$ zbIeEJ&lrJu0PN2g{P|P=*(30W0$?QJNyV0LdN3$L#U%#71g^{Y^a!@?T@N>{7MD*I z!8~JQ<`(^&05JL$GMKm7tnB$f?_n~VU^E)xsv-84n7y?Wt`e-?nhTTJjAI|3ft#xr zdfEoD@5P;nhziB|^JiH?YwOl+a5{hs=PtluaYC!nASxytA~CV2MCk1qKzC;^k{2z& zk`*Z|)o7w3uT0KzF;u9c?&F60by&ZB6{aU=@c!SQz}^GfS+eQs^-CC^8pD>YTVb<0 zaPjPAbT{|o4cE^UVSGpj7qyCIprlF($BAU_hDQ6D*`b@9#G&7u$CiB?5w{=$7cQU0$jC7E z?IFOw4J7~5W0gM_#^)K@p++{x>Ri3mYUM-N{9#U7~DuDE!t0IpgUR&2?D$!35- zXJcLfmr2ETxOu5yp0VQknQ}OcVn_(&ddj3=e;|^QC94XNn7g5+E@G_Txf)YblQ@6$ z5}qw$m`|I;fn|f2ouV#BEe6KfQ&_1$*8QTLVJBL6ml0B z9n(TV)d3tkevCbQHg4Deo7sl9fB7K}zPg3u_~FNguxHm^_<8x@%$d_X&O3JP#o&Mr z@Biily!5kYAQIVd3`#w!h%`gB~ zTXT=EkC>SxPu$+#h0QxwB7R{k16S=$T~I6C7$lR4WN2<_!w+7437#SDID76K1Gxk| zxdke?fH=~j!~4HGjD;C7ShsO4hWdx_;ct&2JSGsGE&bT~;!4EE#^P@IecY?5!^p@4 zcI?`On1o1_6qTa4WeCK++wt5+`1ttX#`!YLOqd~+$apAu!>lJc`?(OS#Kf#90=+4x%qlE@ZT!Azu4P`1$ceGZVQkyK0a{lr 
zuAMG{WyS`vR9H7uc%~_5G|q6rU!~MSB$mKrvA}AvK&#d;NJi_C@-cZ(^|W{4;E#8( zcj3j;*P+)Ln7Kz|K}BvmY%n_v*tl&SGpMd#EkJE?3nC&ySQ3%)9{Eub0PN=NinHg= zVroRs1xJU(guT1>z;2Y_H@|!bxf@ckBqJRag_S5Ts>92FxtnEiKm4cT$X}C-w52I1 zE-qmYH5WPg)_f5FoAb~jsoI1^kx0*2%o3m%Pu@U4j6VaCFTB1JzFs~kIA4a5{&C1< zGPJgKa;SsCyrI^pFgVtawVPKXJTwB=&z4|fXcAJf2(6vnSid_53z8F2T2O)ZmM*v` zR6KVjA}J>IW7xcREtIZuTs(UPPAednE78;2gDpGPAv8D)*Uy$>azxKS_UIIi*T&iL zDuUJEU_kycp=}t=Gt9iE{es4g8Xw3zh?sGLG?7raru|*1l_5Djj+-LJ`=(G^UB^ux zj{vaGXKH~}D;$sgjONnBC^EB`AZ1AsYN~5+uktwa@@4Z8aj*flIVC(dzp3Qo z<9V0nyk0+v$mkF(%ge=>PKUFn&Os$oGxKxJj^*eZ?8nEyK7${<@fvJ)0}g%q2@brt z8&Z)H1!pT^BhW%Z`x!sY@V>li(=x=w#h|XXkv&H}wB8sV9mKlLc?b*&!j$|Np61GbH+E%FHd_Vt$!XR!7iw=a?A%#M$L{n=s4!pJv!C`^8 zdf^6oJNlXB+|kpEmw&dGrBbh-DuK;FVE^Om=!?Z=r`-Xm3$SMA3QW)FS(-Q^G7KH< zU0A;(4~ZGEELnKtcm+ZOA~C2N#MtO4yaL?d=AnVfVnA$sG}6)+;db>MZj>NNWTRyU zZoX=)+n&b&(~;j?WZ7=^ij;`Z+0ltT&um9XoFA^;xQf2cA!t-?n9@&T!GZ)VS+WGT zDsQ8?PH5~Io1lh}B}h(7#_gIrsI6?^IYi9|g72S$pP38>@|ktz{e=!<ze46C@TZ8tJOK8mi1Pe7<|FVOiNM$SfPGsYVDl1)-;n{IzYH#x zczby>XhKEFZ{86s6vqCo0GI#)2n;a(eK;^}&>?C;C^DC3qUC-E7lgE0w^^|`{xr{$ zXtX|=B|*PC%Ptc9_ozGF4w*`X6&tfKshh&3V}-e5BcDQw%e6)x^BxN`Xldwx+- z+&A1E*}2(hzu%3jn>BFLdVXoAI?Lxd#n783k-sGuo?ad(x?G0Q!3k(Rwa8wU1sAQ9 z3!UwaohT~4ft?4pKtFB3k+;ub{g#c0OA5!0qAM5~8NrrK+c7h3$0xu07@H2PMpQ&N zDvPQy)I9=~+!f93&Di<%kdZOO*Kz#xvjeGPuWey}>sEXlQT`zpA| zU2x+<1so<3o4Y^lc)YAiGl67;hQlwo>&hKmO@p=OMFBqYTmd2s@2s&AvSsULEg0u~stcEcJ5T}f(>y5iSwTo1Fw zj0epRusC%Q?%b}$&8xL6qvhl2f%6y6aF_nZ&6^nr85$ZvY-}7V%5S2-ubUZH60w3k zzpOS3Hg4R&bMVrItB{LST;!LyI{7??Ni)u#ya0ENhtQ$z1avcFSiN-xd%zW6DrbMH zM^Zaq27n2Xqv9dOJAZR70~=OTr&B$iPO=Dvtw(j1@g{~`S3n3E; zF&&*4!sb2epw((|<3bs%1{=pOF*O#6i(^rDuK{&cEpS(RaiLl8Y*VrXTK<-$2#pSa z(PV%}O}GZn9=rpA$$k&3wFXq4S7XX!&?Jj}^fwYIk7#TTB1XOKHCUb=*taT8y= z#!rE@Yt~?_dkP=!M}(U6hLZx0}&;?*V(Lxp`@@ zVqGpv8-DiwMIU1OTnzRLV<3;%RDxIH=MVKC*;p<~bve{%O ze3^w~X4l8#{-IC$Rz0~=e!s1X3huzPQHRI{p~%S0U>UW_vTC?%w8E8ehyU!EsVRX4 zI*CTWmC?_gmOKbW=<09B&Sy8n+s6yn&lh86%EVq04|BSRgb$5ayM7h>fn7X*2{NaO zrNQV$#3Zvg25E$l%#cAoNDXqobx;pGU 
zxRrtAkKa6jm3hmNl$C&r$_n(g4b1{zUyKyrg;NY-rev>PjOf@XloeK^zh@Y7nF<{( zt=Rj@7K8)^qLA|G&^R2h!N=blvGGyRxTzTgBu_<|ixdi}3req4VSI2Ba+w^HGvip8 zm4Ji=@u(`jgPx9lC=|p-6BsX2C9r&b78Gh3jMFC0@7xFgCu9lUg0N7j{p9I3_e#kdy&6Yg&;kt6V04_wUn`8Ta{2Cx5(P`|jtq`t=dK-abyMQv#f!XeN<_ju;z`Q)&+eEo zTJ`V_aYbJKa&)%zq3BvEyxe_frE(vwy>GgfL}DilCOtEA$k&Ph>f&qV%ybqzoN)DU zLEgq342}&k!zwu;6%{ut*;8=S_SMYJDLh{RkwY#36A~w;CdXOgFn(b)y1RQ&bhUz| z53M#U0z>?eo}G%uh9(r8E$7SJ@xo?=g@>T{dKuc9I^gB*1u@M7famkNt@-fv_2qdx zHEv)KneUXJm+UkH1!+89*8)38NYQiq%-q-uV>IWZ_hl@pZxTDocjU8|o|%Tklz85c zo9hH-H_cyu@7Z}wOl;wD=H`g7Hs(f)eoPGpZb2%n$XkZ)mVRy+@%HqC*eT(^6Z%mG zr^x@2q*Dp#B0tpTjuz~BX)^+Y0#S6a3=_lCkWvH4mogunu-eS<@Y7(~`YdMVo_Oyv z;-cbUu;}3%;)SgIG<3Chq4YvEyxn~vc5!0Cq6Bz&xr62l-4;FjC{+s7-D$x6+pW;3 z++neqq4iNAZ$lO{K9BzEB_u>9z$Uix{y`GXn|Ex0SC9tdx-o7pqGkdbCMrZx^8$Hp zmKWcGx0^Sdb~{T*uidtqn_@1Xz5ykHjI$sLKPSvK5pkNI1;mBVu#Yzh2(e}vSd)SO zDisO_NFOmKpZ+ZOP3FSC=hw`69X)vuU+*LLP49Ug0Q;tqnt$C~1inoGjB+7?u5(^s z)O7H94=^b;BG7AQeZK^6XAyrt=K=O@Ng#fc+5??)u4#RKR?J6WJ_3Kd2+RXuf4tbv zzw>+qzH=7Fd$WUt^%G~A7*bY&PrpSney^7_GQ|- zbt~LFTydk|1`N~0;*h}NFd#ZM67h-gXliIddEqT)9waS^MSEKt?%r$^Jeo*NiNqP5 z4yfG}$X%U|8T|}OFV{dnJ%gOp8SwGoPGqGLGU4x8bJpVi`3nj99)VgF&2|*KV=@ zieQrbxyd`r=;PuR6=$dr27n6)TrRH=;w)^Xj=zPxDu=fWlSAfUiMk zLcz5{s8lMfT(uG|aybfaTxXy&e^nljwL~PxK<_9@OH1M7=Z=;6c?_IhyLt^0sR%1q zuH<_uxJqm)6&LgDax+$}Tmh@efr2ZAP%B)y=tz=+Zr&PXOzt zM@1KUDAil0uwuhf_C_Oc%4W5rqoWgx)6>x1(}n1W7*vr=@E(GbnQ$97uZMS#C$3*FfMLSKMQ2xE zIr8)KG1fbULmwYQW_~hv=XbUCqNd^w++95|W7Z=iE&!=%sc5>}ist*RP$^ZI)Q=-? 
zLk@f4T{~BZ*qB(vBu3y)O)dIbX5%9f7*7;Gp8zi`$z6<|u3p?Kx&u$GH>`Fm7cr@r z`SAX~Hm9>&(5$zjh(X@pM;>u*`Oym1TXa~nZVgK=Ub|cXv4dnvr4KPKy1qvk$|DeV z?%(*@39xobFsd8I$}PF@@bWZ_s4h{`MMe%J66Vpqo#>m(pR&UBiaBv_>3o4nR z;qB*v%FdSaHPMIk;R9@RH%G1xoI3@<8h z5@SxOks)XG5@v06xAwC17;~GM4daB}>cr^41p7r&g9GJM8n1O5)^Z~NjhDnJdl)|< z&Yv%23*$4#BAoL^6u?Ykj+*#-h_NWJp=d46p&?&Z;Qnkp8MpudAOJ~3K~!R)oL))m zB6&$X;^N~_SJTLnj|ALt!JRr4`10o7d+u82u8)AK8TdzfVCm8ep>D)DVczDAu zvP0_XMBd8fuoz4zyj~28*^bp~Rzt3o;Kua=z$RmGgRfsK0s+W#(y}JrA5_R)mW}RK z+8=H)J5BJ4{5J}@iJUN7j8MADSb|e3m!SM+B_@XSoXhQY3uG!WGFB`CO>2=uhN1pZ zRFu}R^xX2*>CDn9zEa6dFbap>FopQ|NTe)E!tmHIZd@;dtHO(cvKiAfa`UoTGL+ax z5{DXgy93@qT6lW7GpmWb%xG>oYz|Oi(qNv3Pp~_3a7bnRO$w4vO=Zj!6@b$G^~;#qRd}(K_mf8q=Q)r|7#RX! z1VHm~08Ds|!7>f65DiNjcD3}Os;mZXG_DRIPZ=%KShg+`YLyF0u2#Zs1j}-c=!TKM zDVwDhZ(gs)_%KOD)7txz`7Uz6Y%#&h-wm0o(lFRJgrc)m@NxHNc}~9&FDzM>g3h*1 zR9w3aUvD2QS)K;1hX%cUgUq(FTP;wjln9LoVn56WcRTr6hMx6Q?h53tUBUo+(Wy%K zc?Q8IwlLeAn0k3@mP6~UL{oFqtURT_9%c;F0ZdNlF*&AZ=~amn+&GrFBpxwwv8cb@ zh_>cVxG2;@4xs1MSs72EN#ya7`r^9gr&{v!bvRg2fg6h+#_9V9fN`#zE&4w>$L9gC zuYG)(f59Iu0`maaJOK7bn_Tm6JRgC_A}|kt%}MLNpt-{b+mjWf!z=@g zST{;wX?{8bQKeUI!OfLCRVJ}8EeYXKp{TB^LTghyR<2$JzYuTK)ZD^g=P;BqB@DA( zWD8Q_nMG4q^8ks73y_o+gUxL zdANJRX_Me)Ng1Z6rWq(vsZ}T`ECs9tfLY-cqQ&ARY3Oe0L4ECgsJ}v*Na%AHF{rmN zGm+&Mo9bF{zwUw1Wo$RXD@cp1taS8s^r8AzHCAt0$qbE?A794Cb(;_x=Z~9}MVJ^H z$GTM;U?u?UlS5d)V+Fz@Lvi=kee`t>z(uZtZh9Qc)?~uN%M+!Am28-voxd0pV-u(< zxCgbujX?&x*o@2-X)LW%dE*Wo7NG_G3jo+-g{LP+}*bEdB zCCgG$VU9r)i4!`54$JeGLF?+uakZML<5~)n*@)1{AjB_>K|}q0^tB8_AydFAF<@DK zF6>qZiV92E-zaDKQUrv0GEjP}=q?O;1LRsMmMzbL*p)phx z-lmEYG~P<&EX&5khz^B?MNqq{k-IFH0YCDb6WL_!T_@9sSTbvBOpoG%Qn4HoyL-CV!Ie!ojn*B7(nLIES6f* zYTQv>Qj77iaV|ho(Yv&u8e;M!5`lrUr7P2!AxHP+w2_3G5s|T>NJxsty}R}3Yae+G zV4@W$;dz&qlZ4=~AXJrAqpPb2>o*A=V7JQdU~+6)klK0-N_kqb@nHsFe>XvLt`Pj zkmvv;rzE28ZUaJsLfChy=te1fABl;H!i7dV-v{NI+?83-Xtby*zRi-L#5ASC^Bk*z zulFnW9>GIPu9R}FpkgNZ*)UU#3)ug*0v!K_$N@dwBLT$8ahzY<8VR%@4?SUo1VDNY zR+$^2N8p8UYKEjaFA&i0;pvX@!fKYR6})1EdFy}`7N-fz^Kw9vVx`45A!G1W0A84F 
zU@;(X7GZr0IuFi*2p~B@Wmk=24kA+DKaj)t=y1KiOw;>k}yAzjB zUPp4$LL{ce;m+M!40eq@oDWY6FI_oF@5L?*M@(!C>TcghU-u9cG8gD3$FO2;7E4=_ zmmT?ZW#leKL_|1lSKdKoc`X70eK62Jg00W4hJQc+DvD|_HEMvIz**BY;!+|I9TSB+ zRrMI^8<~~4q_jnzPZlg)xde8b4X2M>fR`Kj>GoKOl)#K9Ipk*@elVy z6@lmvkEcO80>ereHCj(j&YiS{QjwlIwarE=^iu|w8~wt;4GO}+Bgx;)GOZrV-8QEM z61fA*S1efeu4q^`S~iW18R71p@xSQ%b*+8qo|-1N`;0SVLSrV$jnN| zBw*Gy{F@1E{@q53aL~ z6kqQA^!X?V5`A{36*8p+OEMQhtyZyY9XpVgm03#5Qq!NX> zs8GnEQd9n>oMYg|ir*Uqqvw>F2_&b-BPuGApDPA>2BBd2R{|s-?QsH2ZSFb+piW>& z3iHok)x* z2I_@8^(BYM$w~++H#ss_F2=~n7|O5Kz+L0b%|HGjK1j((KzC<1ZeFRx!sLZW$y|We z_I6x5RSN&WKn(WxW5w#Fh)a${Q{w})-R}jhTbsiS7p*Sb^TvPv?DG-$lSbfA1%OeP9Tl`lPKFpXPe>x# z1z!&O5KLbQpdhaY{(kl=6@chNIC(D#Uu_S+()*D-)#WQ!FflQXmey7-uFN0v5qMez z{xASckU*pl=T8=-zN|<~k}k6huvxDlp=(^=c9C#aX6|BsuZrRtNI?Z6L2}`t{OA(} zg(8{3j_QhA(9g^uJ3AX{l^PXgRd8Cw%zp6*a79XLGJ4wk@Zf$6 zR7!y<^t5A+$v_MXnOcm@rAxTWoFvVRGgdB^_y&6*eaRy9b@t)zy;`hTvm8df8K)0l z#riet5ESKy%3C)vIXQ{^+*q6J*2zJHI{I%-&~X7~npB5Cmgbaixb z!JOEH-hpn+_?e!XK~+T!EM_YrqQeoF6o$T@epKGL4Of*bLSh4uv@i+1or4T^P?2fj zq9g=I1)#aH8Ff{ST!2bQjYect1X>!~(9+Vz1*5E`Sx~#GaHr-j26~41enMk{k-RX8 zrS|UMYhoV|CO?VoJZ>bXR#8^TKq$#Te)TaIH=&qCb1W`30)Zj^fL)B*+xM6Om64SJ z4?j0dPE6r;c?0{Yq-G={ATWRnkiA`fTxj$Q@WG;tg_xMo;dWUagAG*R3yTOwY(fO? 
z*EgcSYXk}gm}UVIh%q&z!@`UO2n?e4Yd~LrAAAD6uz2wzW|38w*FYkcKgMAEVjiHi zP9PrbWdt5t9aiY3bli1LG(Drqi0tg8@CoonbyW@fA`z%Y09bfb2x1c>8Q>%jusKOB zkwgrG(SU_1i3ko4;B`LOGYXr-in!Eh#Ky*8v{#3s!XkJDcyb}tX0f8Yq=K0>1fFK* zrLnYTV|^>S+xr+8AqIsNCS+!(GiXDg74lx?)?qg(d6f$NnJUZVu-qDC?Nkx5FkiC2f zKYLV^)Ibb57r-Ae1m;T4UtFicebQPKV|qr1jOA&}Agn5_fyGDwn9!)e>lrq30Yi39 zCO_L$lvgqU#*$hLvN+juj=Z=60{u{Xrxtx({k%Si0h5`PiKz)auHPs?OkyMw;uoOd zb`#IH?EDOV&l{IYkr0=_o^bW`^%(9QheE1+STuh!Ho|pT>?U~nYmvGrg?;8I533Yv z1{<@ME#~K>nzB0fRHE+#0|Qu=c6>y~g<+vEkGf2%yg1?_X>TWfnzh@9irBZnQb2|9{X@8sB;~wTjk&S^k zDnwV7(tarWTtWKt>x=Glu3#+yAP&0?YOM;Xi<2108SEQDbwv$aTnJ2?+h^wX(We1m zkMcG*qRfIS%*aelMo?%VYHROdbWq2!c*p<~2KsY{P`)Do%^{+7mBD~gb!834MkaYb zBp<@4s0eg6cjG~03%@=#J{FNNp}a5OyWId+7p;(&NNP^5dL=RcjTfyIaT1P(SYg{uzxaNnA3cD>WM75h3Vm z?ZB<-JCI6Ukg_NRkl z!gz%^zyN&mqC}pD^|cMmSd@xX41f_}K`GN>H6kF`4++Wf=lS7Koj&?A$`7=TVliAHF6D4HAEnB`AE zk&Bw1Ulzk`wlFi3Bv>Cxh(7I>W?68Q^V8{hMk+&P$sJ~9k_Rn$Zuy0JGXPdye1|bqNsosd&i0m6nhu3V*9Wc zst8ySEOb(VKuAIf5Yl%y|NptOZ?g#rA&`V6-1+@rv%Bxjn>ll5-rIZT+?$k`gjIj7 zN4+rGIrB%l5E{zO5VQ-U>J!q?Mm6Sc^z73EQLS3w@4r`KXBzXW1<7V5Q+z|*T4Xjq z_xYQtWt|1+JD@irnuJRT@qd=Dg1gYDRFBod4Fgt4$?CvB39#xwwRYF(j|3R+t^AG7 zxx)2=fAiifIh?)!%rVw|TVAdArCtv-f*Jy`cH6i1&VLY40?dELLR+E;R0{&uO<8h0 zcz5KMeW}CA0cO=NyDgOftCq3UN-K}Rp%Y-{TD}CB`J^I+LMf-4JPDgK>Ew=yZH4HV zXvD2ck`Od>s?lNYdsfLSc$~IHMd0uLUdMHnkc2Cjt(R~xR2p>X+#W5Xnqk$d)yUkD zCFE6Xm$qopsyX&%?vYS*#33@WndDQ7k4wbH#7&a?yj`c(vO707t3VVCR;~(?(A#%z zN2hKbke8c}f0wNlB8?%h>Nf~OkKUccQJS?kR|pD{asvhoKuAciC|KN1CU|$ELDM?u z+N~?LZr&!2Q>rqoaA{?YCSBwV&nJo)I;>(^VP94j($aSzKR*wRnl?n!CQXC@i;IuL zfRp+oFDD;M=l_lY{fb6pS3n_p9oJ0=uz&wtYX~q;s8btzspx6FFW6a>$1YLdE)rw6>_(HPXq=A;jceep~z*F z27y5V=-IEUT$}lH=q8G2(g5`vgd=Bf9{&C7UkMrL2(+PlpUwyl3C6aRbV(N9vQ2Xo zxdX8B?{(tvVtsUsckA6*>Py|UO+wWEfku4jebLWoOmYOcPw&*k~&b4vCpMLTs?^jg%HEz)Wox65K zPF9}eFypv&>(NzmCZ(rs$I9jFMFAHU?m*Y>UEp%M#j($RwQSK6p<$sy(Co~h!z@s8 zl+cORF19r`Y=}cfY9<__v?$hW=H%}~r=A_qta&r6{%0L_@7g288HwYz?b={lY8v9! 
zCL@SRj}EX7aJ2GrbJ3-H7c^)bjvc$UBP(m45CT*c(SG;r&cdpHSHn({hE8+06QOm2 z(4%h`nK#6_$Yn!bP9Apc%#_U{jT%RwQ}_1B&e>;3hR{%iH>rmP;SI1eH4}gT^)GA= zJ7Qzoqjj6+$jHdR_Vi4-X5;1!#D0_GQjoGSRmd(9HPYv1bpj&m~@A!~QG z*bH+chSm!bM>In^rfo|XArwP|?%KJ>kX&_x(4%K}!H_u=bFy=U_-Nd?p{zUW*2J3} zfu7+bMkG|*w2KlYTEhB7q-{w_!n=#GI!#6ZW-VH#Cim#F~|Bu|3^{eKlbMJMJROTp;!Oz z2n-6sKYy-<+v&;qB_uqalW}_ z{p`}U6PmYZhSjT9WA~0MvyPP@Rt)K9iZwnSOA@7<6K7d+t$R%wreKac1^AclWp7hp7uFsAADz@{=D_0=Xvk7?zPsn zd{)NA+nx9hwW5S_9;}k(xQs0NV=vJsR}J!(=$nJTaJR>QMKmp25fw-bdXL%o zJ3KcRs_Ec4p05m2_6?dAzwNmaGV}+mfqNRBuhEoi*37T8abu5$Gt%0?idtA!JhijJ ztcQ`^R+~-gYCvt)JHq1~v=rX?7D1?=;w~37n7#4)50kO(eX7Imjj*aj))y-iv1mgJ zs6~jB>@HE{A3pV!2wj=mMsprkgHtYV3Ab9)ZXKpE~z5{M4?}Jylzp4 z=1TokiKp;jGeSXXyxC12jUEEm@lgGvbq?i61-`Ry1rF4#bGD)pWv~dWgs`8CjwFP- zQlV-#{18kaiyBB9MXTAA*!U+; zdxwe8=yR4wexb!Ih2WJPBdV;fFJY_mArSBlwYMnff8!Mm@Vq$SrvFB|O_mQ%CfpR?HOy~v%u*?uU zySoNPe@st?$5^?F0k2qvpk*uHNYhK@tMfeBAbB1^%7n(=u|t7Vlk8TI@k8Mt9CidT z7qEDE_(w*yom=#L)mwECB$XR1ro40 z+6)4cZ~ao=F!om6soS_?wN}PuSvXlFtUQ>}w=y_AM!lHa*|QI+6laiLHn*tVRQYfAhI@T}C#;y8irY zlqoH-LhN?`lFD)aTCqEOreQ@DiQx*QEsV?XZA0(v;BopLMM)-PzI?%VyS{XG)IBZ{ z3DuLny(i@`Ssl)=W>2kfcvlm#6oaedm&0NSzc#~cXC!CSX&@mV#U&QQBkOf)WfKT& zR!h-XNo{&6b-1Qlyr63S-j;1o7dj}_TVRtOUu5e%OYQ)*uH@ScuDy8XLe@pD#cEa2 z6gGEftAT=MphRE4#otC4ykhFomAZV<&}ylSD_-v~YpSnuC>X32AokL$+=)S5cHST5 z32~yJpnw*LCOBD&qqIa7)6ZQXQ^}t`%_m*YZ4gxSsNpdpS5#sm!O?ZtU~}^G?~BUr z@a#^~Q$M_nM~x7%g$U-H%9E-`v`sr~i$iQu*z4(u_PX>LY6Z_w2@n=&o0xX&+HgNv z?rWGp8LW{Ox*cW~V~Q6RDic>oWcrppsuEQ0qaiN20Bx4N-1gQS{@K5qsNM!6D(ur( zig~*1mW$Dr=}Cgi>=;}rZ;3l`8va*wqGQ2{PVN3GsaX4P(i7)*80&el%9*U?e1Ses znIg6wUseLm&a)>vGNUf~EB(wWSt z5=uhaP*Z9T3(!bFK%x$R{cQ#k5;8G9EV5E646`XozMqGrAEF=UWh0K*dVfR&G2W_j zxrbH66r2aM3Ym{cE->=gCxg90fYL-Kut%1I%YL>}-|w)|k(R+Lp3F#2JeQ~HK6OQJ z(>oX6TyRt_BW(raHe#P3YsSmeXqIcRExOR@gV9K0hmv8nK!Q`TT>qVzLVUh;C6nf4 zr3GbNs~%A?r<8|R4)f1?a4aXhaD(NKR+rv>Rvk!@D{gU<*4P~Ql$)MfCzMx^g*q>M zR#Ll=x@u_Gs*xG)2*(7Bq*^8fwE=_m>=Q@Sn4j6BK_JZ3OsC0KhVH_t)&voH88mru zDxj|J_bcFOEkPQv&SAA)`;}R1Eagl}LNPpzh!h+~ 
zT<+z>xeyCAwxC0p1$MAw@y1HH;Xdc$y8iKY(G$M3jUYu3|IOHZZgY zTvUESOnnrd@y%Y!7|JH{i^KSjD=Id@L7ND^oca= zcp<5sBh?y+^Gv1B(2kiMD4M^>U_y^Bk`jW-&@mU7GvBs>YyWy9I?KO?OHxIOwso+5 zx(hI?Mi|@d*6FscEy=Os%J5GcO<)DJ;USJJ5bs?$)5f(qxr7!Q_*Rk%v&{0zOo!kw6LO6AH-Tr8sy6WqY zkOX^m$UmR3lo;&`T&o=i<>N$6@Q;ls^ixJDqe&Y_9Utmmoz3GTP1iW%dt{rmly&Zi z+S0~Z@A}5*&tB}%R*|(78wq@lWn+3PxR-y~a89>qi8)V`hyGpU(TJhoDfNEVm=c1(>X1EsWcv_(MHv#Vi7nxd?6N;To2 z;;3^?p(g!FoRF9!tX{@2F(ZSTqw|1-yoF9VjXi*ePz9X({%aba>kK?qR_35>1&4;J zUq8IH>!Le}6W=!oRxOa)y1L!3upX-1sv?F~gRJ0DI_Oh_7H)gIitX8{w+SR)zx>4) zO1{X}*Br=3Z5`_%q4*uJv7xVks32pxm%p1!--xg!Nbhe-BlZgTd-5$qI--j+t}+uC7X)3X@U8Q~I@)$#(zceV-F(P$ZSeOtP% zx(JOF*brpCk!2&NcZYnbzjU=o#^dQO$ec-0Y+{eaWg{e8GJeZ$mZ7b1N8ygPR#%Om z=UW0b7{?ePAzRGX2|pphVH5cZ#qVG+6H-zujqzw*<#UX2lYPA-+JjEQPWBd~aj*KN zl;zlAka>(c-`%6PZ z6Z<%3r9^9sS~qxRTo7A~g;m?hf0l%dCw-Pjnj>YgKNj^H8VrYzHxzBJjoSl~mb%(e z&{0U5@OvLlJ`MRJS{I>!xLYNkG_J8>{#}Urs>Hic z&2aHCp?R^ju4Y(bMu*TGyEh}B{Y@a3>SPmR8FZ0H9YHY3D;%n^#e#NouUrI^Tqw6m ziUNBw?Qr%6WFe339(%U7%b89Y;JxbZ=Yw=_U4E$=*Zzt&WD7y`&c3Kn#V>!G!J;>y z;+|+QWSVs1m0-xR7e+TQ=g5XLP(Rsd#Nim|X)Sk6?aa_QMc-87RngGJr-f2=XLr&# zMG~Cs9kc>dzG(TZQ01#LTc-9msG6n_V=R!eZ_M+e+7U%#bCS}O?tzSiCTl>k29+V8;*obSu)i&C+OD5Ah1JRYN zy|5=R+V6^<#Pnc=5khF#aH44zX3)V5?QXsHzRxK)Bj?ia9Zzsvu-YAGgv_ga-hJrr z2>u4)_JVGR5Nc6PkZGRh5K*;^T%=FDH)FJwgS6&n1o{#)oEuoZ`BoQHuoKX*Wkh-o zpn2NGV_PgEUo%g4)iWO~|5`B-UvY%Idq)eIqzAtK6q7dkwy;^oa9NeEzjm;k$gZb?dJ8Dn zC=4Wpu9c0@`P6IQS{|Pw?E?4ln=J6;k)j2T$(lqDvKE^1=U9sfO1$r7ziIRQrHics zqmvN&+VAt??zO6-nAc-X@$y%!Xl)6@id74lqDn_cCV7w?eVv}mnptu^ntyG(Y@{N1dB_6y6pi-Iewq60ObEsl%TgN3qL zls7qIgNXN7BdXONDmh?n@)R1wSPs>YDvy#TDM@FExuHlHUG~>!RWAI=NK>m3Q~oHe z#U|YDLEAWO*gIR!jBeUlSfFw1#KVHrgImEyjAR6y36XjN3s6G4=~%1f;!jujx_gZz>8 zE%C#<8H4y$_CXTxEw>_n){RrJlDuQ^xuxN=d&`ND#>B;jA%9t;ouKdhb2+j)ezkTz zg8OHM_Sl)}1xGJyO>KTC>)dI7T*!2fuAiJlT+o#^oL>Was`G15XhfZV+X{8t_Vj}L zTo8Y~_NVfg#`7p@-&i5Fr3!_{FCERbBQwlUi?N>(u5C0NsHrUN4lSvd-@=dRWwug} zll)X`q?(LJsaj<{F5HLVnlzS+C*p!eer8@L*D1_T5~UO~Mb=Fwf;Trw;y6^_DK3fl 
zR&UFH4GrwjecV&hE~x4&J@I{3>?W6Jqb#B`mSH4rQuao%(A-}!QOnz zf1NyzglsyScW8mN(3bIN6@Kl3`#ZX&^2%4%W#rA+{E=tV2?Wg=DGFPEz0+qQqvdZg zU%Twza8H%D`_AJskSM&l@rH`qDtc62;dw8g!9P<7 zI3HT2=2g>~_#Qk*bnlD(nU1)IBh)r=3*42Z?69jlITmh*EhqEV?5K`BM2^P;&e;as zCTfo;k5|Cg>1jT@ystg2A&3NzR|n$>&P9qzF#T#?-*bASgMo)nL~QlCKTVLYf~uQP z3CcP5G>BV)mGLg#R^g5QH5l)DHX{{36X05JJtsJ;xywt43fXka*T>kb5)>_7#(sswV_%nk zHL8$9l7|@c>=8$ckxu(AhUbM`Q&Y3{sL`I+?p?UGwRQK*L_qB%CNc>$+Y{wNPh9Ng{bmT-dQag(_Fgp8tu z2JI-9cILo#ItN+E-EzE?TJHEvSND>%8VDxgHOn*hRvU&~T{mlva1TiNdk~Zq?-`zW ztCa_Gm6h6a+<00Z@fxhS{ere}I*{QVnJj?&P=;Y3?T${pJPeMZgh&M8`W2$Z<@Hau!}gt+h)5VzRK&nUyxJ%jq^!NK^vI@! z2JEE!R58>@B}UMq{k4a#^jFLgf1Q1OF;aoBi-!=U=DJ!Fc>R^I+njp$v|>asf?mW> zuQfL(P`nzg;o#m^ia#{Mb@6KST3qrhynD%^XHC@FSPas;M|fXYW*~bvk!Swp>oorq^FtMyca)xyKX%dlgw;!H`fh+ix}UNKQf41{<9{UPe}|7WEq@Xx$#5pior-Tf6b@GY zB$m5Ht-K~sz;#9~suCj)AxWHjJ(y#Ye|4P8cRz@~^y}^np&-|$Sd**#gi~R0+oj!- z4Q%cw$hIAsE$faQ&EDbsieAHVbZM=diiE#LOXq{-D>XF~rjysw$rz+4a)d{>bv(*g zI{p>z5BfbQz|K&GY8@*=L}LP6z-2tR?G5K2C$J}hW+?4SZ4-p4PRBCpkQ{)H1=$X? 
z4GjBakIoqk!L=YC1|^iJvI52<2*@xhs0jam`3N8m?e2MdE0O*}dleDXm(Am~;m83b z8e#p19%|mi{TEMJg}f<2T_d%MT)S8ab0O4izRH|N5Xpb~K%E*CV4pxk1dS=8pKgyw zZL7Zbf;DGW1}c813<>uK)Bpw8fB*OdDukGtsTWxrmz4B#C*`Zy&yP4RfMl1F(?wp#t~&j$XI zk3b}N+_hgIMH95OrE_$0;x0_BK)+RNQ$zv${a+C3G7D+CLLaZR;YVT*K`f>~P*>Rn znL!L2idDc3*164s3J?&0c7_;1V2@JqLxumvs_|cF^>383L=6D;G@^2TQBapBz=l0# z^@`HfqgVApES3ZtLpngzs&@=f$WvE^X9!_6o}l-g+$LR5*@0l@=qjax*y;(w`H9fG zRIlqLs^XOo{eTaU@o_!XJOVYqQV7TyTL!99^L|FEMm4bZjB9fazbL2{S~C7`Ec>^w zQHm7+f^G37Y+IKNNtJn_q1}_T8}JTx$tqW`Q2;e_oe(f(N&&_a00=KjK<&q||0nPN zc@sAH|3RMA2}F@xGJpr=Wb*S?6kxvJ`DKvw>6)}qS!|o6khhYde;halu5;+L|@oDk!NaeD_V`6p#x(+}M@(g^xSF`?Kug)d(39V%T%hoj`t^CYk8ZXAkwKTYre$5dQMt!Z%(bxso5>O;UeW5*74aq`Q@p znbpk&Szq;aj94^Y5{BWu0mdfnf4O!4eow@ZLH!82#q}BSQ^(JSf%Vydf{r)M`>P63 z`r$d1;wVB~c=`eA)<6#dtf;}ht{hhc~}a3$@2=@+cW3g@3A z07PVNqqqSeUj#sUU#@RU4F3!J{#^{CB>+~<;OGw(Zdw&t{lkI7aqP$?lCS#+P}R8~ z&;1X1{U^^pgcv^ZADa0PM*+eT?8Es3GwBE+vWGw_mIaI4geToGP(aeAW5WbfxbnMn z!q6M{YVsah(WiS{-gWIebQ~!gJ^em<$aKzAWs7m_)N|>PPzq&#o%}Y`L4_W@N(x?0>U`g5^Dbye2F1CvGGA4=cC6kxyX|x_t0Q|QfTI1HB zjD;G^_v1GDGQuuF|7(((>31|UxvOycd=@NOhwKs<<{J9RbxH9ti3 z`G9{=0+TKZCX)YC-6!ArG9pdXqlSYe(QZw6ZZGH5iM zP5Ue6-kD*IZbetb%D`n9Rz|9e#2J0Oj{nX((_{A-wFE;pF-O-os0BtFza@`HG}_Wv(LdsZJUKr;|vQ-di*193S2 z;)iNT&insV`G2y1Lk{|(01SAAUoD~yU>R6G=+44!NgM9JPLvCgx(*-3D*Z2c7GQ)7 z&`-KqL{$DyKJb>gP~hEjp%wGx&=0s^XuF_gr8xRu!KTkcbg>Kn^|x3j_#hSW6lmM{ z&imEg%he9V^*E^MHY0r74#@Up{JE6lcSZztkWxv|!B#4&F)qLwh)^8fhj(v&pn*-J zVjzMd2KQlCS%iMVsr*Y_nLGb+CV)Ki%?(EXfJVM7I7^r!xt@SsDfhgbTSEJ9GWy@; zmr9iX0}WC&A_K;l-5a>9T;8w^pF}o_1#*8uGo&m>5xF6)*kY{MvPlLBJGbKsJNKS+$`%`Dnr z)9uQ#gZ|btmvJ7jhEK3lZ+}q%V$7lb@x&F`fF{&mO-xg+Qsa=(|dpg>wukW*)_jL4H)^*2fG)43eW-y-~-Z0x>rPO)gdsI zL0xAT!&%k;^|w#w|EIRKG<*Pb0)N!UqoM>VFaS&h`@uV`mD|$)yqW($05JiWow^4y z(~<{03-NJ2h!IaX;Cuj27my(I=qvv94I`@lu+@NGni2dTdMWo848!pIfVUA{XDcaw z>hkUaw5hz1??AKnuznDpnd27!qGoxJiHFbvUje&88u z84p8}7{0POxUVLfz(vb_`Vju7>bZ0@&@O)9rgdR{vI+W+hsaAIUc3g_`p)8G4;(ek z?Hi$m8X46$r2HqnD#?{kxch$uJgJHA^R{!3B 
z_i@b*+D+7j7*_HCIC0zf6Zb|W_ofr!-7Vw~M^0>P{W?RV4YrPnkW?t&`!0~b9!;J_ zzHPKyZ$$1_wAd9%VyPJmkB?_ferPx7v;Ss3l`EOK5`s!8C@TxEs5B#`P0rfLqfN&- z*MKt)Z6E36Ohs9N~!p++%%EAv#FYA?2Ze(Fm zvd~07xeVmPgg%O_7_94nGKaqlmRA3%NV@qx5?Eul%!R0Ilvh}>eozZz=Sy3Ddzjb7 z=D{2^k5uax(8BKw-)sgLV*vd*)DA-d(try}oqf?m@r6btq>4!>@#~r4KCcl9_D?V; zPb`%nso#}blM>%AarR!~x4At4bEIz5)L78vN4Mk&Q#r77tGB|2z3sewf;Sg!a!(h7 z+|4=Bm4xJtC?_pFxFG=Uu%G3Y3+$TB2YWs&8fe}e%y4KX!StGRA)@rC!)T@+iWbN? zFn2y(Ld#HU+-O|31sliI?I7z?rhC#&@buv+c(1{a0E%lMLEzRu(?r@SMkG|%me>)~$?EVZ`MHqjF zfr_f%7sBrSGFa-{1c;mh;bkDBQ)2ptpzVRSsN%>Di%^{_Qs zW&bVS1FBa_^*o=*!SZA+>qn%H*Ukls96S8$^KHT{Q>lv8*qb-2>G)GK^05iXi7Qua z-8~@-ONB7XQoRN7aa4`Xdqdz+BXfn`f9kntT8L-}0B&$`chQO)F7o$|o^71N)*VXt zx+#N#sw#-HMAz?ua5bLjczNvr5`OaAph`^@!V==cyvJP-DF7lH1zV$A7>-Dwp_Q&( zB0YDyEcP69cWnn2xufiOWjdDej2i-rb#iD5a%XbdXB5K|UG$5`NnO^)%z#i(P|$vq zwkg+cy%R{<;}K=BimbM}AYtM}l18K$ZcrBG`aHYV%HkBq%8Rcihi6(Bw6h3V&_X0>G!*4hq13NyzflvKW}NmZxl6m*qk1;Y*4vc985r+ip2QnJ66 zVI%lPBOLYuk^#5wbB{a0`RrY9v%D7z!=T$Fb3C8wJR@_kz=(c(G%sXq>;zMTiHZ{` z!bjJeODIH&-Y}}74y=_ticVVWVMu_4Tu7|Fja^!=x7aq^x{kFheo7FZASJWy2ftsB zP$-}i$v9wz{tVln&C@mzT})J{NnvuVy5e%b`mnP-$-N_?!SS=S(z_h z3~zW?2%NF*!U_D#%XQVm`7ro2Fg}4D=T4|%?R=sRx;kh_hW$O0*s<}(Djw?v#CB#UGN3>Xv}uO2lw~Qr4=GF} z0^Q(Z%0SScBMTCZ%%EbS^z|H?8VcPEDalnWx&xVAQvN;YUW8=(` z6SB<4!s1~&#^hlm5CJhKmW0)~BtFJChIaR_&;4rfvTX|*9FaI`OO1mO(_l=O=p#7*zT8fp|ZJs+0N$GtYm;#|x8 zxllK_nqDFTMh{}!&O=e@v@7VN*|PJjx|i16Tto!EC-YzkOq+C%8CDzV9+tn_QBrg* z(2Ud5wY(v7CB!5bYaL!SUXyr*){Eacoc1;r&Gh(z0<@qF737nPLB&bc|AeORA82wM zbZ;HKw4#jm19oePDn92yCw|~}O8_4Mp`c7mUT+Frk6pC-@Nhx>TwGiF0A!E_>4)0y zFWw4x!6Cs!-RRIwb^PM6$h^mF*4_I#egQ=}9ze|lD+XkuA18f;yt;Z=Vxlt5`!(Cm z>0bmogiSG`lU+640otZD2lc!Mnub0!qoIxLwsX;CS7pz%*b-&E*Lq`^PjFwo`Rh!j zApWBk0PsoAkdcuY9kw7=e4kH&IwR4oGCR3ReoxQzVhhZ8hH_gS2kE*~Y&P3|r%ApT zHC-PcK9x^H4-IO1gBC?O(E||}Mkfo@Qrhc|H(i&5R+w%B>Q&PV;NUgR!-|>Q7s~zp z^>j)qsx`q_11Np6Yrf00soyM+)7h-{6uKY@7$~5>V7!G9;sF{oRG{oE(<%g{?Q99g z{U8r&y4XMnOAA@_7ba08326;999$p|1GI>35n_q08xk-hE9hf2trDKsNiHfo6tifk 
zleDZYJP_rrc__-6>d;~%oHo(Mpy`e^AeOJPMNEEg0mJ2VK?`2zI&Y6CtHL`cypV3z z_Zuf3Xkqra>S(^tZbRyKpr8WVA4q4ha#*q`8unByiY5$+u`c=A)?ltK+P}LOmZIwo zpQ6M16_5zYb&eE-WRSYVf%A4|aI+jARw5#z*qoskd#$(*dhEDyqF`9!X<1};dV2iP zN%KnauVC9i$xSZVO=z{>%lT%QzXV1_s@Vq z%FZ~<7r2=kH3;}Gpc8P&bhKKH{gg$p{r#8Q)Abf81ye|9@`%1~j~Lt+-NwTPb0tdT zH-|G&9iDfB;^xpBu2SU}<>fx=)%tlmuBCcibm`wL6yae^NvBvg(bN}0u6jX}U2Xm_ zNL*@`&?4P6=@*K`&QY~(@s=|%Iwrds;{H*wcf9Ux3@47!&5ek50 zO$I*39Ps>Fhi2K*P{`et*>2XNv?)v2X%?ka; z_jc{EEG{SnX@rxqOr*Mlj6CpqplUoWyjxpwp#*edlM^erY*6Y5`l z%y0l&5poX<$sMsesNQtMyR4fZ%v{m_28}QzMc+=aEoJg2xuPWX?&jNA=A&+~aKx~J z$rT6I39aA|zeUNIuE!&!w$B{91^rNbX>%}O`(g=2K?hlrFqrzvXLfg?sbuH1S3a#9 zjZV_`Um`aY>^ObnI2FD%uS@y;=&p`ZF*|(XR;H@Z{E#2PaP&tC3DwxT-*O1JF(LGw z56IxrTzKT!?nF=b=j0Iehzzcw6lS{KhosWJ8$F7TJK-quUj~2nTGz^<^P1cbjmaH1 zZ{ehn4$a(a#|r}o8>pM)jo{2PaFh}9r|pk5D9UqD{=$9ro0zFnxke_7RR(&w)kOj) z%{~hhv_5C6QN?+(vC@w8fX-`d6ra(`dz_nYVq0UW6V{*;t&Z;*0vsxvX5oBftRLI1 zGZ;vpXl}N%FB~WSRUG}aNVhHn*?F7(Xu4}iLl03=rSnkPL1yfvaXms*>8Du_0{k!0 z@lFr2{=B!}b<|%5(=ijnbuQPF?y{=R5GZ_4<<{i_FE!~@2E3GL91B!E|5$ys82lJH z-Zxa6<-v#g(faj~;C)vN&v{H{w}^-Ahm(gXA-t$zmz`Sp?w~X}l@9xPw>+pBYe|Q?_&B|yLtiG`d>ZFrq{g)r ziiy{6z1|T!J{i5Quq!1|cg_F&5BKtcw;|P?Iye~eiUz2ApdWs;;(@yM)sWxlPX{_q zk&&q;^8pOTGARrSYQJD}l`+0&-;H$OziOJ2Sq2{}Bq({^(C?M;=+kG0^lt%Xwsi)l zMoVbn1_hDa#(#wAu@z2Zi0natJlrq9Y29-wCys6LVq; zZBG~i$5R0zjKKGTNG9F$USM zkDcM_Z^Te+b-o-oQhI6|jKWUG5yL}^aXF!FlI~3u!$XSS*N0)C7RH0AGUyBW@rRC* z5!C1DMpRXnP*Y5#_t2S`#fEEw;}}-qjKywE5SS=u)@cM??aCM?C!K8C$>sEEg74j1 zVb>vYZz3lKiFkry8ZxBPIFHImOOtbg(+J{wVBkg9WLM%OmrFR>oY-M6zO2S`ZESX9 zhx7@SZYv90)e2F(#+W5d|Rzg7Q*h)LqE_j)OJ$ zaBgHIu1WTMWfsh+g*feYDr>YBw(IFqa26M{R3^r99-m0#mJ*f)lCnyj zqCGf-P+v`dgW1|}yHDJi7Upl&p& zlrK6_2w25G)QbFm%GpD6d8{RDwub~jz^v%oP_c3{VPcipE~j2$Vlgt#RQc< ztVYWbLL`KQC1aM$O)-~|a>F*@C*VKY$D!1{v>Gfaj~mV(G>>oW0%1_dC8L{D z8yN+k9BqtzI)B=(B7Q?5Eg_6rR5f}oA&q7}hKwLo7av2j=VnV^`JuY`vNVS?w$F1a0ES+V|4 zYx)+_9VZV&hxY7Lb59i@O7Ve!NGZNi$9JL)HUOCIu?gkDT`EDed*tqo_iPXbk|HFE 
z_-jBw;_5lWGY_HSh9!`%hjQ6=!5QQc;-SUEL050SBJlM$Q&%BF)$B?)W}Y{#c%yK- z9Yq!wCvPrK3&le92u|)>$~TB@vrK98$2#H^;zU_X8k(kVg1|)fJOGF+tUa- zoz{(QIo_TdvmAOPu<5^x9HwLDnfARf2MaaCy}d^Sv^LnP6}rJOd{3BwgHAtfdv`An z&A=&vrD-#`Y?o)TJs9t0GZ4OuY5yW&{d~VtTMhjiFw@@ zTTQMRJuiS&oAHcF+jS9hm~fDHNh(!US2tt6$@o%%;{qxW#~E8+Fyft`VJB(L~mCsS3412h;t`ND?)p&y!R3Wh7b= z15CA9kz8N@kL^*CZug4@l)-{5a_hpMZHO36v>=CoUvN}LADl}PAUec548j>s-*KaO zb1;pfpr9bg+dnj9a5%}gz3R3mRJRvbjAA-l8v@NhB6p9c!)SfLaC7`u4%LD0?Sc5L zd7JL>tfV9+LMOKRXRXEG37uUka0C}g=CRHBisg&Rb?T`CF}C4b$kcW{x7|DDec!-j;I7#y3meCWLEJ$kXO zi+|=o0zQ)3HMYfv-2q(KYFaHI>?UE&lx*2@GD!7BzxNQy54?|kN0G`tx`6MInp!MqzV@p#<9As_-`AZ zkjrL<-Q1kAZw7%jZ+!nU)#iqO=m#u}2kNwgb9^e zi^d83tLrBtZWGZ=C6gHSl|^y30X!O;;CM#}frsQa9?>*g%ohU>kB%_7>*2*|n11NZ zA&bC+!}&mevAx?nmIP9*Qaf0V_Yt$}y|X;=b3OZs_5K8IF#A5vkLYND4o?@6p5G1b z16RB5#}!RT$d+QG=(Ic;;3vN)hHvAwghF5i-``#xUM_*jEv0C@FBd}6IdA3VDQv6G z1?6ad@2_ltug3;dCe>-*R*u_>jqIVe-~1V6znxWLaK9kXATIA+-SfH!+Kxy16S(Gq z#Oc3BMS%DvY4Lz^at(;Pn_ubS!?8Q2H}-;p3J!$AtR<1JA+Us)gwqhKrk5&Fs# zG+Xa|yk;@n;myQm|5wUh{D38rl#j3ZF?74qn|v++TdSL<#oK&!?i}1#OZ} z52CKbjJP{pCMF?K&rF~piDy^){3UD4+|Tkz+56_QNKlq>hg^lKKs2iNawx~Y7@c;y zUBn5dXczi(Prk?(M)N7e_KTP6En&Oqe2x>=Ic2y?VPkXnzPntJbF-54hG=pKg5c-!+{_oDBM@fxkwQzEI|`y0F#Tf zKlG03ijfTG36XYJwp!x+lJoln#82`i7aFX{ZCh?x$I@?M@`2SZjHv_Uw*6_uvVGT+ zeDselye}I*EAH1e?l*bQ4%f6z`LbD__MBK$<>G^bL6db-sb9**vR^sqT6PpyoF;kL zO*Z(4+2#u+jAkpaL%KKg9jvq6xBR3K^!Ctq@_zQ?ngX;@}%1 zgTLQuOLkoICGGA`7Mm}6BQCdriJx0P$O#yx0!~h}9ik`GZyjJ@OAVJaCj25m>QMFD zW5(RQe!(~_@@k6zyuBQLxXZA!f3oQ7Fjxf(6bh=x=5~iZ>wLCN3nc;3s<$R=M8QP; zzV7>sMxOnE-+sTw#lD~EBTeS|46;>`%U0!K6c5PeYY*BR$@YVplTs;fk!@$KO4g}CFl=DLqN0X32ctpyM!w~yadAv~ z7ESNZ>(<)dm!$4qd$5^|hCw?Z^J#TLX@$1f)#dBecuGaGeP%i1ciDdS5SVDmCZSwE!(9$y1(4@Mee_cox8>L+a+$)ZjVN(4 zQuI9Wh{d9XWo_}uR=HtvCf|??L}U8V)U+wBzWpVsbRbbQH@jc+!q4`5gXHiVNfZNQ zli-`>tb7>HUEI4nAJ=nvU5(RU*%}W+{(M$WGEwX39TR)|V(ZR>wGO}~_Wmpy-L8B_ z+?gks<`i>`DRG#Z%9_gRLM4mGE-yH!X+>${dV4e{NI>$QM~Z8aLhz{3pW zj3!Hn|IM(=4jRJulV0k1CjYXWl%5_wO5cSOflga_)zC;aZ&mTnBAsGO2+MSk?YjCP 
z32uI9VVKYZm{7VnmRMKC^Zxw?3b_Zq49(5Pkfd&R1cyYIUzF`@|Y4tLv zCIL^tX>$m@mYNx9_DfF8)RfG&?UZ#g+BGG!$#qrJAqZ$NLIE-bEl&HAm=!>XLJVh7`4eMw>n;u~+!vHT!O`2Zvo=7B8dPP=-dE|YCz7LC zIFzBzSu{+N(u~8IqU|-Wa}2P^zj;)q4;OjvPlrX*El%X1&cJvK^>$;%uPoL<%oCUa zt+z+R2tRWZH*=?HGs2 zj(abtMvp>=yG9^gW#H0FGBi=U>$e%bY&(cWJi%{VXPO_fk^0*O46NEli|Syi!8eme zqZI;(IY9%14=_A$&uCpR_Vy*{GP`s6U#^br*bfT>wqon&6*y1Vvmepv*x$hjl@Iv! z8m-gVjW}eL!GH$V$+D-`qtdbq0QnWK14q8~qza#3ns!IV?|HYx1mku-n{nfZPVQ>F)qd*T~cBW&X zFz$SvnjQO>t9A$miJzoW$r}LvM)Lxqg4x>X+XE}Ms&v29bbXOh0I79!#JZK`PNvZb z-^kThP-V5A{T);W3)`>fSJ5rQCev%z^-Ac{anW0MuK|~=&4Q}SY+e`#>Y-C2LK)A3kZWNgnPfQx;+>zG691E5@Yj}3uDI$c_P~!aXR0g zv@G@y0V^E=!~uHT~(eIC^UGsz> z@Zs8;5oXGUfP6E!QLWSrUh{eS0y^l?%i>=M?0#-1DP&M_bcUX?^?Th_vzX})01uYm zsDruGuJMK>lWBz+O-{Qg8)VfJD+r0&bmi)3;rp@cw(b{GJteOYnr;S{66+cg<~GdP z&9PE+4pMJBFy#qPLPowlQezZkZ9d=LQhe5Y#|yxe&5P4w&cl6<9}xM&gixY9a)cQkWZZIyjzuhgy2p>i z=hLlRY|_)yzz)|Qn>)Ixtje@gHrDZ^{IS#cz2~oUNu<&C*U zgnG>I>k;ZR(WX=|NtMU@&T#xS@l;Q2FI`VUu=-_l@E1Q+?A@<6aoLkRd^#sN#|+0W z*OR;MmPbm!&cB{0j3IQQ%XEdDhCrLnhLOqf zOV95a4$t#~(Adl@g&+x`{m)U;e`FB5rz5)K2)ZB2?QCcgaPnADzGi1cKH&72;RD5K zx*30xsliNuuwd#EP#8CgR4f5DcIsd(h4%?48f-C{n>bSeQVWYX`G3rp3Xx3T&zWK5 zOoC~h*}q><|42xH`m7F^7g|}q*KZDv)0^T&w^(Lb#Otdb&lT^HkL*wr?-YqHl}-P-mSuobs; zW0(%%+q9rfFIUb%ZC=e@0mgmgFStIP5bD0Z**gi3r%ihHD-J_H5;%aGQes&?9c649 z`mPru@iY4(oH;QmKi%&M?_Qr%fx*CjR7Bn0PH>FmsJI;@t8|?Yg6)@s*!kK9<`kT= zfjICzDex}H>U^*D(0V?%Nr=R%7IXP6hz1;s9O4qIAqZ<;P#AjNz|`Uc<3g0TP!7J0 zG3JT8Y!p0QIY&4hASl1$yL_52%m8nr1_Kr;YxyQ__zWOrMi@VltMH(A;7UbB(6#n! z?X#`>1|t>4AZ~>R=S=}Y0o%t62+XWjRasg+uyU)SX!Y%3&_>hUu7f`n)PVC5ieTy! 
zW<{<`zdzJpFX-qPPSLfn#_4h_jve+licR*a(cpAx?qG^gb!~rVC!BNd2YEJ=jk_-= z9X|6Q_NVy$>!J0N_g91tuRGx(SzOXW5}3f1cJE;cWmZsCoWi&V%|fC{>WN-uCj5l+ zBL@Zu%%s>x5Ny_d@{Q;7&Y=qfp?-83*%o~o?Ph495O`3bh&9*aig=74cfEqakV#n1 zof%W1!B>GP(XOSLWPA?{c)s>bfNIg$1N)LhNrKD!a$w})_uk>Q9nEzvTrIZSe_c`v zfNY9X>?n@D-xa)f2L(VPEt)vuPL>A1`I85%> zF=qFJO|jk7nGpEC$fh->$1M3E9U51B;^>xYPYeBq^6yPPqZKYB^#sCENca>Mi4Lf+1kx8PM@nH6CqhN^p9gsmmk(uT*RRe*Z}??X%8Rn zuPMny;JmROn0r(l?f|jhh69^b-%Xf0{1qArE57%m zfl~(y4Q-h=Q4TV4Qr_3-7|wQdrN9SDHXZhi0PY+nhny>?CApkkC@k-edWtk+X^T^kn54OT?rxad->!%N(;=(ni z{-T`i`(_o(HB?x8Q$qmFvf)9}4M*?oy>sQUShAhH7~`T~X!}bZAFT=rCpCE7riI$i z>B(BND%9v02jBzF8_-5W@SeUqo$G}VIbNa`1*z+$RmLS<9(}fuR_K!>Fy{}Lv5WmUG_qRi7LMU9bS+fEfyxkb|;_UbU_34X%vc2=BL-l+5b;;Z|87SEI4!2dFm5+Y$Ysz;88EevR=(S}CxCeaRnECws<$q`iql zb3JeJGh;p10+ezN*g0P#3Ism;RqF>8Q!FYZdxC~U9NSbbX1VDN^)R{1Wyu&1=jxcR z`Tv&%P?I1=6HOC$o9306>UycVE~ldM*+*RQ-piHxJbUo)+d5w%6Ow)~_hT!Z!ikxY z*PqCQBU`X1qzFJV1eg#;QYB zn4oGT+y!B|dik3T03#T-=3$Hv*KO6%KWCdpCBn;1RugaB4ptO~xx&%tmU|NI=|k!J z$v!$$*=@hpjyLv^eN*R(iMOGkN7wmG`<8i{vtYRb^nnrEXvBpZMYja^IBchQ)@H5! 
zZhxL;A)7C{vE%;!u;t)K_V=xkn^ig#tlWR;UvffOq%$>CMI_s4%v_?z@}!MHCbF!4Cy9 z5$(xJNK61=y7o;A{3%7tQafodl|GvkIgJ!fx~+QLqyBfaO6pL!EKfFpbGT?e6B1U4 z%Tio8XO)uU{vn53kW%5bhR$z?+oud;Xs^OcTBjh8WdZD}^Cj~Hvy~RMuz&P-|9ajQvo_h8}#U>lkGV@YoF78+~N*mJ6E)me*)p%4Hfy>B7YC)$NZ2AtB29 z;H@2{ARZ^67%oBl8J*HhB+@13uL|o2xmY2>&K(|%D@5d1!(@qxwB+5+B{%CY-$HBX zdL9Qz!yD@63+O2WGx^Z0b8Q81bSWUapDy(}$C3 z0w?w2c^LKKi5j6Khb2fR=A9^FvWz#o4#R?sgRo)@$(vwvch)?d}=gI{nbEI#rMHP;%?^y zIB!#pi5I()35|5I4p><5n^yy@F+x6{7ltLHC0{EWn1~skW{Jdk|B@fBv!7au2Y3iX zEK;tXR5a@DW6u>_)-zqNr}5P`=o60M#@z3GEDXoh{D9jNgB*b^?21qIm;n5RaCr}P zwA1)s+9;m*|4XVwSK3J=u>S{J>pfomjh1W#kZRpV=?c4{44kuGP4g0nuxzAXHR^miBEm zQH2~!H~AC<-<<-qP{Rw>2&6A(fZi7-Pp2V^wi!AbdjFwODpDKJ5HH)WR;%)XEm5~HBn(_2FU&2t1bi0JLq9~r0WNF@Zl#3Y#G;jlr(B9q zRQ_e#@`Eyi^xJ^TU!;HsKR00ebbnaA-8u3aGuWi}@B537V*G&WsU*+Db{t&1W?K@+ z%)<#_Z+rlu#6!5fMyMV4St!{utL=JYJHZ+QIz($~zX}=t)28!*MS|;7v_u9M;Q*vy z$NmqF;0Rf`SbfzNr@5WZn6liLPu&J&e@YB?_-#x%ssBijZj|iYXrtHJPRXwAHXy6Mzio z%H%Sgwqa}dNbqAy;z2p87SHS`jMVw2;|42BvCeEY+^hWxen?P0u``B=5;gZ-DIHS- za7DE8{WQ_JM{rEcZ3L28upPiexx#LVwFb>Jxb@;*Y8UJ?j0*D_VqPbAR-lTDwee=? 
z2m>7hN+Hla1m6sL9x{FWwGC`-q9n=PUC;MF@l;&llLO_y>~=2$o*v|J8yXti>{V7- zU2pqdi{m9n7bDgdMDJxgb)M%)7*8H9bfhGPVIMHMG6tFA2%7%e*N_TI3bM43`g1gz z_H*W^r?CvA5$}5`1h}|Io~mBTmMw}SS%ID2jZ8T8GbKq(jwQ9K@PP%F>BaNIsUjIf zn*NUkNaE=qAx5QKyX_pkch^ha=%2awjjY5b^eTCNE^nxSt+du*nMRxie=g6*@I!fn z(zWmT)<8>l(ZYW&1E7=fSzIC#D>sy_2fk3g+$D_Mc?tnEVQ6&IfD*}O7t@o@;3%!- zZ(O_Q7_+e=Y`(k2Jd(QKm@vi0W55~c!C=bKwL+JPL7?zw5!7tA>AluaIdt|y=VtBp zpkuG#Ocr$LElr|3zqhOQRWOU+eFH~0{DqYi_eN=Q#2w>2^I`Rd436#<<>j@5_f9c^hxcKUaet-3n| zHO9iS zulLv_wg(f$vf3Cx=L@mNx=+9?{!`f&T5KVtlf0GB6S4ZeXc}f-QNc3)>FzqVd8S2y zu>Ijgq~)UAuKk974VmM@^W~$K#Q)8}&;OH5)Ap^<^WB>I&(@zRDss?5DMttfq)zq- zFlEV>IvcMl|J{n^#b_iFq?7MXn0_7aQnw{mD+rfX_+apm+3-)VQ7JLxOXq;Yw;C5 zaw)L+;xn2aAVTh)3>wGVy|;jGb8iUpO3=hCu-3{1NO zZjYlhG?k!QXpx-p-;X~O9QhLc6jH@?M*_;szGcjHufYJ{#t7aW9hKH2L6j{FVdaH$m`~>HXOaN`!D}&JkBnzN z5G`f@vZmJgBi59XZar?T13qc~hY$qwhl7nDQfCq`V!xM5gbR8I7TEnCRMj`37c5^k zSl@xeY@~ByxH=!ph3=vlh)uSu$k-M!adFPe8KS_K2NPL*YLnlQj?xtbeWUbf1pLvt z3CeSZy4XL305g12n)^3>>_OGIGPy|n_n$R*IK6%N#Ka7lv*M*oN7Ds-lM*-f&>bzP zc@ndB+uZD2Etmmv{{TG9!}opaJ|KfAm%*N%vfXG8y?_x!a;u58d%O zt`LWO-=TDag}(a@Ge_g>@~bEf*BzzpE-eIsT~f#S;;~$@CylvDrC+_q51~aEPl8yk zxvji(e~}6z7RIW9x?(cM3)*pgtc)Sy;;6k|15C~}yhwV(%&}9Hs*VUCY;TH(h9izT zC2`Xp`n>`Em3VMLGc<|4SOwU6&Y@DpliAJPN^tI*d=1)TYNRAEZkb}M{k%>4^`D>N z?VfLvXPLXP5w%NH#poSY&G!tpy7YqIrqum^SVynbu+hY8;vKZIVy%syaHF8AKo^)mqF*grA>y zF}6!q;Dc`QFSjW;f?W#o;u9(ssmapx+z#0v50sfKw6VGFIRvVkh41QjFZQ>-XdfRI1PgL^5_bpo4-2^Ejx7t z*P0J0PRfS&KV3h7jS80xV9)=T5uJBI`OJqnSWquuKguYT-#)fbbK%M+=*U6+`8bc( zSq2D79!2(6lj{8cn3UA<`)_q9nPZmn$ihGs!im31MvNfUlyrnqd6= z)E(5rXUqTp9#IMEUE#VRc*7jhP}hUguhu)Wm_Trs6M^vvT~W3F0qI zek?$=v+I5Cg0*SxGG@Qhf}auBshl$`!gsSFi9*&Ft5+<>_A?UUr-7g{sKc#)s9z@A zPcszic@H*4Uv)iK29kB1Y5LGx!8b9;OfH@wRZ;?d;KNR9+TcRMNH zsqINA$AR>c;M$8xIy15^aJ)%MPL2jn1JTb<_c0eiVWXt=ECpoJ0jfilp^vU4(OS3h z%csomgQ7+U37?bu%OSk|gG~`*1CN#sZ!Vh@}u6@jl_8uB}WNpqdMIt~=cj)JwyGdoJSF;l}oqsX!qzXrWmNYfDkfKgmB5wy~Gr; 
z{jwzT`nELs^jPBeDhk2p{f;l_rSfTO8i?;DS&(LbV#=q)qcca(#t^;V;i9B~LzTb}PV|9a22Y%<@m}V_>f0_?_COT4)U{F=bv0aezskd0 zc1IN?$OLntL1LVUv0JoGcqZ8g!`m(8hGo3G7Q=5B#UGZ@&8*fUS!G<~3#XgC#~42_ z8*c{F;2CtDhbti{abDs{eAy~O7(5@k9-pl!EBHpg48z^4qNwYB3||Zke_w*}4-X#efaVBOcs0X5V0oz&%-wJCUM^ zy!BT;1O#A{+hWB3ICFw?z2UWQY5ymL%1xCp$r;Y@Z``tPMa(UACX3edL8}}Z=CZRT zPO7g$Re^?K^1I$zx+f&8B1eQkU3B-jqZ z#gEYq#1wH0^S`M~`#J1#t~X9ze)=AXtS{=T%hMo^#YnPVZ}Yi+v5ibbZV7_y&3Dkv zt-`!_S<{Z0Qa;TW-*K%C9`A^jPs&J3J<<`=JB_lu0ZSR|L@A^k`jG&TtvJcb?N?5E zDg9ffw2sH(vrK4chKTDm2$xzce(m(S8}J>IIQ@M>Bf?;P8H%xx)-^-#iz>s8=p;H> zldYXH`Mhz1{$C1&Ooi&3y#!x5a^fi$#a%eQ^ma(=`T-03{&Yc1WDFwW5Ix#bu>zqa zS|)z?_0eiXGQ-#~IEIMDzSgfi(O`Zn(;SxY8+`dk1h#1CF+Jx zJm|*{)&6eL*p?&c4rSWk0f)RR^W$;ec>Cd}_9q$pNn?HP2APM{`(jMdU`8}rOx4$B_HW3L&48oiM7)v{R^3-K3H zk`b=2mz~CL3lOY&)n>gH+VxyNl&T}bT($nWYyphvH-kEPj zbZEZWi@HC1>@e?t2LWs#brMd!=NN>+8Wr|&L^wIpc@J()V-?6`fwpxQA^R=X0Q;PH zezZWPK^rBb8wIrXB!&y#y~sr#@`W8uaXy*H+(*v@zU8Ehl*wSC=h;DVLkdaiRU=J5 z(>Ih^Z}+ZepU-z*oiN_I+dBjH$jI%J-`06aXIeSFWbw&nQ?uX4rW;_pl0^s$0AlBz zzqunS{Z@P!F`b>9^39~L)Y}WMYJm!_JJ-j#JA=)>_KsTL8^`uDDQ`V5elzTc#6Ui| z#m2)$OOaLtF;wpG&`V(71mM|>%pJDlA8-3=>uk==+Gh&WcI3>+`mo7o&3_B7f0GOj zNtZWkR#<9dP5Vjnv7D&Io$ahs2Pap^t#sz2849=K__p)r6QB;b!+v9~&;_k-Pr6d( z#x(2kS`PvmCEJmh^xHSq8wJu4&O7@vzM%LYE}(2|JMZfw??}SK-)6M?x-@unJm&Y< z?Q``y4Gtr@OeF>%-nrL<{5)NQAnrjLkw3V*@uHg@*OLz&evN)))~qOPDU0PB50xxN z7&HfoKrw%nYj-k5B!&85VM}Fj=GL0@n3WX)*TbwNX}AVyAow-{*NY{|A*K~lK~&+V zJ}^fy3&o6Aq}6M9!RJ-QzRh>27x3_0?_6B!B-DQb4u2w3!tN$lXrexGDq~iI40r{` zbW&>^SE_s1+-IuSeVO$em}iSs=**~wSgL_uV5>0YMjrNUh}+Q;BAd4E-81mH$hrre z`^Wo-s}=XfZK8u;pxCde-jNY9D!0lZ{M4?*giq$O!3JY(CShS_Z5UH9lb)9X(>~c5 zOp|t8-Dh$n9!(V7Z3N$PllPDQx7@-_PG3CNyyZ~LU0~4dS0o(zp;b;DbiMG$JFG<3 zkBXt;5id-Lv97KB5qh|9wY>ZR$rg)tAs_52>muuIl%1orj|3M@7twJf4`iITmzU>m z?YJT#X&od1em{>TLJC$rJ@0wd)UtRXD`{>t&Gs`O;HhTZ?uo7Zg7*3wFy{!6cs`EK zqYHVkE27IxLd9JO2mcEr8vd?}5_;&8j;OhKpjI!L9Mc{)fPa$dH^&Gjqrl~!L>;I3 z-R|vbF_Hcejv0Qs+bHPskBLmEMgoXIl2szemM|}m9S-oJ&_1L+Nh8K`a!xXxz+weM zyvAdgRP*1%c)vsBty|tmfJaD9)>ZW 
zzak$cwW+_xi$RG+Xxc1sFV*f~?d>B&!mR#lC!mSmuhj<(k8oL~fS*j^1}0-u%X)4eY$ zH|A^ep?E@1j4|N#)1TGbYYsIH+>K7NS*oes`L_w(QR+GzLLe=U>y}!at2Jku>3A@> zb>3laMv&4lnM>!`>NoimYtp|6Yq7b2E9VH|0)?z8)O37SIMt0@;O6H$Rny~L21m z77cFm=~5IXF>Bw+nN-%Sr3HSbBhq!6o_Zi))6&mpht|66>e5nQ%Z(5W?D9wFxv9ia zKa}nGZ9d2r^T1XD3}Hx(c8kf23=+wG!4%Do0C+i*d>HLF;(l{-%>1#uVn@i#G>@Xy zA)Q?hJ_%toohGxI*}e7I4C56F*lIx!k9fvyVM5D{HE28TL|S^{lx`YBPnmJ(dsH_} zycus~M9P2|NxjL7p$$53F6N`v^zt~qZfaYfNEj_u<=qAq|E#pzQ@!0uNf=NWnfkHQ zIdVR>P+6)AD;FuI2`v7K7b(*G62i-s`h|~uo&$+bbiJ+Pp_R17`FSfLjH_^v)FF=;*l z6zxO)i*!#rjH5ON^xWwJv-9^nQS6NcHvZ9&~Z@kasHZA9uYqW_k z)B3Tpjj-J8B$3CZp}!JE%4IazbBU2JiiGIQ36AM3pYkq{BCGSUsQpc~yEoH51Ffc~$>g}2x3AR!! zFVpiiYNa5}T0Gp`(+oi>#OFzSyhxdQd&B<;#+3+zhNeV!EUnO-M?jMXbaR5?B>ot5 z6SUW8CIhe6izdgol*DCI>h0Isy=~TO>q7}Ue6HTO>EZrvbPws0Kj7kgh>T}TdK2t-%MJXZA3AZl9s6JpC5=BJ-`)u}sn73Z3Ls_r zEyl&-#aFRRh@k+hW_UH+kH!@x2Bv4aH<`(m^td)z4l?J66NpcGrAzf44pQHnr9?F%_9 zMxmk$Zo%tT4#0<9w8@726M<63@>>b{aMI96*^CHp(JvDgXe4a0lOWZKHU=Y%CF|IIZrfRh7V&>#3 znR8LTyQS~z`4VV;lA~v{P*Eo0nezHFy}}?upKF0Q$9U?2VB#x(a}`>=Z;6&nZeGXz z@B>jdV9Bz^Op9`^O1F6&6YYhUAPC&|B>|698#OAy4YH#CZj+aw4S1M^{2{TkU| zJF6R$jh|$K%gssl+BJ;qABbz>t03YkA5d3&e39P%tKQl(_6<*O+9e~w+DJ0+(u^zB zC}OM-)pDC#oiG>}SPBX%O1?QfZeB|co2@hmeE+LF-2|Ss7ww8nDlO=+#=LopSVH~3 zEC3U@tH#05=6JdwCFcdek|i(UPUu`z*))^;4ZKKX#wEkVR3w}+!%m*Z;eg?{#1Z7p zbeVLiQ~rV2jgqH=j`SY5`si@7+A>Fgm`(e6XjiLD3Jk@b_Mg7J`f>5R#FM{K*}R^7 zFGJkTn#vXflqw7t3X}expo}t&dFh9x5yE64^BeD6P zH<;zWN;$WfvBG|-4f%y^_Hp#Q6-FH2K^V*86H(0-rt@wSdY#N5%~BVCxKNpL_AL*P zC7S%CWkQL@5g8|?(E<#g=>Gnyw@Zxw9ztPAP+v#wo8I2vi4iM=Bs>^ZDYE z!PERFr?^|U_P-gzTpHVO@&oRpRzN0I>!sjsW<%GCB(?$Viw%Am(-z-^lgx)*QQN6- z9Ek$6@6nFI{AV8DB3+a+0_aZVn1U*STch0n{>o?~N6Rch7(7fh_nj?{+FuOZ5CfwY zwK=lnELQ0uyTZq1fn{JtRk$f%P***NUGqrKVk{XWfJGA+aK}d_T&(}^bd)q>!dphl z{9>kqjYQUEr?);sJ%RWGN!N_J<4>n*NDsnqU=I3N zZ8VW)9PRDZettQXtzIO{A^PNIn!#*`!B{O&noy?2sGuv1*^2EgCx@0xzW!9Bs9R;+ z{N`8r$CLpbrHwjsAVJ|s+RNsCX!876Z8@VxvUrU9cz@`a>Rr%&q=p}Bx+IHD&B#gZ zI26{h6VQ2sZen-_FI>Jj%_soAXqsxAe|d!nDNT4VOx)n=D}R@pt5<#p-v1*%0$lh0 
zyqYs^{4#G(Lv<58ptB{2K$XDdJ3azQft?fw$4bL%x2WU z9HRDqS;$r_E#cUe)M7MnO)d9^7&hpgdc2Ap;%O?ih)27~RJzctp&9uqJ~6$hE?Gzj z(oBh(F<=RO&v6=zotpXKK%s};D5FG8CKtRXDX1DYNhz`g7+@)=iroZ5vB0uHOHlnY-9;XTM`HD-4%Iby!rlMqjpRblre5j98UJkj4B`>Kc)t{Ib_koVNo zWN4Y}leiSL+j1A5VIU78goWz$x4o1~`JJbVUSMNmrFZ<;EROyU6U|#y8O~Mc%Dq1?{$J;Vy8oUGg^WK%}PT3OG zT#c49t=VQCaF(~2$H|fr3Z5F;(gKHpdYK}>UX6!HE*&lY+mne!ZfLJeu~NS3ePZE7 zEuEZtv*RaNe7)5K>*D-BTP=FGETQ@%kLlpKYGVmMar1No1PZawM6EabEL#ae-OSfj z%_ud6VOTM@*EgF8Py~;Y6^IP$B*tUX1ms*g;+e#gAPjL5@pqjiq*A}}Y@Bb(WVJP) zUfC=|m6F?W#p`!Qh{8=?$Ijzo^{1f$Hl6Q3J?xjGfq@l+;C`~A_s(`!2IPeOkWnp( zSn&jHjC3GE`4aNQ`T7y^JAsy^c~=(;tK&<+>D55$2;Yuaw06E)F00`|ti@Bn%l2z% z(C%;&PKny3Xmymld<5Y`%;FnYdnl)ws$^^6_|@FK3R?qfHKoz|)7oJK`JDC0m5OH} z{@idxSjb2+gOUsan_+Pfp}~ga&8mR-2wg24VtA!)2~u{jCnYB!i*b&@jmlXu8V9S5LRH zL0Hj-LBi0-;tvi(*AC1NQskkl7)hwSaQG4suvN_9^i})S=6=X3g^J3`8RXJ)z5;V4 z6Y*dljL(nl4ZGOfC@9ixvOs*p?})waxFfYSku6wj+V=`15nxCn02@|e3T4mn_TIeM z9jh}L9yl`&xI^taUQJLS92{9aXD29GvFdAbShj#lNDBu-KSMPFcP~!=OF4`pu~i4= zTa5e{j{q74@dhh<%w^nzyuE+#;s$d5+Z)f)LJ2bJ@G%2|zIBr0sAAy=MUex&5N$%% zUQDk27!Cu$C@dDG1!bYY6WUj=se;}kzk-22YA+2_ig*@iT8?o*G)B;c@(PYJMeOHG zHD+Zw6{&^&(EWXGNScGJ52gz!Wn*s<4)`qaYVCnL8)z>w)q{yw+W4Hn`*{_6%o!+nfHd|FEpHNNsnkVK94x@J6P;ySVVt%dK zzM+fofX0v%Q+Tc^-fK{zvtx-`zO2a^<%rUCI2$`jv-y&&8e(iU$IG(;)Ho4`6s{8R zwl(1X`j~5|ge}x`rbWoES6zP#QtF$OwVhGOK#7NqGxR8EkK8xz|8UF;#fC!jux2fu z=I*abKOHiF13LY*mxO=1dz$~2>j)5|^vgTVs>6awSNZ)ujF9!WcX!k2vv%%8^M+ zN&UpsNJ{mDWQV_a7YKWtY>D5zJU@O~$(OaU5b9j~`i>AkYU?aY%bMF^ln9!YscwDr zP?;@Qq)Ge}2aq+AYkOc@52synWt!i2-w+h&OR4MA(&uMi-hNk(veT*hX1pGd$-k%6 z;IOKQQuFTF7W%lafj?UqF1XSF6=T(|VD`oqaQYL<`s;HLji%lnFgv1reR@MN2z(~$ zfmeoxp&U_FJf%c5iPO8G&Zh@Ff+LwjK?6X(>Xbu_-Tu22MaZmMQv>Ra>~kX)00NOH zymM+mV;O0#h^K4*-vo*4u{adQ9SYQC;B+y@Nvqv|e1m7-XOL65IFohO+ixjJ3 zQ-FQ1?Q_7svM&7^!+*@A@O(lgbGU7jLuo8-B`|!+RnTQHO@NS{q>WAFd|ZJOxsIR}>p0C=-pJy`0C5+dNcO{D??n#6O{UU&0~pb>bCLnqyS!rb_(d zISiUNZk`^Xe~-ZA(nLF}G`b!BGJYI}#hD+*A|*6XxvvjGfStSWqn)QJ7}M$zdsNn1 
z=N(ceIVr!LcHg4$l-&Kl1B*^GFZJjqKmr;VjLTk`>|)VkEHE#Luj2lrT`J?+7lJ>h zJJjJD*ay7YeG>o+<-Eam?mN!lcRP%|Bwn;U5no&Y%}?ws*R3T0@`3&d2p;b?(ErQs zU+zt9|9r{j8j4~Kg2q1rur~vCv8g47!Dh{68`56lFgolm$X|ixyipcszn zB;tFG4TN>WD**!nL~uatzt$CqP2~Une@7rK9WVkhO``1O?OeB55Q)3DY=>+bQ@c~% z;?1pIvIdD^{Ph8(+%;-56#zav@23f8ztx+4`nO1fj(S29IZ7PND=d%>Yj>s zsKZnRqaMq9I(U;**8_v3g)44U=Ku*ikOXnxul?6rLsVLKO8%ej{2C3%+xCSX!^U$J?ZU5BWXChUO(|*PB<_| zB3HcY3V=(7OD|n0cP7)8(>jh%+J~RI9-e(;-`wHXS&q>b&PS(7L^3nPXAR3kV>NGS z#sIWh<01{pSwJ?NjR3?$nL@y*N&n^N(TC1Fo}Zs@+^&GEZH@Vms^7&36Yq;l0E
y92mmmdJvG7VDX>>3eUm;ib{)$OJ9nxPnb{R4nqNZx;&iR;ulNfTrEDoSlb_6C^0x} z2l0mZ!hQmA9(i!GczqX6l+f#)<6guziEaybn0#_Gg63~*ysgl` zDr!iWbZ9LLD>~$h%U1#Mfg*s^Z)%Q>)k?EWSZ_kOd$Ln%@&RG4Ao&A^b3vU^hj6Cg=}V$Or3BsxD!bCX z84nT}rREPAnNT`zZe5bn1S)VWkW1O@K$PA&M!Iu>p^}J~pbPl_v1qWF(d^ZlQLwzQ zh4!4Jy~5%7W^DZL?q-=rtNevcr^z0HQ#;7i7;8l9sX4{2wDh+smQfBC}Q$bhVqql_UzwM-LUqXqzbK z1N)fXT~Mocb?)v*{{3s34-__&$Gs7hsx5?kH>Ouh*kQt~CuuugOa2+zyB<#|ayBDJ ztpMCo?mQl5oT3&>g9Ugc>?g#8pOcP@GY7+TTE*cLxe+@771e|S;V4AmA$UecxPT%| z_3P(iKY++HIMxD8ph8}qR9If}fF1rXL)h&8k!d_>>+N8ROudm<>vjrHe)mZ(c9NSg zY!H!J<*^}auB1qdbc};9()2AFZmRI=u0x-W@x=SXh2z~upzTR%vWeGnv<39a5Y!yd z9TtkR4OEWl!d!@fb|gyA($<@G>gb*?Xg!W4!*<{$nIrco&mc-P;O1ma~ecHBcqLp6J0P{@&2=11sZyCc9u*;;;<4HwVgK{K%&lg z#*VHBdjB)){4S3Lx5Uj@KHh`b4emJr z1nD8cdxpHnxk`Mg^NI97cLHSS1`ZDmHG_{I&^Ao}lB+#*H0}Ihf-y7VT(H{K71Qus zmL&ek2ZLY8Vpy{M<=J`39yhC66w4S31B2i^J%lODvQ?a=7^=F3gehHqagT|CNz}yG zmHCCsN;N}0pNMH9$Lsj-NBp_NUxCEP4>!`lH#te$k+bFkf3`r$leiC2vz=#?__QpM zv`;Yn8=$edtdIk4uT5l9qbioqLR!P6_VoJXTKNifmBd5#-DnO2o#yR# z=fw!~96?&{d~#II?V2JR&#haG@!SW7>*ppk;GdFqF}BOY#>6HPATqoK&vp8X9Id~I zxa>^?-njPHzc=#mt9Pqy2s7)7wZ3^Y5oj1`?Czu#_2LHf_2X7qW{^QIXu1E8 zCW)Woiat;H~=U&Pd?-R(hK^ha*8w`?11gdCj{74Okm#R1H%W6Y^C6QwooiY^YL5l8$c0 zyZ<)J;S-yEK2DpbV+QvCQTP6>8M=k5IcmO^iS;I^XS(9iZ)=b25J>B*L9=F_22cgJ z^I|-3=SkE4<46ERi{1>Fd1c7#_^+KD^hIgEA?b?##jcDd@R~)LN4ze^I6Nn0MHHrT z*&PqNJ{F0LG~vKpXA6QvxAf%j8=>QL&~bjLx1QQekO&x3lW<7l{$qaoB&w!4f4$c(+ChlGF&5U5vg2>~8@ zOZ;D$BP29f&xQ@#c5VazF)J>9cMl~=O*3KYbvu`JCYel5RJKev-y<7dZl1OfOilAh zT9x)MY?hni8gD60VS^PSJC!*WL{$G>Ql20|MTeBB_pcd*vd~L2HKO@b8)v@aqZSB7 zG>;nxi}V%toIKii!q0`_Cd1H@qCui^eOY`ZJORDB zn;UoqN6$2PFRSiTYYE-3dTWg$D~A#AE{9plE^DhV)uA6Fh*r7*hstje(JB;E954fw zWLD!Tg(H!N=TL{tozJtEp%t?@w-2X*-RlySpltI=tZxMH!~Sd88%tJ<)Z05C8`{j-&=b-=*a3ce+}RSfgz&gGtpgZFo-Q|>r(2DBw9mkM zst=HZowq%{jduXKvy5#`nc~YBSG#m&a_8qQ%aM<|MOG>Y*+3xI6zG!A-NbOn?!*Fz229hkOyqF$csp+6?V1ce zJ!elfe4;OEyCgIUqh#-{Bks~Pzq3QVNm&od06n_+ffil~R4;V-ss1}yl?x?{r|Fc7 zSSw2@asIc2kMG{eK!TtUgv_`;5N;1%~rGo&u;M*dgZ7 
zAE#{kATy871X5;qsvlWd0aP>?n9jJbm>@%OF(*dG4H9VFsMA0`_JpGL3_BWcrcSKZ~OyH%7Vy1+U zBBJ*R>y;sT3lmc&pVW!lZf=X;P6#4t!w&3lW=CNQ35OKFCDDn>yL|&F8PRLgzQ_dO zq9=xVaVTzd;W@W4TF{7s0HEQXE8>0L3Yfu=YKSRj#J~SAs5ix3r-c+L9`rJFj@5GU z<~$BzF{EWr){q_x2dmF^k@oKsYi6%t7JH~G{>ys?tsmGeUgs{c#}>ogA6Q^E6Miz< zZ7nSUcNf2GZY%>x)T}f0P!bMgh`L%@Y)Np%ngx*tMp8=5g=3ffO)nz7!$0gm_c^1g zxT#4lv!NP~^JRQIc|+*-g9Y7B<4B(Kx#YA{Tw(kYQ2Xhm_XT`EXbZzyd}yIN`w~Og zH&fYyB}z-d<3)x-UB)~#j05L8y9_7U-?|es%V}sM89YRV&~#(bE|w(khuAd#LbnGw z3|sw<3p|;8UE?5>Oopiv{`dB=L!;>ogpZoVk|?OGI>S77%fy|@u0fYoQHOck-<7i8 z9L$z9-_QM$APQQl=K16~YYlUaiX8CI%8z+cdVe_Y|2LP#+^6{LGQ|P*I!0&?0bg}8 zMrU*#w)yL+x_kvjNacTFiTFzawd2Haz{91-pR@Acj@|Pw8SQiPb}cE zxP_^R4KC_8HO|%kgVV4jXu0_d)lvf3#ey&?dVB9ZgU;j;Z*|RzlOz8V-m1IRh0egc zQX!qX{K9~ti}ARjfOuQK*{d|+kDr^p%v{*NUS>N-Y8<8jjN@&I3R~}dlns2e!B!JM z22-7vvYI*Iu++nNu*XzwLBQ_C2X(cNlo|7^MB5lusMP(%5JM~`^rwoIjYu6 z{aKwOtYZ3qDEsTUsMGI#9G@9L29QpbMnXYCRJuz*(x6L3knT=tDM1OPK|m#?yHmOw zq`PBizW2DhulKs{{(g_oKf5!#%*_3~?{lBH&UMZ~bOj+Q9`I|$FY+&{m+4edz61?B zoox3qKDcZRN53==SfzE1IpOapnyKcYy(`-MerZ<5cGlc>IBVv(+dbJx8u5+Z_Ojr7 zb97ek>-DW-r;+ZQ<;DHegOaiB4K(%?X|zKzF4! 
zIa~+c85>CJV81EAQDK^KL|142^asyZhMEMU(HGLZCDW1oee389N4JrEioB)UqdEts z%7OzEh$IL=#;i@S7XrQn0c@xVg^*@fbV*2oXo6bu^8qM{j-=<1*q_1gPmk}1G~QcR zAGuiFvBK_|rKO-&xF6L^Bl#PmqGCI3jQTf&&263762;y2WB!t-Sw5sZH69q01aV$~ zDgx6~oV+7J+d_AlA7E|`bX{krFCBzs|C*<2>!J$lQ~F-D`-hF0P`(K(o@(3ZuzvC}+6`6C1ll$Adz6 zcfR_wa@yTD#LTysOOrP8#v3~gY`AYOW&3b_s%<(OdQiPox>Sa#%e(qsxDTnv`_>)I z>F`ktbrbsS9#nop<7Pid^IQLj#QL@tI3Qg22&CQk zBdh+GfBghjseisYC<~@YNR*gc&?zLiay$N@2IPHOxxOBMj3UFLsXw-Q`xCse2qM`+ zW~x0)z{yifsR9?j%gH0L@*>DwAT-w{XH?);m$N4Q+W};XYe34gSg4oqhd+zZzz0(; zv^??Y$uZ%-7~;rp^x~!gwE-G80awerKZCX0=ypql%v<@KG*0 zJ>Cr@lN|fIGY&nTKdNImA6*>n>zy5urZaZLnHQ^hPC$jwNXXmf;g!SZR=jlLxbwZb}#95S>pFFedlS2hdcOH zP|?MOTk^)kaUIGj8z#dh7`bnvnhfPM6`815aZ^J;_`z{sZd&Ez3T>>>e%>7+y@dE>3OsSM^$J%CIl1PrJwbe7D+Coi?2-5%;mZ$Q1nIp3wyo zC_-5R?JFPft>Cfm_~>JLJfsby^$c8N?jtA{pC`cYpm!|WeeEQOi>m@%zr-EFHZ^HP|IU97DE=zGDZ$`p&jN9 z7AJG+>`Q59$(0hU{n^8fWo8R`eXr~o#JX)vb%C<}rh*}?@Vxju(&uXnRf4daLrUz_a}b0F`AAf@Vq3*fV4y zdybTgPoOLhPBgod|6@D6jwGpBQP~$EHr(q;WX+X2XXA?}<3Y!@>#mcs*1m12(WI58 z+Pk1ZZ9C}AVB-rCI5n)R- zZ|&?PSuf^a49vf_@y`+#nIF=G;06SXMk7a>=DLba}ZzYeJ{KZ_NPA_JRlELd%g>v5uoepsu&m?{Jx|!^*A<^ z^>OgqX0zo@prBU{^HCqzdD8Df(>fpjhc2w8Lw-40o)o!gc?E$Za)AOw43iHrZkoa* zQG>n^0|g`g@c)M@b$o)npQC0EMojIw$?aom%CytLypJMAI}dui8nDA8HK$T08K{&W zJlmiokB`I>maX8rvsY~-AGS6Hz%v#>a~H}1lz$%1UqV7{h5w71++>ao&^yzx@ubiB zmX@sqOF-_%P7-oAz;QdFo9zAP?X89XsS4A;6~4isu0_Ayij>|LKF*L|miq7C;PnYv z$=C^v8fdO8<}yb*%3Ko%Su)f^$d@Lc55)gVg(<>tMSd1RQa+DaW*{< zrdYZ-i|6^HdJYTc5w5!0+Ph~~OOUy(A2FESugulMUTL`L09W8r9tiklmptML3?md= zD~z1>!uoGA)OWv+9{Jv1HBn+F+KW~%@TW%|wE_*_Kw{X5)vO zzxBb`KH;`mBoY{bWDn=%80BRk2Fa4s{Br5hUF6UbNkv?@r`W!gLS(ic-Y!+ZlK` z#F(L6U^%wzOkOvU_x{eLrwfVrIpfC-bv~Ar zYumd-MUP@n57t61&QBYrh2pnIK|S?YiAC5uMg>~AkN2zw;R3n;;u|z_{G#ctTGmz~ zg;|jHIm&Bo<-M>b(gR^#Su2|(g%Xzc5R`^e3V6Nj)M;1_la!6A@9AM5Ss$8IW$=$| zn0d<}E-p#S&+-VBqT1oC5yN()k60yF%hzIeK5v}DlDEM?kuktHQ#uupmF-g8-hm#Mr%8ps+>e6`@lEjTluI7@!4+>^A=V zpHxT6WDQx=OOgDIeD&Fd@3!qX+)lGTTtp8I-GY-zqRMnDd{6^r5i>y9om|!xXdboeM#Ne>`Y#C$Xc31 
z@yJGcWq9v}vyfidGxjy<_rhn?z!IM=qi0LFQd{Oi>`(TkpJb~OZ&ip)tCmz z1)RzIGdx10?r5m%lx{rGEOo>O{RkSsOsJbINv`1~qvF(@(l#ON2I3q+2?q<~rTr|) zt&0S~EFx{ZY+;70wPu@`S_v&&wD0WP%`F1;8PLbHI64S=)^RMz1#MA5yxHFWBF-^7MZ4{sL#XT*=_Dc@Ik1>e5&F09fyRt_@2oNY zaUQ6`sCj=+SU{BL!h8zf%KnNC**!+v(nq4yvDcM?S$5_Yht8eiR(R!+V?7t98Df^c&c^%(Gy=cu*_)Oe<&v_7Y=s7T!v$K!yD zRZUs6ak0`z?N+{T&iGLq{x9N=SAzPS4>biqG2_$D_VjovNPWt@>#R+WhmL%Svh4gF zRl<`sn@gjlu@QqX{4OYp^7KXZPg2R%j=67A?6Wav!Wf!Jki70aL-nIIoDLMm(I7D} zph#t0LLmJ~bLkVP9orG@QBm)XgGXI63xf0XFHP(YntXZ1Jbi0e4cQOjbtMKvgs68Ck2PmEP<|V*fT0F|l_uIjM>52}x>wbWYQ#;JX z!>Uu}14F}{jW3xNiRZPONqmqEcY^wdfoj50-F@^oCZH!teQpJ!bo@z`o;Izdn6n$;@*W_r6I-lf@cuR zgv#SybRtS0c1tnt-SFkhUdm~K8-8N3wZWfqzUm;f>sUuaqgr5}%`Jn2j#}G2%%+_; zBy;lx?AgOc&^aO5F7_F%=P@X!QcdUVcnp~<<+oc6Hti-?DY2L);ZlA0Fo3&a+4-vm zg5Gvx2)ikfIg~~1ZoBH@NZRN3tZuxH6%z*0Y&w#lWv~P!mfwl?HZZ_on((6!^JmDB zzM#n&_0sIAtujZ)^Qevw1`}J8HMnUqks@8>qm5QgCf=c+K}Kn=iByM(Bk&-D`3+A6 zm>U)FlvyQNzon}+QK%-sA-DSEHlWt|0?J5TOAES4oyfx|7M@B9oC%Odk9}Yk!K+(ao`23&IBJeDSfZ-@;4QNX#Ovj$@CXeg*e>Fi5?ocwBlZJI%4Ay!&Do%e7OtJwtnzhfG}$ofpdI zq%z3qNae)w;FFNB>g2O&mj(b6thexe@Mxzc(s3m%T7n4&8=IKJ7_m9&IODr(9S7QX z;pA5CV%9g**N$0VYE{^$u465P)44o5a&(zn-g&25{sMosW=uHebh5&uX_vS?oW|`6 zvXeJRdo=?x5eMj^2*`7!5^sp=faj)IXoTpug!>4RZlHFt1w0db`MWl8m0Hqc@E zjz)DL0e!-|6Ovn+s6d=vPny%C3%XqJ+pmwT;Lw9=Kug=LDXq{tdmmq4Y1k~MVONqw z5EpWmo}T@DD+h~OJ{()y3}PX57rzx0hz9_trPbcHtXHr3%p}qE3`6kTG%F9x^v>IvMT9>R=|*n^hrMsgy@QUL1C_p+A zoFNF{2EXNQgld6EsfpD>AeJD&4p!N^()Z(esg=n@E&=$@&WL4L6T^pKo^N}n(WuKCw){E;-nSp4QzK)J9~N)`Jw~Yf=@I=57U(|8!!<+9tnynDV%E`y~HC1~qH0B`npf!v5%Zsnc=sgW=74 zFAi3pP9ArV$w%WPF5|LAocS#GURas)8?( z4U9RBRA}5to}Pg5z-M|GneY1se-w6`#jAzplaj^yM?S!wy`hfeve3p4S1&RS1g?kl zXYn_JQ7FN%k+9c?1>Ma_U?N`L_{MC^$w7&ZQt>065)I>d^sQ8fttncj(JG3OvIc@$ z)dZLH&-262jT(BkEgvOvpa{5BGV`?JQ1Uaqe)f(bHwf}_u!`aOeP?3MmtDfVm$s$O z8@It2pF6M-&Tf(><>7D3+gEXLB%^b*zFF4M@(lOXdH2*@x;zsgw+E^K9n>>p0u~rC zM@S0cUQ&^J0ev$7SKi-Je(2Jj$QQO~(E~livR-QQ9nAC)uY;jACOhvoN`uA@7&o{s zg26V-0xJn-dHef>e`*gh=$(}?>QApa6L`_8dzF%3Ra5sxlVTvC=h@@-rgwhSZQft$ 
z3yVWx)2s|&jjzx>-i=&^$*z;;7ylDq^m0_7E=FVLvk))ZDJiB{w77USfETdi{@DmE zvX%~+TnOJ@>5`$oAp8SuZH?l`fj$zYb6z~_BiG&Wcx!|`iOK@ay3Zg_##WEfa<_e> zcAVA^n3P${S-T>=Imv;X0|HEer+74VG_)h2v@DlNDaFU^!c!=%Yo4LD8p{;85lhGCQ-4kcL{?25105#?bP zpG|^pNhdJBG4!gdTb`~F0+e}HJycvO!C(>ZE6MKODwcbT*scZZH=M$Ja|%1u?&^0Z z2*mKe`dax6xG5bU@5|QTSB^_L8m^MU?Kt0McDg**6YI&f&~b>Vo4(9L%_HZ@mu?lq zd@U79E^%sa2Kq0m9vz?5n>+m2|IqI^7eX>}(*EGpQmYajy)E5bRxG(TP{N|+win!% zKP*@@xv2nqFXGG-r-GtpV{{G&0~j?t{Fz|YQp0WwCan{+FAXdGhC5(e^iKA>!+Gs) zx7FG6vEzYe_zgf~0c^$OckB(X3JC&mk$x#|Rg&F9CZyxqF>VL2g=)B6g_9E|{HbiV!Tv+OIFBrMdM zY<@&>To#o1hTW{8^yTTmS)dbDu68}P1pQV)MFb`xgS-!}mhvWEfYt;~omGu zzA))84=JQG?27Bd!E}Ei%+dWAOhK?^pIRH%AM+^#~paX^e>OdO^II<_gTc{Xq^Q!>EHjdZsx+H{? z1}8+-2sMvu3fW;Gqn6D#vCyk*qTDtaQmhKx18WiE-!RIDg~%=5D}KDL3}xxXVKR#$ zC`BSEwnm_%781!3pQ;}b<3VCVm392Y>Ap0wHHJzyzEOj&|dYlEG9Oy(&XhpMX zi2xqS&hb)WLwC1=&)qkYpoE_Q8mxzLO|qAumVw&Ix*hs*0dg7!iz(>_rCEbQIpWzL z@8fUU(%!pAL=|#D2K<3=xUQ|Iq&PRd4v84PGhd~IBk#Tg1kjxV0Vyl_(dgaQxLbr5 z7pHjcmu^DfEZ=~>jGi=jcc58(i|#xMWR94+M8$`f)t3o)0l0LY9GW-9L?ejT3O%L; ztt^!wdpn19GYwMgV5%sH(y(#-OX3jA-}D``ch-*9a%&oU9~kvON>DoBMhvbgpj#EA zj-n#SNQWI5_j>=1E?@A0jk%fckG3z7Y&w(}7BOJ>LmT$t+NJ>0)AE~}K0a?+C7>PC(>mf5 z<+XmeRG`!J#R!F8p4nOyaJg^F;>1o~z+o{cI8;>YV`VoCbeI?mk@Hy9?;BII@N0@E zFEBq#Qc4|G1a*~`e*V%-VnglR)ujp98 zMV*)7PHG_?ZS#pz8a)Oz1wTXA)A$kzyaG|tyWJ|czD(!oiS10eoKKuiW0QWs0=mry zln`%NOqXZFRLQkeoYnuHL;RVB5q<)8?h7Pc1>~5qk&e$51CYuI09mx}DCs=4v(i$p z?#~88(DFcKo92$a^7x0XA00areCu#MkjF*~Cy&Nk!u2i;uwqY@@}FF5tC4WHG2TTE8kutIojJb3 zMax6_Ja(}Uj-2{QDi5BixbF8#MQ{e2mAP+}msUlwo;OcUsTC{}n9%c=O^y|<-9)9u z+RMbgcK4yRH;30wqQ|-K(wk^V1A79GjXY`|s65gL<&TYq>%OQm9pwVCR4(6=UL5uV zfp5P-F4P#vtV%~+rOty2;^)&ZeP@^Ixf6vp$#pfk0={g=Ev6sD6b~XmCgW%eDCg6y zbz24cw?B}G%#ZR%%;Z6+M`#BC=5;K1$_%Q>@NYf-RM?nUOTF(}LRuZiM~a3yZxhJZ zqL}{z<7|5P(!X9oAcV?8(-+Lde!iD3_gV4V>(WMYdvs*4Hzhi$=d+)r`yv=}x$3jj zIA_*kMN#vBKa>nw1&tKGDgYGW(}tcB3k%1`M7z87VD#FKXqh!AU`(mJ?DePXDUed& zCdX;}BC+&zXXy?!Q>~vNhTn;xFI}4MHGWMG=N8#s#QB!TC1H!&pz+}Lm?zQwV!7IGilvq! 
zU~rGsob!_$&C&SBX01JXG530(^XLkD0HO!zOmPkLix)N>s=SsC`1<~kp=2>B*S*dX zkVhrSWd_`hzfw{E%XaYodCz`k^!|3wfE8t=NFEGuKxJ|Vqu_4<5^%og+QA0%ERMuA zzbkQq`n*#cgDqNHU!P{`g5pjljpD5N@sAw(>D+Z!u*p5WP_6`M1o}GI0>&bR%i(%m zQKkx5J`K_BCvY4KDsdPfCp6Ui2IYa`)3facgwb$Lw$GJEANmMdRf{dASz*sEDv#gt zsmLR{L^iSw9gl^?#&9mlW~qK7*C;k^!WDA$2bdu(Ex+cfd4ZGkRPK#o3+ldhEWolL zJZbgZJ80bHt~T|wZxnPpeX(zJe1{@n!Bo1WK9eLzQDBhsAS~@88Wsm z*35XKiDxW2q2Pk2JQ~gCwmL&S*>}M`KiRJb)stI`B6YcTYeT9<&e`iyLsc#azr@6? zOAnGjZ}dya30lB<6KEXP81C6S`bPWKno%W<$``YCyjHN!fMiTf89w5^tvqZBw+#__y1>Y_Il@xI4!5Q0*Nn;8{0 zOE_0>$OCrf7+d%5XR0Ji9;_0md!5RvqWGiGhoR6dAC4-Val?JblhyeS(+u1(}_YLCAhA*n;zqu?2j;|#*_@kjFQm^UXdL_pm>3-nPT zH=Aa)HQL6wUF~ipbECu|7^XqPa!*{Gsk)nhgwhW*%Mz`p9Lm%n1u2*)lSEqLFuUD> zTdt>*4xp|m*Hpn@HS^h>@a6^0x%+Ow@LBmSe2=^#BH}fYXN98uWmvcvvGYy;aGmG# z<0XSna_^q4w$E7$&Qy!}<4AxaTjW?sTG^CW!Sz6dW~uF3zDYLUxs7qew(Op zE|;PahJtYjAN4vi66;&WPpRtRO*a-RX30o7hF^u)Rl5VX`uyZDa^mT$zE2T&K6Fwc z`97>QM9SHj!#U4K`_e{=-S`AAU9U$lDueA@T*se{6_-t>0XS3$Xm|Zf!KZZ z77ruZ1K!-D3t6_IW>9*^yp*P~(b)E`)M+ou=WZOQVd#1{zh;9Uwiwu@x1buMWV_J} z_AOb4LHz>`CM$;3dHdH2X&HZdw8A_wlzon&K(+}k8Q0XNn?|!_+f7mT_8LXmX<^yVVgYt6zk0fNcRvrsR_JDw7ba9t4nQ|^~Oo0-N-QWPa6 zsDTv3;Q5>-y-4;BD9i7#sqU^h=A8^+IqP%%BVBNfUWQ02fxoP&?FgKJ^hhl{ur`7`kK`Uu=(Vd8)#uS>sLtN^sxJr5`8+ zyy<#YqjV$h zN*IIKSAzM8c9lK$6b?FI>At19&)}2#MNH8W4`zWrMfd$Y)+%NE7h>BVDf`;g|FJk6 zmcpMw*LzPD!$3YB2J|C=DBCNF$Xb0+R5GFu`{FPC9@WteK)qu1R?fUplmn`pb9sE? 
zko-8wMJjQ4h3(3l^#mSS$xn~Ml|#WrzqMA>aSc!umvC&BZw+wM)mt5QT=Vgjpw0F$ zKFT^O@?ZI&#Y=aNmgqgS9w1<773}XOc@@kM9DE${==qwBY*#maEAR zlf!1{AI+4tK9KuZSiiCPtF4ou_6M=4NGCjnd|xUkcj5yWb$InkT<^t5(13c4!i22!*A6D5xdb{-g(G5_L;J1&R#I#zreE5k-&{;6- zIxKvA?~07%9j6JN+D~pdwwV0(0J3+TwCR+W@&zVe!4K#U1;0#6umd+>XKxq$dC{1_ zp2(FLN&F37{L6Ry$=P}P0uX^v0H5Y2-+tkYG?FwYoz+lOCLIQ0vo6m%s_SY&CnkK* z!RZ=PT1LBB&JD;q!z<0tt*-k=x9_Ckd3Ht3oAIN=g}nzau|&mP`cMmw!2cqwae`Ks$z=d{dfT!Gebyk-|+=-Ed}n1 z3jCde_{U>V;~<$aFH1&s3pm7600@^84On^H%dP@x1OXDraF5m$Ni@cSF(g<%n!!~o zk-`UbVB%K`mnm?`;h1?O7H8GF;Vl2#O8@+^Mvb2&zICGN^}NwrKD}-W-)UYJyu!(I zIbi z6!z$1G-FvmxjPf>K~A-mx+NN66Y`W*l|KyjB5JlS3VfJJsu{x;o!+;gAD|&=Jqk>2 z=?8$y6AmN{CFke$UQiGR`- z?Yh^FBZb$I5WKz9+9+VNB45hythN2uE+wJHaLHas(6X=)0q9H;3_q$PQh&Meh9_TC ztYK$Z2mLL785>A12ZM|vCLgO2-}(dYnC|u&&xUato+>~mBFFKcKblQlvcc5746EYv zdY75CcYY}#dl2QO@X+(74;&MgPj{6LGv{&*GqlUjmfRuUO0o(iWp~ay^uHga9SAcCuwTx7aLt;8u1I&?h1n|wxSso?{ac@pDsPFJh3z8vFqBa zOw7>!v^KtUtY*gM5;j>fSAkI38qU8GQ)H#1#qLq6K!=`iUDx$0-sdm~RMjnx2Mbn) zM-6FjV()>Y;t4~bzx*Sq2X=r{%d%|kr78@0wj3z6gvAJ6l+t80NRUuNn0J@^9e(-1 z&(Bj+!BBMvAnh%WJ`rcL3PXlg=^kC;kLoTmuquw` z585QEwSx1~A9=l;QFWywnBNXwz5oVNm#zuuPO*9ZIgv#ef{=(^81I|k1&%}nms z@uP_tm=x7d4DHPyCsfpIWe{kNc52&+Lb3)W-(OH$dV2aQ z9tzoECBbWBDB;we5ZFKd5dyYo4l_@(Tck_pDioIYJeaeJCC}5Xb>lWOR-Gx|&>f0u zDie%l6h+cdqC?#FU>#Tk0PtiF_NAC0vD)SBseJ^V1_^s<4=j{)RgHLMj45_tpY@ ziE9{cwwP`oObt74CrYT0aF7;aqv98mbu59MCmlwE&J^Y8_RQ>j=S$hsp*sjS9V>F| z#QKDG!OO=s+M#d$@$|KVUXrM~dU)S)S>Oekxvr#sqi)8Y9Gv(@THvRKC^Ve?JPD~G z`n1$7PZ_vFnvUJNf3tHyq7D*KK` zafnOA)67t%e4Q#mtX#<}y8hQ+r)_@C_40zoyB+fIF2xzHzJDy=OImZ(eR5b*lMT91 z26JDOcIu=5Q`Q;o2ty>n?kLdWc-Eq#!fP2k8(HD^3V;xEhEbYX5JM|=S7(ECa=5CF z?|c+6iSqOF(;Qy^&U}hhI5kC?ty80IGL+SesJTR;u<4L_8YNnohViZevsRMTgqyu5 zqpOS6?YLd1SIsa!?WXjBipPPx+ls8t*DJa?_Xqb3^fWS;`*D9=lvgDNo+rNNf|-0@ z!VnQP#H7k;whPNq@G=B9t4{f2_=;iw_# zM~uE%X*?)HAE;#qJkx({_7{9=2puH$-`lbuTd~=L!IQ3Y411gK4YeRcFMxJrPY(pGz`SN~Ya2 zt9J);vHdMat^JD!W=*vBGI+u8(EFhAm6`MNcka9IE{Aa4kLa?stJFbrl_c|0z3hsy zb!-bqZ*a)!T2@Wy6Hj%ks`&Q3soZr;JeT1 
zb-7HRYP)TFQ&~bs3KHFtLxnE5H>Rp&!vqdXH>(il0KSpgI*D~y@$lHunc$c{-*i~p z*m49^tFd*_{JBt7*&B>W|ZW16(!O&Rf60czh#gpv=;#M0q`H8_~e+H*ZRSKSAXO_gW-c z^UT_9@oQ$`d;lF$WV^YVXoVZ1V!FSb&3(ZqN{pW!ayr`Wo)g6Cyve>Fsp76|;k*}X z3;yqypoCLbiZatLly2*|<$GOxTIYFyL!`^L|a?6C(<;9j@jx1Z%6RxR!PJuNw($XdH_)Bp%`*T#64{NHH zC@0VMVv9;Fas|&<6e{#RPQGW_js{$RF*<0_@tY%HOy3g#=`i;6)uYC0$hr~@h|$?5 zS|}ZEv5MI!QG-i@Jt5b1i#|$|qkDY=+YmzoCwd6Xq_DS~iV3amC3kVT+OYN>HmNYZ z^m-Iup-0OVkN& zXOPD*@k_49+>MiS6x>w_HrH^CK1;yea9*I#?TJ*;{*rk1B3m1=6ZLJ=7 z?2w*JAw0Nekb4Y%n93G9a;IeC>*1ljoxQ#BV#l@B)%5{6uBp$Lb}KGLv&(dmpR85l z4-km8%?gi99vj28`yMAk2Ztq9Po*gA3a7xeSXrAsi!ya;MpPrL-FDxptP&<3`GoQ; zl93MC*%qz@(1rG(;ATDkGLQ}?vX?^E_xlwFHKq?AxNWTFR?w4HR8$;z;cDg}EmQf7 z-;Mpsr7B=v@s=c+?F<&4Cq*^ImUz+%P=X)LTa7He(eg8k zFBWsBlW{$c8a@u)ms28dyI8swH8!#9wG@BJa!1Gg6INq$`9=k7{Vq@mD$BL>cnUlV zH{Dh)&!`0Eu66e;U98xgWY%2V5HL8nEwHUyqJ1_?x)veJ+6&rA4-S{8v}9|8k<=t> zAr`KwIc0~_`#@ME5aqtQmuKCVvoQA4%8XZ8QV+}r^3$YlUll8fn+xHZ`eE!@b=+yu zJ6v-S0=7O!2I!=z3^3?m^|;%E2CC}4M49oc8>OxWo)wZ6xy0qxj&z^hn_ov!v(v*4 zX%zYfC}JU|G66hBL0BV8FF_~rlU0@KPoy>TO`z96)c1w_%qoNA5r^u%NwF-b{TIQb zI^5MIuIVhSM4NDsu-(W3?M_LicMeNdzbbf)O?tG+YNwzN*{$hZ9wZ*D8|r*U>Oga+ zdN$V?tdpj0kMw?dfu|O(@3f%yA=Ue79)p?M2e;i;A1B(k6la#N$5<(2tYt-gxP#d4 z5wk}d3E`a!?mvhssU8F$=-^;J=n-491GlaJbm~%YGH)mcm@$)`S8|A7VgN5c%zL~L zth3teO#+|dB3|8u*Gc(BLY8(K-S3uMHZQ6ZWh@Q2TsqDC*W8{`Ze7L+ts<;|r@XvB zH;+aD0)!sj{=w&%Bhd(?qg)5CeZzykd@zre!qnFs61D9$FOpVU5Mbfu`yNndmk0Mbc4<6XbE zo$NnVSph@#hRT)_xLiXMH3IBTR-DguDn*7hU9wp{whT^MrG!%Mn_OtHIf~F%=0iGG z-<)3{aKYVLQ7^asvRQKpophX2k9S)9;5-cP4aK?}zDY7-tjYl`YLRqM08qlx-AD&v7fo-88?(Gp-&x%ap^PWB!i& zDS6AAl+yALzY_b(3oocv&C3vH7nj1E3JtAztviBmY}wdgYr{hP6YtTm7GEuPd@%0{ zb12wuKP*6KRON)Zoz3Nt4EuIFZMs{Wk4On-fmTqOH7A1 zyHvC7+i-fZIP9?cML|HDv}TsTZOdP1;CLxM(?WYst~2hw2GvQAzun+d73{6kgv$f- zY|U@V7B1uClQt&97vhGAZpBmEeq75RO>!u-VLN@Ym(hZ|;bBG_V~NO170wdVZk!M@ zEc9D=@}Vb-P=1ZNf=rOkxpvd zrm{?+(@xJF&ZBzL${sDdgAd)VlHLVE2UMm zozncS_}Olhy^FhOANh(9|(NRFyctN5>?7$7!I z??ws7tv}p#N8JTsM%30$vK{=9I&~)W!PKt5JuQg(Y-}K?lEit;H5dB|I%zc*YYxVM 
zVcxJ7w6!swDNuoE%}H5b8_x{6!L{11bCU1A+h4?MKX}{0ckcf5nICKUav7$#FmSm( z=TsePtZqyux3g9}il?AB=?(M98m_uvn{X1^2@G2y&UN%%?228Yw3sTCo-RLBcRSNh z?Ac4abgjBPkFNNrV=n>P3MPV;Z>{A{AG2z?9jMsWp^<*Bs+!aaIqng|UGY|NRjfH` z>I6RFmrR;x$1{Hac>daXwCnhuz9LLQdDI;a%=Gx4a_;0jFqji5@GVc_y@IF=w}c@N zq=lzKg|Shy{9^7zJ2v9W78-6*N-VVcDr)tY4*+j2N2>zqEnp|U8suP}Jg-X@5ja_< z+Lu~N$W$wMxl%lCF&LFSK54QP=axP;HfB2~Cp7#fHO#ijVc25iGcctqg^v{;!X8C2Y*1C~ zbg2J75RT871AGvyJNvW{{RoL@*jZvpk-{rT&C`BAIoRB zX~|c1!kk<1#-8REbKVK|7zDJ7ab<<&qev5O)@TGSBonxh0lMp`21 z$I#jt5W-U=+}ILLx&_?nkJckb_kIn+ke}~ShK0auv#zL;UCC$fFt!|Zt8t!QwMX+3 z*lIhp`JaLQ9;snpFg)?Vg+C?duzKlCNj5?eB`YnRGrsK`kik%N|8yEf6CjrALDu}g zMz=qA#!C#TmR^de;Gtro!1BZ>m=C3rJ|LInv~R@sf1>frBmLncE80L|0CO00RkjjF zl>{-OzB;ucHTWqQ_5-bdfC9){V|^P4A0(>(i*$kdBOnewGq3Bao#%cF{sJ9ZzSepiL<_v|_u=V@zQlPw z?H3v5GyrSV%Yah+>f8PNYHK=xQ4;HxeF(nyDGfZMy0ha7IV{4hb?&A;O^jJfd0K!IHB26pH$}D# znh|8YnNw9FQYq=(9ts^6VozEd&Qh|l2ph6<$jmGVN|yYGPl0@EL;&^=mdEhn>%D?D z(#gi<-P4K;8 z&8vA2h(k&U_xZnw1JddNUF6!Ws6Kof{C)P_h?(WT>;*)Xw5KIgK z;$znml|Zw7(`#qdQwb$l+WTK5lzk9+hg^je6|+x?kG$4{tXwqL{t%z5U!V^LIfLiu z6p@0<@c&nEf!OfGf4rC31a?E+RComkrVnoMe~Bz|r9;a+Kso^`{w&xkJsJ*713Lrk zU$#y-6^U=6RkLXvz6Fg^1a{-7yiNeFIXE0!K+~fLHqdX^#`<5bE>!yeNkM2%ZXz9c zs!xwmg2SM&1Mu9LVVs5{)TBtYMRw)7*Y7(>r2hBd7gJzo*>whN04CSGZD}j`m;|NU zN$Bq%n-}{TG#_gn;}2}%^m86$xad=N-TS_;6A!_h6PI3{@l39I?yp>*yy%wd`1H|2?2zNIkb zn5WKUQ{;N+3J>S1r6R%MwasTLhAPP|FT@S)D3uTGvTb=zet2v+fQ%ARi~PY>HopU{Qb6hS)l@%97X7-<=sR&Y&q4tT4XN~ zNKmLeVGvrNA9ff|e!uo8fLf6X2q>tzCqcc=08&Bh&ho$d0DqVY_BX)1%2Myv2`5p4 z-91oTB!}Nq1ee(BDDL{7ivI7b_#qAc3ZRfIe>1e$NVJ<;Fyr4;DK&&o*pnXbSn+S` zv8MS06JK~9V}$zqz3b_N9aO_u3d?oelakaC%-blVq;rR*L-%X*PjvLLsiCT}3KqQ4 zbl~yGv9HL9=zax)oUR@-_b;af4+dh|Ob{^FiLWh{L;}`cq&QCozXzNO)pu8kI>mq2 z{_21DM!p$bTJ7QB&qA+wr&9xcg{u4bPRCBH7BxjL^t7e)-tM(qr|9K<0|F1{pCYy= zOx!=-4y1GXGVh=KyxZnAUh}HxZt!YfmLd6I{mflP;r4fV!Ag?*)Y1V@v)JKzm!2| zJcpKI;5N#vjA%0OpFo@X#p49`Tlg{j9xbHc zx~?8s>GW|nz!wpjh-`Beypg?xOyb4j1jcT}#JQVLBy-97%iJ=v!_xA}OCFt6H!3OY 
zy!4p#d--TcrK^j#0)r6}Q$u1u6P*$(B@uY0?Y(D6cw6md`tH_cl6V&vgF*vPTB-&hV2kS zpSo5!i&E$Y+7r)PJ_^~CyF0g7?1$(~rshbz@nDycbN5 z$H*?c{`yyy+d~V)2Ea1mUmkH;gM@6<9yG-XKA;R17WI)la}ZS-E7=vRion-BiK0s2 z`t@}OsOo9fhh4;GUzsfF%G0HUw%y1#MPtxi@WS$(Tv|%G}~G|EF>%lKB|{C=w=g zJwUpAdi!x?UMFy%*{zEFO`S?!bi5newTL{=&s4gNcw9oRx$v?je{N(?TA4?iT!j)> zh>Fp?=4RleU|Va8)hx{K{~;^9>fNTMu1>x4DPP%FI%V`#%HwC*J+q^ng>Htq?ge_@ zlrGN;NFJFCn}7%G*5sDuoTVwb3{{T4Fqup}2}`_qgk{(@z^t>)pPj%bRx;sUA?VUC zaMHgNmxh&CE>kcjwh{Pv@~qZb%cflmbBXmy48tzMtx|m~y6e|?1y21IW8f~tM)rnZ zvc@d?X&dGEP?QkAB}QYC`VuoddrBqkP$k>g)8Kwx^K~<0&mA@4BtdYV2Ud$L@OzJH zMQ@!ZI+JLI4B`Pt0Gp7p%gYl@S1jU+!~XKM_Rya8E+Hu?`=VQ{xW+afHJdJu-tH#_ z zC8ZVV6r{Twe)n>pea<%C^LxKBzVVIkk3C?ku%73RIj?!mYtDJ{QRUs+fYy;zeF1%; zQkumcwb_pxhO!f!Le&x<>X8+j&$oy6HuJ+a?i6FCfC0Idoof34^AM}n&H~LYZV>y zIcoyeK-Ws+Gci&sGjKnJqUmkO<)lXQLsxfEpru4|IhD|6yq0SITAbz0JvnI>opi~2xmGKMD>|GG#qUeykL2h^3_j^|4XqIz}2>YqDH0K}9&s$oq=8Cp1Ct_yc92P!}MY zcTjE!@LBtFwQ*)gvrq$c0S%NP#wC#hHsvG$CWFw>-hH9!qDJJ!UBy%lWq5!WE2J3c z<=#C}p{l6OX}DCV^DdW7H#5szQPoQ@9h)sWO$&8VRxIz5RvoWnbypR+JR7s(vBxUT zgv%2C#C`*Pb-rmIR6dDA?%udu;xv)9f<=gzq6KM#A8kiAw%zExyaI<>bu#x&^>+k! 
zU;Jw8DqsA-IMqFma%k!RpzS``9^gf~4S*%LheYH6BXsOm$eH-NXd$^6EyRn9Vr)&5 zc>9y42X-6J2i1(Fliu@c%pBV3aJg6a2`%19X`GXx7+NWysV-eYvU6MHMg6*j%;*)sOj$9q?o%y46;Wt7N1qYOFT(^amSZuAigt?bG{tM}n(VtfHs!>FD*_FpPo8UKT&g zng@`$C#e{ncp_?rqGiN)9ta(Dl(}JJT6Op7Z!Tr@>JBw#ODCP75cZ{(bV4h>e&nXbI{8X6XD7LKccv#S7XELGJ*1wt(f_uv2&2 zk?vI2)IR+twTg2!rx?cPro{eZ>e^nxUE@OR=^u8}t8FKWF9U93txsme?wPOk%tsEL z$9#>GVqNy!E!yor9o^%kRTRilo9IgA-drD9yp)NuUHeQ`?J$xvZEz6Zy_&!+Z)ay0 zV{!A10|o=-qYv#ZnzJdtJTfXLJaMRH>DpYVpjEXQKZIGq*1q9XWaDpk(A>V8jiN0dSAost|u=q^F$9@7?1Kj;@3hbJ>!MZtT45T6n<1DuSsVX-ap9 zGp{16%>VX7ULo&+nR?v=W$ks>+O}gJGKQQ72VplSK*lCiy_1 zuIJZA8>PX9@z>Q=4D0JkO4h0jdFP0NZp1O_#ncrO%dy=g8o%1dMEE^pf+gCte;GFi zu*+sRI%^7*94NR(jnjAH;)qN6^+?GF!k8kz%E}R2nJ<4zEwBBwCvkv<&EVRN+rIk+ zp{gyA-+Gvng3qFQ{EL>|0|K4n*me!ULQ>TTt!rhBmaa{=JQ%x2(jTlc<*qW(k8^1a z$2EFYdW=wRFveH8Dy9uPOy~O|t9a#hU28RXX2c>ww?h3Blpojy?gk0`#itb`R)7+n zIWRe)0AYY5(($H(Gu~g{kg)ae9<85I$phA&d?f9#p&e)B_a-%uzO94{97xj(spqm8 z=A3L~h!cBC?!N!3&#_nsHL8AOOqcUOn)$h<91vRq2fbQM^|-BF&#v@lzvkEx*UOyM z2t1*3FHsvmpD4=lVJ26X7kTvU>PEd>4VElFJ)u9$5>AakXX07BI`Kk#q>-M}fs0nS zlT(8X0#v4Br1Z0EgrzULH5oOXnB)ncq{h(3#nYn4$avM*4F*}H7TyEECHh`Hr)t@F zjEpj8-l0l5OuV9%^=kL1WYkR4hb~NTM%9q-ivggsQ+A)md`zdE`B=rNOsYvFfGKV$ zkPy@6rp#~M*B-7Wr+;ebZHj>qP%ZSkUJThZ%YKe+hyt#{urH-zw{sx`A zjf1ANm#pkN`8BVjH_f=waB=M(&9u3hIuYPoX1+{!kBf3lq>eY~FfYQO+yMhafA zz6H;IGbBYpQ3-~uaX6x@u~s0MzCl*A9J`cOSfNRgWj5IG^#%&*LzN9m61AiDi1ErD zV|~XjgO#?M4?W7%SMyl|wThtTxvq2{yc?=}2K^14KMXg|cqcWT4FKoacS5FS9 zYz`*1S7o|(TLy;E_69q@wV_XPZ>wNoVq1+Ey^S1utcQBoIW2Z^+~iunvw6npwdw8Q zyZo_&R@Rp5zI#;y70S?TVV0@8oUnyZ|d7-kjo3GTjK+4haSVA_(69p~8{ zN}}in{K<4vX^rl`K>lX3hQy~sSt;Ra>YK6zZP63$vW5Q}`efwwsMCN6+HrM{QC*?g zpuqk|i{$iNmei6rzWJBCFE82I-_BfoL%Z1B^dh_+ZE#R%u{v5;!P!!@wXyPzMVG6n zKqB12#ahv@+4pRls&Y3$J#j|)%k-u1l*>_LVzXsgYW=y)3AjKM4=WFwt{tQ`pB9ZT zvba<=mUFdMJTN>Hc5NfQ<{AD=Ja!7L!h}r%8Rl{vZ;x+zM|IT#4vrp_;};fNzO+m# zn$)OBq+`Wc7;NY+2X-YifC0Z$sp~Q~6&c3GAHLeqSn{rXSnp0R~t6*o(`FQ1zNVB?13npl0=1( za%}Nh?0|2d^DOSy`ol>!gONHYTxaWG7C;RIroolgpKh_F3FzX8oF;5~e^++xNy-)S 
za$7sk6_O-Qb1aNpXRgb>&b_%g=r=V{RMmVO!799`C3N&cm9wDe?OBcONG#8EzMa?M z>U*>BgI$Z%cQocM)=RK--`$3g{j4hYRR-zaFJR!~U`n^m)_{*t!+~MVbcVF2JNVz% zc+gUfRGa;u<%xIdf8>=|0xjj}P`WlJj?96Bh~Vq%`G9yKM{|`8p+SP9E2q z_q@r?WfRd+)NRzcXVE)cbTpky1-kI?FZOA^^xUmODXhvRYy4SafzGVd4Pj;uvR}w5 zBuW@M^HjlN?^k7D{)EGL0Q2rYM4P+-3epPW-c|pxGU+pgNK&AjK6hy>1}?^RVet{_ zS0K$Syj)B!<3m=-e(=6hg-tO7+W(3rd~~Qru0&`{EyeRYUX`Q-V@=iC5*g1*_sEsn z!bv!1sa?hN;}6>W7AMiWDvHg^zV``e{nP%|0$lCyO)Ri!W%hyLZvjG{2XE+CZZJi~ z2pTrelRe4t>N zS&9#R1<1@KT51Pr>MigR9#l8S z4Y#FrxV#eG;%SPsuWB#%7AJ;;eIg6*8rBKYo`qL&q81VEPWny$lX(4+my-Vx9SkRr z?K3*SpN{PMc%Wzmxb4=MUEg%HV8xdYChsC!tun~ml2_G^7hHS8@AXX(d7*rGYqIDH z*pAidlneAlRwV+u#Ia^ozrFQ)2Va984-sdde$zi%);)4%deb1f@h&%#R-@ECzE7|0 z;nn>2dAQE_!rtMngF%hP1d2(Um~0u|NfW(@ENYX^li<%>KLgYNIHin0l7@`53eU0N znDqsSDj>BNAcCAyGT>Y-O94;v?jz@LrHf2S?W}i7rDLk6r{}?Z!`2`h)v`v%(>BHl zk&##T_c9wj&rWjJ7{V8=Y-V0I`7KSIUIs=n>Xm94lNUU?%2zart+D&ktso+qD|~TZ zsMaUG$$h?`i=?t(9eL9`z+s7^S;H{UiC5@n;{p^PMDT!JXNs)=U!&;EjH~6J380_uE;99=oUW(m z&{Tum!l8m>_5BQe1|lr0=-~ZfiA_?JgUUcpKy|2;#??I?D0KyTZp(TDG8DzjD(*veSS%%@HG}mAaR-N<^ zPxqDatIIc(q%s7q58X9G#|&NDGz=cT1Hr)O+4QH?F}7tbUvcuen)4Az_J6X>z@ZX| z{@@NFgUPIm;9KTvfui`hdGP?GHNNFje2g)=pmk`fX^#9{NYP9;m0k9$eN{d4SM&!D zUa1+77MNN_8Eo&Hy3VbtkMrZM2#C}v>(VGHj~W17MA@l9hCpfoN6uFkSIaBJ;;&={ z&+In$R!(~5rPqEzH@FG@f2Hrux+%+%=8#ZA^l>e3g2f;reOUh#_5|Dyz!dRA--yyd zDw5h%trm*UE9*5f7{y|Pv^AI>UNls>?1-Oiq0m8I_l*D@Wf72*z`YdD%3KF3Hw7on-oh0lXIwR`40rQLPGhE`>fgA_mC zDv(}g0qNzQr^VE$1SqOh{_0Oln8|?ze*e)o2(xUL) z`~GH&GkJdklDfK5tdXXY%6d~$ExG~*S1|u#!wRX!Mj@S%siNXORuu986pTljz8hzS zoV8-fls{t~NWdie|Bb{Y=7Kp3=v$INLqKBNZPexc^?*g(e?z&dMBo(KUu!HXgQFak zC7$$))!k)KdJS1OIwh))IL|upj>IbxPd@=WndpbPf&eu|9}Sr-E-%OJsRryS$*=1S zepi6h4~m4#`O;}~4urpi7KJNT0m$XJwt~>&|BIHuv735aB`l!)?9gXSjV<5_JAE-E zX&rO!0`(yv?QKmI)`LU`(I3UDPN;;=PyN^I<6Q`bT!!w+odRU@gQiq9#GXAhbh+KY z2SV&K6+Te+G*Y8i6!OziqQ1!O7_h!y+-A|Ez9Q7;8l}-vT>J1B3%nysy0%G zUtu*ouk>9%Jr=Pv+AvSImV2Bk`!TZCw8fnb8^VAvBqP%USJ1%(E+n|IENugpqiV7C z3)Amwm97oe6$Gq>kK^5V;P&Kk1aXn%>`U;c*l%9&{FSl%If?Z!1MX}_2^83{BVeWA 
z`&S47P&6Q9DkS29#n37zzI|?&bi2GmHu;XMDo6xxPS_}u*V!evKF_M%t*w`zQ*$C` zdOo+xO*%ck1FxC5NaR}G*!Wz(Y47UlMrcW}!uYH601BYVADW7SGc(tZ`G7_;Pw?Te zj{>rn$;V;CH0*J(0EAJ{l_l85cg+2p>CjdAd^yssJq)0#1a~v@L*~1GS=5&T0Xx2Y zUHXAmxbP1Vl|$p~z6vQ9DdswlKGL;jU+#H}{2b4p0`oc8om6phNB|m;HpqvDrQlj( zfm=IFBJKGLf#@%4fGmZviu!SvUnjzb7@phZwm)=Qyaj%c(5iC9<{MZ{bP;68AM`7v ztbgfO#>ztufQZy+Ko4Pfn?VK<9nL?#49H1B1v(Rwz28%&KZINVkUT)|G;C>ZO~i(a z8TES=Ya~o~rV7qHKKfxD%aa-xtH31-{!B1Z5fo6*_}i(vQx6^*Sm4VW=~w?tX~hrs zHb4d8H{=1UT(A8277{6q5!EiO{M08j80kHfWWwJ+N7N_{5a{eRmBu9Xbl4SX>t%L! zCoPEaqL{x4AppgMLXw7pvZ^p7Z7ZVNKCtQ4w6wW(_HNx2u5YjCxMtgN z*95KgQy(=LRrxo&g34JWs)1j zdp?ZcX$mnn28Fr9p06S(33UJz8)#8n3I~Z0jc3X^QNO>l=pHcq%7}#?=1eSh=HNrj zDTT%(|5Ld52g~OA!{ZiAJet2B1h4qr-yS^4gc~-X(3DKNzEAx7`{p&n0nxy!mv&02 zHNdtv7Ee+O$l`_|rf99F{fK>`Ff~vnB45ScAx$j}t_~k5X5udiYq)VGQ2RkIqw^oE zxTOGVv5MKZf@=xq;Ajr`cz(Br{{FzWT=(CR{fC6)`+eX{=@G^5{o+jF{t)_4{R7dH zFP{&@!Py~n9$j%!1|RpFe|Mkk_m}I8f)_6qAP@xyFk|is`tf2G07;zcq2OAEolnG} zPqL7MVGq4kVwzY69cm>U%3xqK>Wj}Bffwz;`pqi9{WAPH_BPN&oj{e!?# z>mdLM3SVL*XLb;HL7gX#E2Pld?9M{QQ1-xzfhcdZjI+p=ter=^oOV6 z0q+HZh@?+45<2jcn;io3;!1T;=D$lO5_2{eY?G!GUv=%d0XA@gUKa02kDPu}h^Tbz zakZcA#40ETpF9Ix_*Biv|ERQ_`Tsy^`EPVYDMzi~M&cm?g;TQK7r38Ak#5t!9Dg}+8p2)H0osiv^h1z6!Pt*6=jkdqJxAWi#fD zCOAlm>R_$3r6b*F*u~&RLcEOZ4>v(nA6RBBXc;Sh#4Cco^F|urp==JW=lK1>E8xP?fY*Ctnf!Jed>fPog%HRZz2^rq z6Q(b==_P;o5L38hEJ)+1d-a+6ny+>*d;gK%6wn8AnPWR@{1l{vG|gRH&R@#B;Xd^a z99n7m6FxZ|Jm3enb?ADM>dgU5uo6;HkflG!_JG!#1~RYu4`0;@*IX%}JT?D=u|xh5 zj~_SLj!DFcGH<`lS_=Xfk?DKs;7TUkHn@IY`NIajq=KhKicz#?nSfXsjz)B9pMja42M;<(86shDCF;S?NiZzQ^&uXvpeNr zLE(ZW5ucJYurL>eeE3kT>m+rl>%R5DGP9=$_I1(P_F)gV_2x+!?s%_!2<{=i%<2Tg~5g2VoBktyPT zM5gJ+P$JK&(k-F-g=BELh~m{uX;jxet%WZT2!4XTOiQ{FKAqo)5~AP@6y%5jhZ(9(u} zW_b(}f3GN{K-{wlUeQ_PJ|}L#C$vXguy)?1;4{{<`b0iu5=`=5%= z16x9B(f{Um2!ilKS0aVQE=-L2RRS0$%C2O@02FXjxNy@{|FnUEvT#iMA`1CG8%Rz6 zuOz(!%JVK}(|Y#y=N6f14% zx_{O7evSkVF62Wtm+;)GJ=6Eub`U zn=uHV>$IDv)%Zi9!td^L=MO@CP^b{#dCjOvAAty|oC#EUCO(Q8c!*>T2cn4gu>(Xa 
zP#uwh2!mkbS1u9QbhuO>V96qZh868paMX4=Nh^^97y#BxGBLvb6YK%e2RzoUef5k2 zH!lDfEi7Cgryy z>q0++W+!I2h$1h&OLh|r&@}3(YV8inmOkwX9gY9H!1YG}Kq_nPPiUGt*xe2QESE1P za6GDvKd_Ea!4%iZGM_4ans+&~NY~|)320_UhRo5F zdow>jzw70R-&=fzCy5mmY?UoAO%jydf)5~a9z;h#lEMcy2nN&xbN@DW|I*C;!3FdI z3!(+(SW!1F;A}|sFBbWyKC!XyLgFSHt3{c#wfXo}e|BNUG{9f)ADQJ;&T0a9o#u}I# zq%4~s{wV?dKg|z*KL`QI;OT%AQX%(tuQi-9Cn8g-4+Q%KqJv)=x%_a}zy!d98ZJ+4 z=SvefYiF9gLI+gC(@dD!A4ULPP5vfph!pj|lS03{ECNM%!3ahT9NLVkT{AihF#%j&($qe$B3J_t zQmpZ(FamgZ$iW`+Cp-txA6#_QoWNAz`<${&LIk zaZA~bE0TsZNj=Ewc6*Sp+gO24=$}d81O8H!gdhWWhpGGn*eZel*pgA@77d7N9aRRi z^}tzNaMrRUfK~z1S&%{}ZEX+UXpvGx=O;_kOK)Vjt9vXtJLGN3GsIF1ob+kaQ+{M4 z^Z1}9P--FcidPtF3eF>k1g@nM_zgre1~(!vl!MHk#OJ;uxJ)+lPzT4r@;1m^_8&t*QyQ>B%G!6*we)Ocl)}Noivck(GEL2 z+;zzT0ZP*l8U~_v1be{^^BKZX9}(2an4nFM8qiQkuqLXp8s6@#u1^1=*m%Y{WGs2+ zhyqEDdeLh5bKEVkmV{1UR~{P!3xY1N&+>UEkT9aEZ_LKYR-|mLfDl%7^}+1Q(hd#6 z|1%BD873M-yLnW3=egNYAhjb44%DyqnrtHW)wEPZ>Fpb%q}-rI;^>D?W|Hnp$C}_m za^UK)fsr?T@OQbL8-|-}<%vx*dz&)d{%YcI_E?4ayR581p=#Woi>C|MHi_BdMlC$yFXl7I69NW7M3 z#eM-p1@|up5&EyL6*Wehb{;L!HM>gM+0`+NeQydthxm72hja4CtsEVdBqSuB&&kZC z&ny0|1&~dyZRW0=c(T`w{An5QCzLsW8$gXZG(5Z*m&6oA2a2(4-S!U--nmPcoE{$5 zjT{HI1!F3*D(HZs%{*2wtt~oSoZwzigqje{nF#?02$|$jeW>Vdb_0kg7W?9ZR3uxh zjYs#FV~djP{BBx`jey~eoDP@X?#QEHw%8<_%C%yew4#=jT;I<`KbfBNA>{QlvpBmUDx#qmH|Avt$h&dlLIC?eD9?A z>Vw=u6c8z{^_zJn{JUFRdJcF#u7&SLs?6-;-c|gDhd5uerJ2>=seRd;Y5<*CMpzsy z+pIU**&e70vXlRuG~>OGaOTsiu~e#L`v`ZqqEQ;Ob9XB68trY$t0a>frMl&azi9wc zjf?3)`o>CVu=Q05;0eF=WPCy;2j%ml86IUkPvN?KvTk z%nuG1>88k%s9F1%)vV{UJuUqxe}n(0(lfs6r86B;#huY0enq}sZF>P`5ms7$zO^x0 zCIOlvBsAc?pu7I~uD`1#3Y@uLW$)!hhHm08jZaJ*L`V5Nq&hjCu^Gx&Z@Kd0-@a1M zasq9vH)R#QK%PpSQ44el3D5%}{(?2HWe*ZkIiHQD@7Pr#^Z9t)<6NLK(EIkN!K_(r zEN;LCn)ge=&ciZ){S`A!rwGgs*ypBg>){=3azr-iexgx=yP*x@^r-7Tv4q04RsVx$ z1L@GBI{6>It=t@%)vI`x-En!hw4y?_JD$z`!wBe(H5qf`nLbz6lITx%S5zNa>JlOA z!oKEvDBbGTn)^*HFRzJLc zoWw%I5>Ky&5;2bAL5ZGT_a7RL%No)!Doix&DAvgq@;c1Vt&z?yy??B(>lyiTMH;6X 
zte8Xq^rf5H@=i(}eOq14eMAxL6ECJ;8jRihIwcwI_H%5Vos5vA01)D!fZJnGeyQufcEY$4fuYVc2*M05>fbjuy21wPHujo0p6 z2W`(9vTztK&gQF&-=$@gq~w0`%)3Q=!V>Xaq4wSYGF= zfa_iC#`jzE);jQ?$}faE+WvH(jjWMCC8FEDEnlm1T_G60WDRDet!{2=;G&=h4qR|- zbdud1{rW<-F$!*4;4I~rP>{n--Ktx_6KYl|>0^1dTrIW$BAvssvol}D53xxNzOG3^ z!bSpd?|SE>&E!@$`cH8003D5%vQ(lkkp}fy+6t%{C&h&q(922)2#?1ay0v8vUFuxC z9vZJR%>}8*E675ys12H$uXfC#zFET++u^jYCV%94OnMdhGS)u~V=OguBn?HG%RA5N zBQU+l&Yq#Q>xeLx&~hClpGYgR;}8<`g)C=i9#u6ncXnAnHRFOY?>nD86Qf%$e_4Qy zkkC64I3!39v5BaA$Of+!|GPs9i1fcaBR6`ET<(Wzes#yC+pIlou`eadV{d7#tb?-P zyruFqUQKn996L&s0{IZx=n!L6$wNlDuJG_M@?z1;ENoOpjHI6QNh}*g3a58<^oc^y zf54JxFY>x|$;v%*OPvu;(CBSizv%I71f+0r9gEKh&QGvI*Qa7WrCX5c0OFQeM8I;$tWZ= z7fJ`(9MVtvG(f&+_Cgiur)pUbRFJa@bGYh!oZ$Y;uctv6>)gE zX%WY(70GGfwReTZD{%s3;Q$@)WsPDT>_eO=7_HW}9e>4q!M)G|LtABK-$iFt6lxSY zU1IZ9&P{Pkr1Z3G+&t}d*sm&RF0N@hVB!DvEO(`oCf*9n3-a8sF)W#n@W4=?!Oa>^XvI42aO@0X#Ojg9(L zC!-zv(NR~88bvx>=P6zHn;i6N?5AFlG)^^6XvK$&1&LxK|E1a@$JW8%v8tQ}O&mBm z)rP?Y4=$fm7ooBhwBk$aZ+uGo#bazovN4w3aYdffd#Sl4-V03i`(NaShPdQGL#F~) zFbiib;2PxsPxA>Dn73=VnO2&G&ViGfmkl zE1(WZs+ji6^9wfdG#x$&nfAv3I^1yfq!rUt_zDa1S7HT_f}I{BdEFxaVAco$01zuM zgk-aX!)<*z*1@LPNg_Q683hG}(|uFWXH9oT1^3zJgA$&moe0lF51)wyV2ZLPW&d5cAw`U;qt6Dk^=P|;p^v?TQ@z?#~z^n|1%>_=al=|<_mz(D2$ z4MZF4Xa@wQkCPQX4I0IQQcEdE(w@CGmk)pV9L9Cc=6d-is#P%PX2^BqLaV`T<11)m z`C7j$X)EfC+%pFO(LW07NMhrx&$(ASUB zZoE=%lkIsZ*THV zh*aAhIN{NkAB`D;J~Cb`&uMBRUTOUuORip(xf9JQQy7x@y_$MP@-kiQc>{5-XwaO& z+iU`z!ms-88ZfTV)hpYc6XaVWGii*^;p$2F)3%2B&Uo41))qHn3weQ zn}#=FNbTd8n3$Id39nBR6Aru1P3PxIUB>Q$F{`&MXM~TRoPF0LXuQ`?=*W%z_?o|d zAM_!DRw@pZWgCjzb3{*U6oO6laUertXH#Iy`8sjg%l7NYZiK zvV^rt2*^hKx6}em-umC$PKOkdU#Zj2fe=TtEk*sW|}@uU^D6+SpC=g|N9H8 zvM54=+#}|JH+LYa5RLR}c^bu?cfbdCx@2?TC+yTi5dQ8W()k_!`Gd-@iP3qd1Q8{!n%>gzh8{3O2(2e6XY}Y|6FHni0a3 zo}$&79~%#O@rC11Ph9*xq!c3qpIF8|Zf5K2>m;QB5B$m$VILF(1cdF~U1wQxdRELg zJ#VL<)4OtzbZ@eEp%#xi>^CdQ(OH`xl?J{-y=@xZiY%$8NB*&;<=fU)+G0K%k8AHJ z>u^$HvVgn7>g=~~(Nj4~-3R2kFLIZcmgqGMAA&uk%27-U22+MJ<>FcIsOJ;d;+2l& 
zK$s0ZWiizMadt!rJDHFP{1;-soM=Wu*{dMk&7X7Wi0Q$f5#~-0R};x0AHSrhqyH4l zV8|8^{;>OI%u+<39K>9kxlYzL`vvZ82EH3kPD|Ix@R>+)S-+dlL5qPbK5r&n7@LyU z9}F+TpstT;J-mK@=S1P*LxS|U&v%(}PXxp{Khi=Qo)a>C92p^A>Q9s2erI(5{(Wkg z2sIXSc?1|hBL?(!PRHBE>tG-$;Xd6$eGtXOCt>p`5~c#s*AdL&#d#X2h(_KchZ;}O zEAMxm@-8s<81wBBA3CV~UMOct8;^grf=@GzV>1lP=_L(MYJEB$w_d4R^78^HA+zTC z1L9ajn82R_!gQLYr>lhvZj=VDr`TFL`uB1u%2PfjDTIW3LHmz2@4SqMFyERZx3B$% z!f7>*v>8{XqEoInpB?Jj3x+b4*`2u^pB|&@Id0zVuf5yzlHr2!LH+U8GZ}z$B>Phv zx54}!M2=SnU3A1y0i21fuQZ^O@98{golqbLb?|cHxoa9o$;mu zYq>YJ?Dx2XhmS-MFCKUWGqM#)k7-QslemozQr<5Un z8`PnI<3jJowK~=5r!4AN$ssxD4Ob@%?;p`yl+qi&j8La2UZL z@Bx2Mz<)cRJE*>$t`&ED&XSgt)2VFh=pY_FuhDk_7fmTlt-%;j0ly z&ux3$6~<@-uELocw*px4rzTmVIOc9E8x%=YPAblGoeN!bKsC!(6{alNLkR9Q85D;s z%eHaY%6{9FFKT2|awG(+?sz6;lY?G+A7QVblW9N<*caN7hsk$d#+GU2D(I7o_@1Lz zJrsNLgv9e`g@yf@(aoE}2D=2yvx~uabPuSh(=lUI{Y5U15y^P$r9Q>b4eFut>xgAb zFm^YlAG~QmWUjdEdw=2{+!=Gk@aZnY?H4u^^@R2AYXJvj+(s+q3wLyLY#BS*Mepp9 z7)O@Xpf2&=u()XX*k>|XTLz|RayDJK-OkL+`eu9tQHfEly64pY#njD7++hxyGcxi# z)?9wL=#%i^n+l78MlbhDn+c6^LOf^mZ%ZG)&2L0J4+z-aJ80V!@?r;bkAQEY=D&?% zH&H1$Vn2nndvQLf2u5Y~Ky1h><%T++tSQPOhe}0sf9QPF{=r!>O6>NqBR5X65ZQBI zpR;uBibbtz6J&0?=@(zJ<+SbOT(Le~n%7qrbiOd!Ko2pwU6WM(X6ZwBH<_}s@@UN# z#Jg?&>BxxuW3`{mf)etsF4)?WDa1O%CMJZ0 zofjr10ZzTIIZ#M4h#PYUir)Jd3qp;CFJWzKJjh0MBJGt(>i+1!&O4=}6F9-`#|}2@ zv)*ntNVUm{Gqct;7CG#}&{V`SHq z3v9(1)@T!DiM%VQr|U|UCS6CKyk=HMpYc9pxkZ!p_A+@?*If(p!u58j+=I-jXo1O% z6{r5YaW;h`sDp{zR!;%jCG)UIMDrpH%$<{C=33M&F%T--&)0j0H+)e=*&WXk(rYiB zG+gw#$jcqUsQGf2sVkO=UXo3WhO*amy*A=SMTOcE>U&U{b327yEY8!MDHMMFMr?a-ef|4dm0& zR8wcr+32!?m@8JDc40uwhs(_MfOFC@)znq3g&VipQ#qRg^hfYL9*cYE&}oLei{TGC zutL{{i-MZ{{2@e7<7;bq%X;dMF1^03t%aQIEr!e8ZwLv|8oe(mDTm2)1@UXW-$5-X zNbXB_0mnUI7|ipLk^YP4C%g#4=K?H!%fo#W6Ghk*X?a)V{Lmr+V*l2 z3auv7yR)>H=N{yIE~xmo1zGG81zpkmB<}-1hwZ?|W<^s%x1G@-?m*?>WK2Rq5fPgA zc>7@Q)m@hEehhs)JnXjicBAj1-||%I(1(i+qZ}-H#OK?wn3unA0>7t_WiVC0JXpil z7bI;o{r)60JX{hW11HlFvNER@DPScPtypnarlYnCABy^O(sMFn%7%T~-rK`BvLHe& z``Th&hs3F&aJyR5S>^77Cthd}+S*Jesd~%(gPmYMQOKN0zv;wb3WZS5*|E 
zb841~YC`N6I!e~M&8l6FR5K%t4lp0lz&qKW5S}REoqTY$nDT{^rdg4VR-t9fQOIE{dM?1mvq6H6z7)kh? zpJ@g9@GJ?iYs4q@J~aj22$7V_`n5To%kjX?LajQyFf_C|;b@K$&;8{9Aow;JT2-HO zEAgzd&IGEnao4p{zboI@%%BU+$Cl64^7Y80`z!aHI`XcYtn7`{AB>ipg+myjB#Uww zIX1;Bp?;Sq*Fd*WLK_a~tAtUluO*r|z^zNpo$NsyEIlNT0$uV&*>|_%{Mxd!($bqzNk^&5ILZ?Eys%B? z`?8*eceVrg+R{Nna)aKDtzzP7gAq_FMwJogN z*y`1tU0VxpZN2}R%}`-qkOJ@4_6v?N)VwEGumGN$)G#WMi~V~a`%~~`Zcq@0DWnKw z$i8aNL}Y2WoUXh7l8|p781c(%cP6&#dFFnb5pNLLc zSgZVEi=XF1`4Ei0iWkAq`$Lr=IiP#P3ymTxV-0Wsj5mdf8W>;7BPR!3i$I8z7dzOs zWtg=+=QCf-Co#$}&R8C7;L)kF59&?g{s#MSVRAqlVEYZlP{i&*d;| zF)%P%mN`t7%ETpmF6=24$VCvSs0^JBq}erd73^Hz=OC)Y;303;POS41U%v?nwTHN` zj|O3_T|G_GMF(E&4uF3_6&V?*7+8UZpJGr8)vilNQhs>vf$=Cn#yx`QybOAVhgoIo z$_37FB$%CRJX|;iFtCfx`aN-eCSRhL3HLR1!J5eibMiiJ~%g~+pG{1nTl9JZa zTeV@&U%;Yiq+6#9BUXp9SpiGi+8&XFbVuWz3fkAb;~o4j#J)|wjx}SBhwIu1q6^*e z@%2hn!_k{J1oQ^QAY&kMQ_6fs-xbfqR2r_~#N>O1cy+$VhP6FVYK*wQI;1PAKhWe8 z;{Xt0>&MLz!#E>gneOP1i@1_3HeuYqH$9eH386Vp7WU!qdd(78VbO>BUAMwB!t&Fx z^?9<>4hT?j{QUd^-X#7f2K*5U24v>Lf$Vt2$_8T>Wc?#M3MpdYa+Eo+6!3#0wn?SN=}kJ9utT78@|U)EvZ5cPQt>-Eg?9P zxseo3%iG4@0*`a_CaPWK_E)FKY)0&0U(C9{8P}M8%@aUc?@1G(>WHLxwPvZzZMkhk zF65~;7ZZzCTIhpXlhA5?=N5TK&RIfzT3Bf2HFD68}H}!sTcoA|Qv`q{6`nKA=s+BfH1Ph2b=qN{q?=fa# zG2dUtT4#*lwmd8LEGR^GCr3r46&hy0#cewpn%x~A78DyFUjqsD^;zcLu`bkaK%rMi zCRn2tr`N%la%{oEqqpt)%Am6FArJ)(a&~bvLuZc9(!A{UQbQJujNy6kBo@Xq(??J{ zR5!?Ff^ax;8lhG0@@CS{ZmF+!r=6_yWUIOC$!8qs8+%e0{6rp`U_-wPb>;--Zdw}; z*nQsl;74Ih+;01pubH*cE0%gBAE{N=N<8wesIX8lT_05j!47%Tm2b3G?Po^D+uSIZ z2-zIx)!)D*;MUg+gGegc#p0F-QV^nm!;OiI?uZf7rM?s`cE5m0FQ;-?ygL0O@Jvi# z;`$wGwp+J!{PjT7^S3wvthI8Pf;qnI7MpsX>&$ysGnc?rVh;DsNeswn0*5JW(>NHe zdA(8u6%4BvJKEGQvl_hZeK6$0X|0Ci)xSNREV1y7+QU)6|dWK=#Z-?mCZcD4>t6_s@ z`QU%qNEb=;fuJaBIWlTH#OZ@Qgs@Za(GI!Q(&~>aCf4 z0S2g}zUOz`rhea8M51}~=FM84?{;|_jikQEJM`x}9cjxGx$(7vdKHUoW$$JT1dYp z?EKx)38T|HdJZM2_qHYdrZ=wf*hLKPWE=J>~XOmHUMgQ9U}erN zDvi%qQOh$(R{!EcE>J zf{u&(R{L8MWMKBt#~3rF?TbvRff5Hrz(5f)CyXm2Vo={G;O9Xw1C!&^wDQMBN8arW zIPbl2>`A(iL}l{`pZHq~&^>DD5@-d9&&|DNVq!v9O=hg6XPd0$$8g?NkowvAXAiI| 
z0T0XH@`5@Hh%mFMY3L$;gcU{ThU3bnd3+NA>_cbQpTK z<7ZERJK@kFGJB9rJ>X;R56e-wePCsrUskcRG)goAVV|9>W8Yu?jA%Dr?F2D0L*B== z)KjmD(IS_WmNs219D`Ob6O~#-Z7z?#%Yx*cU=U@Us>i9Md%_2R1<0 z=hTVnHJW3Y_a#OJW34?9M!N4F)r{|kBaqX3ECo?niX1_OWMSObA-`_l*4~?yb(7G6wyt_i7OGqbV3ydWSYWh`rZ5X(q6Y;H@XuYcEE&&4)*pXPvIB#?nYg-85B zDjrY{T`$Va%xv2*^PFTyHVRs4kX_6FiB(YAhLZ)`r>#R=fK9kPtk0c9?#MN4gh^0L zWK_JV-K$7=M++XBYF%VhDI+_p|Aj1yyA0RwxdV%VT<;7quVY2U5O{?KsbGeoGW8`3 z2W3d0WVH>k&=QS;v;9D?^Z_B0$@Z+630D=6%&cRLGuh*f*Vu)-ZA{D`hs|rfw~U5R z;UhBNWkhdg)xO2i;Qiibphl6Z(%Pd+i^8YIDC6}=v7OES`WU{=$VCt#lLq?0&4}<& zUV7GxVw3aI*97Fk&IB`ki9wJujjW_5myy7)pCIhex(WLRED+wb1!IvR@n2srWnLW1 zl)MLi4>`0(TNnM>G=Znew|76m^Nm_J2i zQ%2aWiTX9u_5Tp|)?rm{YrpWM5v03Sy1UZ^Y3WW0MUd{82#B z4QH^{yZ3o_eBb%YrPsP(KF=Iu-1q&fL4MUbJ&hSc!r8Id8c5fnTPI*`d$~>4XSm-` z9Y}}#HgVk~a?I90i{ENY3N|PH?R%U*jnDN1i^0zTk>P07nqgZl!E86VL&-$^CjsuXS($42lm+@hD{rI;f14zQNopc-#gLFY z6CO9Sq=ErE3wB?E;Se`$GfYdLJO^u%1}px>j5joyvm`xPYSH1Nl#uSb5S zEle)Q+tHJOqF)UyqNtsDH1@vSJHz8eEf*b=n2!YZ);O&5W7Ey&QnTJ*6df@73+4=lA^Sh**PdQk1)#xQ29$b0U0tHq8uwhhW)S|Nc6+d|R_%v-n3!!$ zcRK=9k&F-VrRV@eZfiphqUi9b{4n_mjIgvE9N6OSj-3_BqKj%W=sEJq;p=gv2&oP3 zXZUikZq*#wq-s>GP1F6a%`);b@B;0!cAY`{h#!!GIPdm%W$OQABk#LVy&r%cBMH`) zDpR?~Dxla3g&KIOppM{eDO-oLe0>9W!Ez>J`}^)<8i4yz6(^KNJ&tO1`p3o~7Fxf5u zfB-1K6H8mmv^I!=4=dS1h`pO|N8gV~0WC;-@-vxz=IfJjcLdY9If*afL9ZHWorcm| z`6nv9&~9(9KKg$2e6?W%d;41QA&r2%cdCFFYUW1EtYSz8<}8iQNu@VH``e;T!xH$1 z(w-unUtP7fwjxbe?!Z_36X|?nrW?IPNAfk5W@vdDM{kkPkd@zHvYu+nCZNP+Q#SFc zEZ8{2R+~8DP2@OHAfuvU7(KVgc=qgBvDcOJ_H-3awruqGI<44RC*b^@uAHc}u7fjv zYCkF|0b(Ys%`q6G8Gh0*&u$eF>pe~5dT}1ipAb09pbT?AA-9~%2#^xYDcc3! 
zf_`*$mD@!p4t`rL{Fj;m@PD-X6XC`{hNX-IT%BA-6owHFeaqx)kIKPm7nW^^k*FM`~K-M2R%qcp?Wwm`~ z(48Wrq#!GG|0FiTvz$D9f4nhEktq1FBSP|q&8cMQBpV?vNSFvPnrR`s=aG?7?c^bL z1*A>EF?!`G8FJ4hB_&HNO&a~pNEoFtxq8D=>-rg#!!L(U9v@||=*c@iyuOwVMv1<# zG4@*L`ML|~jV>$yDO;VlFwc1B`+Tudv5$NL60G=U{sY*H|qaN82;x32Y5dQ zyhy7BG1V6lGyo{xU*DsI_=A57ej)gtR?SzA6&{$BI52}-DX_?y3^uDw57?{-TAii( zNbrYWiwok}SQ%?iO|M}iaXO(e`n_2dV}h_mw(Ym{TbIYKio^@WhrSRl0^MLpXh|Kq z(J!`gBQ~zW(g(&5hh!CBqspz!gC^GRP3Eg~aP@hds0+t4#Z)}oD6(sli|_%AY>94W z{I%e(1LYC>z2&}7y~tqln>)K4JB&;YA1qeAQn4LFvDy4eDlNq=HcZ|b-|VIp`kgSt zDx^Gl@T2hQyNZ2%K1kl#(5o7}Pt7`gsWbWW#^Um~c!gdiV*@M_+TJwQjze~ zpq(VG>aW~Ivnzj&HVDncE|d23=gJ;w{5vq4bN{l;n?x#Q z5#sJi`vd)V<<-c$m$et0nLk6D_o zeTJ5%KDaoKl;McE_Ei-e3oB|hEe?i2E>m>3Vb^9;)%{fMRxyHFEbSc?p;MGo znSz1!m`9PtY#~y@h9q%yWW^IDGm!!^s_{Pq?w_m_JB#TDipB>)y?JDDPkF@KxW}?6 zTM}~J$(JG0fKw;IhW$HHe-9}imP0g(R;@CbufLVv=B#u zYY&oMhw(c994LLHUfLG*ez+b5TqqY*u{CCLRy2z^te2Qnai;=j$QLntp*~sYrMTBb z_=I;wH2*HTQnYSoWb0pR9vH74vEF~8VA+7HglmY=vHY_=X2jYTjL>C1qD(Di7mlag*dNd zzI0GVvA;(LldN)G@(=@_fi)VCkr@1vKI936mtd+-k4|Ju4GI2hhBl|2xQ^>Ky3F&~ z;oqR8``R9yaKGYpA`E7z{~t`$2WDkLgg87no{1T1I}bt6jOjxB0i&W77l9y~F5*`E zf2DetI)J#YH9&vHJOl-6 zI@-1?4cPxL{8M9~h6zru4sd$iV@N2OH08v3QYlP-KSq=dQ)K{n+u)1e85;VW|1WB| zzeCNS`XCX&c%&?O*(1JGlEP}t}CkNIT> zxe&I!s~2+rt~>x+DeVypkdZw)4Yu0)J`RyDsQVx=VNDFlL?I=F)>NwU3#z9u--fO0Evt%BCwTf7*w~gLEwQ;Tr z$?<#fpWi>iU!n=t%VnUX`)~DwMQZ(JQ-{A12HgIQW&hMq#QP<{m5aam-i?yRI%0hz#SSleA>N?*pAYD} zlzIQo=g0qEmbNZc%{Eod*So8`0J%jKbaAP!scD{_r4n$f(B7M!fm@w>Tt=W2#hu63 zC!CQPaE^!A*xWBIrg36oY3dQ0N-Pt7nW%Vk5xQBR; z>kR(ulKX2HfOAr8^NWd2C_wp`Lh|(xs+4^PkfdX>zia=(+lZPE7V zlfid&{a)caCUpb^1OPpwqggATjUVk^m$^H3uIofGJ&>I?+O@c;d2O`0C&Zwy{&_X? zU#1LYAwsSry$;F4{o>Ws>)GfaHwJoogYsQ&v&0H{#a=Z?jOq%;pqDU;QK^B0L$CA0 z;(ABlxab2IH{j51PI#*n-O&edd9vJ`Jm4-gAHpf8ZF;zrzlzY)(LKpM zaq)Ly3%pn}zk!w0XdChhOqhq`WH!uk)XCcIz}+}-ezh~U<@TrdUrW|q z2phm4+p;69lxHjuRm|cRZk39d0R8(F)__)Vn-FK! 
z?6+AeZtkJ)RYNQc+_dkukE3g1OuwIGMH7=8+LhS7Go`P)DA_~TduQDx5fY&!)B8lQ zBc=PdT^X$agNEV?3wp23-p0xQ-m!!dD1B4D4$9OCa$h21k*tpun_pNFXfl*FmQ8M} zq(oFj6jTVEIkkIgBE+$JaC|&z$R4|)pz61gPKuYx?2#U%_OQr zPqsyWN@n~O2IDN%^^wM80wZ5rM+Z8xz0*xuriJ;q`G=~^isb&)@UE^Ol%w%uHuaJ$ zEIPqX(X4U372|f9&7VF}e+dq*O}l0tmD|(&m#KlN8sq%p;t4Y|88pZ3Di2OF?)>NO?19SH$Y}M=YcrR=otn2V^c@=Ei_ovpj_>U7!l+yISDlZ#_$!7MX z27L0|p>IeX5*nq`Lh=kkFwG(fdDzRQT}tIY9+&0>|rQ5xini;T8+Mf@JbsC=Y)RWQO|W;Njqgfb5@j}j#Zhp&=imNd zKwkBd0Oozq);>-ccj!jW!@wZG{&{?**4OFu@U(~6wx{2Q|77i1jO4(QY{{*J z#N3zg@b1WVEt@fMMmLJM_I4%bOJCD))4`y96idGZF}7+Sl3DhIFRUN2}B*@a2pEdhu}5dq&+h?{H+ zcKw+o=Qdp9L+qBjPH6ngk53_AwJCE*^kei{xjOW}goO~~bC-1qa#-PV_>8r^z$AE)|@;%!- zi9GlUsI586&xx3pFhHZE`1+fM$bIJ>d~VdWwkiyc%Tb2_lsLQRYb>_kqh2POCO&+4 zaB}igO|AU*+#{Lv3esLDb&4e7%|D2h&C{I}+{4_8Wlx}gnAK|Md*An5RE&ODFsZ5ef8hnJHt{ zYsdwL1&oOM;Y7*sor1bAB{STo6r3hfTz{!R!s*5mOMgH?p^sB|ieS`%H5bixEnf#9 z3mPT{TLHuVCt}O5RaTdRZ*2`Iwt@?QA2DHlw>`bcu1$oda$^9vzx zAvt--j;!N|$N%2-Og$L3MHXVxLke!rQJLcq-aF@{5ZiQln0R+-n-!;CJ62#)| zQ}e~Ydy*Ex_Z1xhZ|+N1$$0s|ed$_wp7K434mT7QD+l)c;{t;OYZMGx2ya;5Mg}ME zx6=3VtFFncy@Sr3!ZYx_FTjGJzhJPUa>2dt^dS#LSEH8?(-z4IpMZdVE-n!OOTC!Zsg-^8Nlz60clLv>-MNWvKNYv;lHn^Lcn52^Ek5@|+16aWF85 zNE@JoS+GhFbN=Lh=@B))$>-+?z27jQ4LMF4D5XE4zD5H!s*qLr$2Zrqgc=x)_W#fV zF!e$DqHWIXNuYKQn%~xlJ2~}W1IV_spxV8!AY9aC_tj+SEl`lZ2*4nc!gU6W8l|W+ zJ{I{%wC>ETHTDkQjJM~!3pi`XBfN|{WuFc_` z)+-&@7{C0rog9M-XO1`PIX{r7C>1NLZ z9}|;L^1tE>2E!6bu%?Yt5ggBqe<@?F@gy z!}BT_rkWcAA++h4*hzUu@x=3^wHQ}Kfp*wR)@{5vMvhoaKY=TwQqp&nGo-Wm2p+TR zd*SQWtxn*)&mEOIi{{H{7WLsAYl`morv;PeHgnJTtbQp^^}Q5XBq3qfsCE6+*0g`c zwd+ZFx+D5Ury{Qv^iEUOX{*_ruXmDar204@btA{}Jwaj7{iPt$^UD$z~rv5JdSd_#Wb;6*&SQf7;v?y31$I$8@J5Gf%NoRFrhUCEND{sU1Bw}b-LQ^=B%O(9ZYWfv*J+eCzf{{H@sXBQvJe4tHGF{D|oCjB~>MHPcn ztFZ#qj~`RZ-FUaiX;PtSX{A_C|XDvRNp*?YM{I? 
z-}x;XG+v}6RVhU*sLZ03>*Ji>@zyvFy{v0*r?Ig`cEjzh*`J$6FAr|F!?*P(W1>qz z&6If#0y#kUIEv0Rvm%6KRNfLCp@fVO7g2>H;n0HUbV5WVDB`_b;;4B&FhF@cBORMsD?(hA zDO2ysW{39~ugi_TLeSbhjan_j_)`Ze=wSn9IU?vb86f6bLqOuqeOqKlDrEG?z-@&n zNrZ@P$e%ekWG)Gk)dz`==SqCduVi}ooh~f!N#+x~tzu)#iQ>nA%?$_M9cb>S2T$Kl zNwF6HA)HWQyq*+pPl&p-IK?HsLHEqa%v5f0$C~jvoBa|R3Xe7P_O;OmX1R$!-KzV) z2j%EPaumpy(gUfbR%E01cdB@~tp+^#tY?%JjUAdJB{m-8*4YNXNH#Iaz{SOdA|Roe zobAqPHMp}+r1T>6&y+&y2qC5o8yu@$!?f#@nP@=D8UO-EmKW0f) z0VWG~;o>qNB%k5=K#Gb(9t!L-#%H_Yq{4^Td>`kKA3O*u%G1!1=vdlr3yLD;HGc^F zTw(TRy}%ro9mlmeDc|elydGrDu)cgU9SkdE9{*q#FE=AX+S>!ew9jHW=G^uwS*L`+<~9dQHf*LEC9 zwwU_l(ZQ7`1_G_eZbaj#Nt^`kzxu0$Uqn}(bk~<}L%4YFurYC-?|0)C=-i-*xYS}l ze%yS1b0)bD%!Nk4OjkTD4v1i{P)cEZcHPRzPr33J;}M1HMb=LB2=S8te$@v*5S zLUU6*QJpqN2(8lJ*7o8Fig%Y@JW~DQSq$Ob4`;lb^SRTXJg+_)sKeDNq5Pzj+-wD_ z|DsW-YdTd<7I!r(Hu6&+>FSiGg_To01Q;8$o)xyqpN6#tS~v&I^8L^G_%3&F~iXA^>@9(!x6^M}>y9 z9eCgp7K>F$V&I7qMy?$qsdzquxGkF^`+vd7YApntuO00j-K#gfoK0-|R=+zGOv1cY z`^fBgC(@szm4BQk&Q(lqk>hL#1qRmTW;B`?dvgsX=KbWMG(Lg}0#@7hK>c}`WEAEA z2P&ZDZ_?CHf14|CyA0^nAOB*t{FU`4A-XPJP^8R#ZRTJjH#No??{r^T5tvg^g2j&d zc+;+=b!1<3zK~=4H7I%aY&FtzFV0oU;#bydt@E2_YDH?=0>uyN=De@0UEMqufj6N8 zXj(02Yb&bxbep`zcN@-bF&BcjIV53(@zIfggib%yI#--P3R2@4|Xl>pa$ zmwn>#sT-M@8ve9vfAekh)B+6zCQ`>?VoXKQcx(B4-=Hqc`&1cqeSJL9beBfBvA4&$ zgMo<|brvCcK$;T2&xeTEvzQ%gfE)Y8-)JeIx%tMd<1!CLOT(ks)i9O_@~Wz|&Sfv! 
z$rTtFIFOA51P?}x)E5PQr_8w&Vd6Ykh$}Ygw^DiR)vp;eYsC-oE}xU{b@uOy;TNV$~Od z$>_!@`MUJc_onPd2FC0!AMF$!3&z{ZHDkyqNCLBaV;-}y5k9>-4Pvg(`2@QkX(m3^ zgC_hr+>ZOb7H*+GI{flvClHvPsDS*CIF{YKq({|Q`^4|#)EW!KTv1PDT)*ygsWoUX z!+A%i^a1_?4fNjE0%|TEXofTeUR1v^JwY%IQb^I(@xM8^qB*x+3ba&idK!cJMsYJW zI@B#efQ5w|_?ZK%Vg>OD@QXb@;-+}5zALw$CN4AY@93HLejaeRQtU8JU4MCqX*rrl z1dQ6@HAlo5W=G~gS%F)8f7*(=KGz^>;C_(JC+9)!oIxbX9?T#g2VLpaYyk?mk`g=c zt$F;=&SXHvrexEw($mFfN|Er=m~o{McPCsMWbE2=e2^UXl}Y2hJWvP5oYIIoBO;i| z(y|>>_QZ0Az-4_)E5%$mC_5w2GWCRg)P5*ai@Kot8r#WTy3sy)LVU70UTJbCQ=liv z9BchkJ)gmOZ<2PG3OJIX;GoL%xodR24g!X%Q%d=SX-@=ggBzDUR2R0!yRfKiH(CcFxwV1p5Is$G=&C`u*0=dGWp|t_4N|DgLvLNppvKYH8VPKEJKByoR+IOS^CRx)P zJtC%Oe*?3@$`&N>oO|?HeE`5=kjPA@=#rd_e3Tr}PMT~_l-%3Z=3JMtl7t;J1n5J1 z;Yn;d4v1fQ${@UhtZS2@S3{Nhx zaF%M?dAkL3P@Fv^4TNAB@7C`PhcYHVn58P1tnLza+{Xpyhu?7KN0KV3J+Jczo}LZL z3~8ZB_6;)~&uz4!kA+|A*>Ksl2IUsys}((IPJD+l2K$IJ<8ancIt&kcVslQ{mMRVA zDelF1i?&Pa(aq6&!+BoaBTO4}`BndFKyCLhjadDYkCQ`a`FVFc9qzGPgNY-y52>`7 z4F+Rmt~OIVmO^cE4km}r7xrs!JYugqq*tM&9BFTla{3YBI^~uOXy4G^0YSrK2MQe> zyd~U)eILFxgzHp=>b4qUoJ+iLJPq9Sp`9m^Ye^B~sUVSpQ4zVLgM`{qwg7QS@oG_eH_%qOG8!p%k^}Q3O2@MH(uBXQaM3iln zg`)eXq90}D%W_lQ$wi&Fzh2tS5o>YdHs^lI-c5?w{}_lv)!s>Sr-Xf6f4NCv67XuG zwX)QEw$`O{IYRt15vzA>ZCMg<_&wmd+AbSx&$GD#Hd{VRC|qpdH#+79!?!>mAsQ2H z!5Lemb(XLb?J66pT~en&DZWj4$oX2TPjE<2KFr3~s;CAP$QoPM3_U*&B#SK8ZWR&8 z;n!NejW@SxGVP>(Sa)KsH+`Y={ge|6r%pi+Q8D2-`*=hAY=(9YW8K=9951FsQ9hhv zK1tKwMjG0KzwP>F`CML(RmR?`Ua#StAP1TAOxl5>5l0yGq+$1jZ} z5wt*sP|?THNx+pqm^oQ&m2N*_nx)ZzX<*IlSI$uIti}?Z^|qbza5uS1riyw8Y8^C^ zl7trbUM3F~LTp`HEHT5o7a>6?=Dytd*c(MyC`s7{rYSd}(5bDGbb(CM%Oop-;_w2B}s_!RM z_%DX!pNtijvu28iBAZ+$ZCrMk&sg*P&>{@z@Rqrvk6GhaekTee$iyNVt?ua=rD8m2 z{%K^B0?{;_MLwJLfqlSjYwPGS?*l%~6@N-4_%Lp0?(%w3j(HgQqz-f~>sFgg^P&4d zdvNrOoyZ@;_iC`ufi&Le>FLHm8~+;QeQ-vqKodq`EzPac+%|nOq13$OfJZpr-0Ie< zau!GI`Tjcb+IAQp2yMX>xztXQBHfO$Vt4qKtKQQS*Zz{td@!C?LoGDc_-S#UspU!z z3hC@R*S-e;%ea7Lq%B1WKEdts<*J{@3~|4!HDX~Qy_M8uZT5$D1IxWl*FQA`&I+q% 
zNmVWT^LFb`bu`C4D__cUdHhfK2Cf1zd3+I=!m-##l)CMNhKnI`Ei=}VdN#(K-t!da?v+q#)RH4uk!5LV-UAKp&OMLDS zJP+DqwMyT{&nWeBFd#l|;jAU26n|gdraoQnMv%a74K5W%>+xmpP%}-wj~N3I2+y!( zl7-x!9UD`jZ<5}f&Ux$+NjZ(rx)`4qR`r;lrTW}H9{cG3R)JjbRTP85Jp>@1bYAk^ zXYG7=eclHW!=e9~-RI-ey*XeHvQYB87#&}_vUDN z=VoVCRS|7XWcdYRr)l(09md!#N+ceWfiKJdbO-hZP;|*TXF1}6F({;kPYC{al)R2& zcn1mfS!-4+`uJ`|C&QlvJo%HDJjyfA0r^nQXB=BDbz-WH##V3=_7-H%*L_@D(cC2{ zNj15CTuAL~A)JEUes?&1N6DyNDuse!^o1J(p~Ob34G8bzscQLB^#^3-14CGp>xpE- zj#D1LMPnam9ZZfAB7I2J90v_&ro{!cSiSVO%Eq|ARV=LLNKYaXw}8Pn>7(%~^C*pF$iPBtya>d7>2YB0D<>|#8SU0moG=(|7pjPL4C)LFoi zB)VmL9!S;Jqr@e$K?+o}H5yu52U|MFbroBvMV%?rjTeE@R$%ldoj+*pr{c1~6Vcp$ zdzHHory5c|yQflBL}Ey;ryqVKsd!&*FrqAp9}oFWG&*qZQi{qJOgG-$df3(RxF1u2 z*?OMN19jb89?zoyAqSzUx82gLU`iu=C}!`4X#foA1YGu~=d2>ss}PoB5^h zOia?v{EceN5Ew!P5)}mE?tMDo%pe7sM|dPKv4(!E5)@40bQhS7>-nOz?Vm2ZAk&V@ z1AmTY29hApA`VI#uz?RLH~IgGo|$9B-Tm~Z0L=mZdaK{7yQ_8D>-|<7lc1L*p7LWA z8kBUkHfwrMn3&Lk=>U@&_#-|{9!9w0Gim=)n&})4TblmZ#gVM{F6)uJsU0;aYV%y^IyYLUz*B zE%3w9aJ;xHV_$cazRrLJn++StCkl`O(AagsH?aLA6G62cJ2SDtW!_7bt&a3AWK8#1 znalLD3Yh4h7Ua9$WDPPbF8Cr;g<4=c(^baT=N@fD zOLq*4_t7o;&1f-+ayf2Ct6e!BcehBu7|m|~X!mGPvhz>(Z{>e zlQX}K$bhJ%N;(fyjWwL~h)-Frpj8FIn;arnLC!l)>e_B#e|@9!s9^mxr&6A%ixVXh z&wce_%k$@pdxj#`N{S|M3kcvK5A!3+^#F=Z*o6V@LT^5o$mfPn&?)M<;&@dh&MQ3R zY7}hB?L_!ZYIMOxc$nQ)5Gtu!GJ|5YVay{eOz#>L9`r3Li{Ufzo#wA(1>kL*nVkiB zQtX%~Z7*1NlYUOU*c)&Ehk8Z2z?1609@J`I-&}q-uks)O7>lyZqJ73+BBjlNs1$YAq}*0E*LM| zcWc&G5<01U9yT++({|zqDKB!u3-*HzXOGU`qDx#ah?j6BNSLC6dc31H5d*zAYV;ew z>+E&k+{kTnTIkcB@2X#E4y10I;`hmZnxQ0lLn=5kgfj0Xs05jq-m4GTovmGrF|NvL zcXeM3SVN|x60()rUDJ!9Xfij5{TlZLB>p|5kxAB!;Ili`K=J(Z!y>QvOD?(sK+kItmE_-wvy6rh_>p z0MaA~FetpO!U)M*QcMz#?-%@6pNMUunr_Jhv8mgN7in#+bFKCV>h^-KTBT_`p3NA2 zoNXld*mywhezqH8k?NrVr<#pb9e3WD3R0gNUhYJm8fsq|H&h~a`CQIo8k!Hlx6ay% ztv|Mru6QzEq^AX}m21l-KR+a6+uu(|qiOroZD{%{3pP}3=OyfEo_#D#S_mi2jJEr2 zY4;NS)t12Ki}RP&waz=_@u?1qy;^3Pks#AZ2-Vf{!@<%^M1}Z?TM)0&Bed+E9CV}@ z9tFgALD6%KUTC)46L{JdLnvm?o@M+-f_Aiv;83i#wFDHia}{TNZ?_0ApenEkL%S<* 
zHq!`oL#VgBQ!tz1Kn%x`tAdsF#A*e<}*`jnK!eS<b zsTq$|sVe7dunRkhEKbHA9EJ?00SLz4K9-xd8r{U`vguYQ!hWR(iFwA~mVo4WU#xFq zmCX$fI1Et91>ZA4VD`~HuD2I!^sm}Lp=7mz{u1Q0r+{XExe9BQ4o@>|YOHpc(JFbn z%zz}AAs9*p#Syv3i;pVLvCa>0-s3<~>CTU=6pm zS`+6NmrJ|@Xxq+W2P3K~9iScX1HIx>`cFq;z`X{D8S!@uy$Pa-!^6Xf6heVwc1u~;J77wTuGt4BD`pMt zayS>HFPN%>)UZc+`q4kM03N{aT(yL82#CU5ZJz^g1FIP)2Wo}bh=YTm^>G-^<;hkc z7zlz^W}pay-AUH_3(bi8q&^|s!|V1uJWmiER{OrSzCl96dX;N6B^RpotSFGii&B9? z@cVi324uTN4dVO?q7{fuwLkJ(33ERKHR%XYbuabE04UAG3=&~DYnjx437SDCL)C|AQbAfY#SL{#)KM*6igOhu0c3k*4%%GmQY zU_E0~&6BF}`jA&vu!!3;y!*tFEm+gj(p-<$AR0x&uTqd#kY;X|{h`NpQCi%^^`RA` zRvNS-Eaap4<1hSBomSjx^Z>wAa27njwrXTS4&k*yM{qMI=d)xY)~EC%ZYwRQ0Qqli zC-iU>;7v?isSgoMIlvuHQ3XtJ(-f=%Hd|*CYR*&&^aKqwt6vs0mWM)fAIcL`i&AvTwK@ zR@wTJgr%2M&d<-YQjGWszRXlY>TtR2z7cpH(!C$g3u2`9dA2a_|6uUZA9Wv8D&(3ss4>RGZkTE=}zhLGA$bjJ$Zr%8f_oT zsjQL@*AY|>Bng5aBR4l%MBcW&usVXnWq;^aQJeRjqgKs2=Tok76B~`@>COo^$-R zzPv?CEb{?#t=TcQtMx@_)ym?A@9*5_$Rh08r7t9MJCSvc;=mj+nwwjW>dG6Y7WJ?#Dm0Uf3D*yKMmAk#-^tau2RYYp>q47NEp*B?B`PG}kJ`#!$%)4e?%q==s*QqxkqRg{t-^25I zL(_E;Os7MOpLP@znlEneCz{e>z z4qQ=3OU5It0shrNL#UEuvOkc58Lw9H+5$#Z>|#Xq@Gh0usJtsaW)tz-*j{-{JkN!d z-OXv2X#@s(rpJr1wR?iqk+1NHEINc%W^=YKg3{jo=8nCq@xojLK-kO+76(f-Z;JHD zCWRz`E@_vPA0MbIP3YU3@IJqI@oM3>l4Scuwd;)YG&-Nn4AZE!F|POJ1zP-3c=e9J zcV^XB5Yn?4%WusWBlT|c&4Ys^t)H=;uk=L!UWy{GuC^Md_B_4Fy^sEg9W8&dd=rWU z(5>l%{pUCjjx$m?>(8Hf->hhYC10Qwc|dIw>T@!xsnzhFD8=6?%3JN!1h zoD$h7vuiA(kUlL;U|yHi^YJ19RMG?R0}w_Ew6VM{Pb;ABO-$ZDJeQFoQJk?E941^RI~uJ_C>!9a!pR zHL0VdMu3wbEP)%vlbJ&8p0?KiTK1t2NyJSN`J3_O{;U;=yij34@Y|pUHz+Km^_N*M zE-`U%n%7kuz~lZw`2zDLo8%2yF+(mfgd(KT>v9Q%JrGLXb_6ypxA5QmDL(SH<%cZO zB_+~~8@MF54P*?weh{i}Uc>hXGc_H=C_GShyXimM2eAOX59UD@Jo*%cKgiOM_4ETX zclds9+r+}VWg%ZfoEe{{53BW;Q&u3Roo|=YBe42}_|)Fn#qJRx_+Wc=Hwrtkkm}btzpSzm)JYTz3Yw^O5(UvW^tVejc5k!6uK_AF znWrYt5Iwyb-tvA%n4)nzem&{=XvX0=&z##8yBdIs!zwp^Hxc=aiE;Psb#F^)q8JUN zzHWH-4-=AY5}GYXdkyM1N4dKRWwAh^?sC&zWsn&OLZqpQd_EX$^$I+8>m9^CV^gtl z{}3pcht}=ZqAl-{hX4>w3p%k=6%nKB24CyWO@Vh4Fd~cX?;qx-diqn;&3u`3SX)|& 
zHT<@9dAj3Al4@~XOc+WgN(_P++d-|3fWxH#PYeOkrY1V-==C~P@BGMqH#GwmIVh0Y z9pWzp?G0AQtapw&qU3`lM--}wEeQ4eLtW&Q?G z&Fqmbceh>#fSp?g8k{zOeto{cT+IOX{`2{^2v9zojE>hu+b%;mFy1|W%jQ>rWJ%a(Kf?I!EGpb_L2*1(GpQI>ak-u+>6n&I7V zddEx!-QZE25_WWzeF6l@n9?7Q(0`x@f09{w{_~M@I--u zmI^2&64wgq{-(3$dEL0@vzrv3xFJ6m-z{(6T{Fl#_||-GW$w{q-+ibB4DcaZs@ND{V)<$HNd2oaRt{-uKt z4Fxoy1s4$-?Xa#nu(bm8qV^v3z5&4uoK(Bn!>|u%u~c(I*lw0cJTrUTS`KwUFFy1~ z2p@5H;E3q9s;26dt6JF*-^>Ml`r8$Z&py;UJuKkra|sx`{3k>&gkmYu7yj?_-0r-v z^}_<5C;9PlF^1nYOTZ$C*>qlD65sh7*Lj|q#2Y^SvAQ2Q{;Qf*ry_F5MBE>Yg>9J- zn}x5B2%|a_?}HD;dNwN$SQbX^wwJRpp#hslQeC2Z^-3h3H=ZIJZBfEXBV&cSNQ;QH zjQZg(quwHM#p_L|H-&%zNwQi5oEOheAy6j*uPa>=9%P=hABjg6;)h+bbc-EJM{I;{PcvZ zN4{>+mbZI!$AkxZ!eRT#dVW&M^p&Qv*K^vP>2ZZoRfdB{GWOZo_|R@Ap0M@B(aw(r zZiyV{Q?VP@Fwbblen@j3qf%M`CW*3~9A?Aq+c{fG{jz_TcbYI4g2#zuAx*g#H{*HVh*f-k=sk|20`aqNN zXK(ZWwDr|tQMGHoYi1a7fT6o3BnMFGka$5l1<65DMWj21Mo>TjK}1SWKq+Y?h6ZUw zy1P@P;Vj?t?S1xpzU_Z{apARQ)^k7i{kwm$!f2Ngl`d?J`*XNX`*4L#X})QTyhZ8g z;?(HJSKIFOK^3DtVX|zpn71f9qKtuXx1benlH9{SxeVU#Dmt3Il8+r7Z-hnuT0URf znY~W8@ZVQXAB*_Rl0`i~5CjG)JP?zwJ-<3pIqME39VF;r>Rwb@!&C}cR;C5+cUum% z(F7%E$t@xZzA!33t2^b2^RVwZ!S9t>rlc2sC|>A$Y8zNwSbux2M=I@iI>fL|C1Ssu|zNz^lVt_i@`<+dZFdfvUYDgir^g+bwg?aj&(dgw>(;mz(Ps zhm8s#N7o8qP&Wl%0yj>){z_;ZG63FZOh6>*IEK0+Zh6A5*ZS=F=%zJoM9Aog3^K(w zQao`l5?a0_8sPw0<1Fdzqv2xh3Dg<})vJzszpS1&ymk0ncs^lp6amff ze9-kVVP%2bbu*FsluO4x?;`>~{3sTFj74xQy-L!9+@~8f_yMY1J*-DdE@nPx(L;$_ z1djc9tyk@*_FUY0t3X-HeD#5ube4$ihRxYX+iVU$LQV4Chxgj@TqN7AZubm zl2?d94K$(pDu`R;xTOp+-ZEK#^FOciNr=7_r0^%C*#_ z5ZulDIYd)+J#zg!UvVZ!oOJZ>ou~E=6}E?wTQ-uHS=1CXz+eTcKOH+kYGm6Vt$BK^65UQtm^ZEXI& zo{8FL%YdKdRx28@ydlVh_g5so^QSc8L$C>tt zT!>ayYBkMmEjRhTi?P~^X(~+aX0-jaPYZ-rp>KiN!FU`M>(HDlZ3)Q;pm-X6ULW`RYUuQNIuC0q7+NoAxMOzx)1dWVKbDh&lJ7h%_@FLWleWU>L90DgURn9$L!6A3 zre?2!g#}sXBd(g27e6$cC)$T-s4#8Wubuua&sJi_LnDGql~wHVett1$G5i<;Wr^H* z{AgV9-JJmBlv8UIVT6G*K}C5v?yL$)$;wBr2@t?<6wCD`*FI~w?DMPd6Aqj*?xWz% zt@R}j<*4pXeP`8(s>S%GpFN5Q|M=i-fU4VQz^o)%vDCAuBlpa~IN# 
znCI$5M9_}73?vx_qBbu^G8rApH+95W40j#Z5`vh!3i{h!zuc{Q57U@S-o|bDIfRS~ zE$e^$S@c0L7P|&5wk?6e0dyy12YJ z?6@tol-A7?eFPDs)vuZ!3tupOhkca5p1JsxJsu6lMH|10PRep^nPAwH^l{M8cor7$ z=;>3AFdC5R;f@Y>T$m@-#S4pq?75iSeiZt3Y1%;j8wdB?_Gd*-Qp`O6`DR>Fxw8UT zs?xz>x&L!)iy{M8aZ5)LY@(NsxSrX<%2BFj>kQvPL7aVDP%l6tO|P3-@*qlkDClyu zW6fSYsGYC1G{}6~&h^`KltfujR7-q|05emL1sooJ5fF9%+B-IX^52#Tq3Xe79mL~> z>IIW;$BenY-PR1)UWB0YUcP|4%*MKGri#rjsHM9PT@Qduk&u|x)OYXJcjTrtLC_?# zpkUCzLborby1B*l_fLfvSrEhxZ4>G9g=**Qm-M$^OG^Docjf9^e>j0zBYm~orRQ}W zfA&OzwvROc^PbL0S{jiKT3jzdOV*C$I~fofBDwfLOE0C<%!lb} z*d9!T4G&LV-%_F$!!kKJ;Sey*l^Up1(eeFrFe zZ+WMa{3P7o2R|&Sbm@11Q5Nq5w>)jf=Ll1;5es(mblYpS4+^uquQLi`EffVAJz4UF?vcpIM)r3n> z@2L_rWKsZ~2eyukEbjncvP1m01eIg&(x~u)wlJ5i!@t|}p!%by^W;U|9nVtDo2UJxjXqdt3c&S}< z+#`C`dtWlGq!#zH+WGfLs{Yd-rG@em)#G#REyQ*Dmd!6^&(es^KQ!t4ePv79R(vlh zDYeiy8UI~!z4YHQr@vp2*WjCx7$p)s)pluKJ~~T3PE=UJs=JZc3FU{QFYD>wRt2#G z2oe%3PeY^oHwmSO<%|~dfQH4S4|dNYCqC+TLmszk~Q(ZdZRe`gz`)@$*(9Y@G#*kF-UE zE_nlLs);Iw0&zUzDXzp2GOmT1JcV;E&uDE306noYY{zYH39-$0Rqc+Eh2|nu_;KPK zje`Nbi>)H7uX-eZUmFu*&x*5{K|>|s|NFR%-{$B>7(ZRv8;Q|@3KL?qrsOl6Tytsk zvF{ie)#ndHeM~N;x#zAEM{%}uVQ1M_)g+fS`$cqQUr?G88Z%e#MtXE#YCV%|o*`^T=6g>97Lcn8x zAK=pIUhR$}#J09^vXQhkar)G4p`xIb*>_PAZ!lj~*^tYATt&ssG_=1<)w#s6Y3PU_ ztK);8%ilcbL0g`urqUOD`I3{R7|G?DcWd_pz~4(FuU$-Nlm6#?K#W(+%ghR1>0g&k zrXqld#c?r9^Q)rr?+;8&)ck&L5!6PdF`A38Hi3JM(v=}KLk``zH*ctA{rO`h$GP-3 zSc(Plf0~%+sreL1N4yzw`m{J#+hnqP=;H%re_zBWg|Pi{ zvCiY0jg^=mKopSQ=#A5qL#85A8=yDa)1{e0(C`@G?NdUq(f;TLD^HV9|z zD|k|YkyRe`<&_0kfM*VewHYtRyEl$U0=w1!YO(t!9<3`Yn;dN=@iK&H8CdMEtrh=7 zn=U*r1S32Qq`Cg#H2W<%-Ftzc;91_MRFBQPyb=ds_=o;4Bgm(#$T}DDcU85%gaP2} zb1B2Qnss>QKczB|Vyz6#P_ANlBlv9?ecRcr_k&d?;4_ z0>q$}2HTJG9bTppy+Z0BW$PN10a|w9xfvu;PtMYE&51>~ptrWV9Uqba!#0G!Bhej^ zM8>tcFQ&rc!m%}fMS==8tZd<<2M(*JxssDRG<~RnLstu7F&IjUC56%c^6cp1@~yZF zI}9?67q_xv*HC2w((Xh580y0nMWaP~8lWjbCG&JUk-~DAki|L11xC^1Yr7eBbn04o z6fd+Ie#=^lk>PyN*p>18S$_8?xywhsD{t)`K*2pYO)$Su4Yj`aKIG63qLRoiR_r zkSs18%0gX{O!>xkY)NJb#!3*^n}>EFx1e*5Ce-xH9avcWt`Ywp(b3npyA3yf&Ols~75^^Ic#~fX#Yie9 
z>rf>>XfTzgetE-y^Q<;3{Y6E8+r*t|t$)Gwf0rOuQ7UXTDN!k^hSb?6GDQ1#3i8;? z`6rq&`p{}HHhY z8A*f@;13cX>#&@Ub>tNmzTBzZ6qE#q-Z8`Hr!e3vNDChXMK;E*plTvr_OweI7hb`A z;L;cquxE~iUD6#t*4r~-rHEzdzx?Q5>>|zWjp4ZkifgkXB5-R#h*6@oky4_uuDx_g zmZ=}wKpDb$^_~FFQd}?R$+kPwVqp^~Uskct2)F}+?8&jx2dc8%MT=eyhfERqYeX1p zmg|#_b&pq~E5_l6;>3Z*j?v|({wQm}Ur zO*_jle+raHlGVMfFOwW=Xaj3Big@HX5N~uJ_MTJK=67$xfkEak zzDf)_K|rj{Ol!17lMZo_bnLiLmip@lLFHs(U*8(^y!=MsHh*mWmGyU;<={lHVYTYS zizgAXueP302U=)IWA9E5jt>_56TNjeFBkmhby&*Bx`X5gT4{$#7ODGj{I=|GL|D>> z9%R=DQV2tH8+A#Mx9x^Ib^WJEWlt~%hl+ck5LeoW$F1@uE*6>UJXRtCie&f|lN($y zZHoXA9@;s%>2g^Szm*3-!~+AHAO&N&#JTb6s8~XbdXx|GerEscyXTB?VCn*=oRI; zl#t{*9mMqA7aK2GjyP2Y#U_DH&S7USt|J~kk64ZZH|!zWqVL2*G@LX zYh?5xNX|e+ANL1ciLT*(JM5EMe`0w~AM`t4J0QBK{9)fFJnMt?3nvrVoP0C=YVk7OmgbAGnxZGD~qQm zd-GoyV_(XGzw#=cMCO-{Nxkpw%VnXUEYUvk56NG<3ScDByT~)*G<%>%Qib;(uI^eDida5Gzs zIYwLj7~E2<4eM*$I^d4>fNlwNCaRsaa##Ct{xkcF9iq4V_S>3tP+aI=zsFp}61Zsd&F1IU*KWZNmKO^&^}rbTrn=Q$ z9CU~F6+7$^d8pbj3)X34kHTf+d^7(zlH9WWPI)CO4Z7M~Ohe+Ou1*%u2OYtg2&wG_ zT4kz006cMJahp_-Xi`)1V1oRUY}7mIFLyxf7)qK4y3UIi)lvWF2>!3-RB+P=fD5e> zOMj1H^Ul{FJ^4IsXWj~knS!L&m$ycFJ2D1>|bpiW?W168_v05xd0SKB#d8aI>T>A0+#{Y3)8RO z581SB3W59cWB9APfDAx&V{>!-?IDQX59q2ewQ2EJSI^CW0R$AI3>_hx2#@C369kGd zo_Ky+uzC)9wm{RU8gxE6OY6_%Ic8;Mlt!AVp>PXYH1EILcpX^cguja)g(nlgi9l^V z=T6TA1%PkZlr=Sp3(dd1nYI9}=RaDd;GlX4z?7~ItQoFY_WmRzRT z0ifXowyPSB1SU$Y@dB3lEVFh4cKvPzoVm;1Iup$lmQ}gi7|Gz+lOlW}DO+6t@DFb0 zp65d)asfs(eg`go#A-vdB-V9f$llTVECdZy;4?)*Zag%4Hd%Q)#aLnn=}*Upxv%k$ z{^ft*luT`Se53>DD_~%1y191M91LP0fstP!v{E&7%$~N85Vk5qet@6R;yH%~I{6YY z#|iXQ_f4hEs>NEF*|l4735tkgfoecNOLF}Ni9S9SM~poacghejcDAX%{5@&rCC@(0 z-1&Ll?;xwn)f^D3$pRHZ*<*i&@bNyZKr`mcSs4fe=aNdgZ5FD5Q+^u$BA!Vm(_z9M+<6@lQMvV z0XNN@{(}c}hK7d1I<9_GONaEI-Y`LR6__WoNYDSEJ~z@BEUej?SL5r3?*O4vxIU%G z1yaERkXW6{M&atB`Cc+fkqxTQ)CErgj;S|ZGMWHHH0JJPqjdRFKyGwQ44rdk17HqH z55!l`Lko-K`PZPh@^#=rLPkmGmWtdNnN(lADMAOH!Dc`9r_sm& z+lTc(p)J6h=x9P&!8sb%lx+YuhrP3)L~c)BiI)R$hlSk@$m$2#WIfwBVSlN;F6){?058v(;MZ}yN;_^Sk1_*!(^ie64}ls-S5 
zhkTSu_4k1wh(WHBt0#nu195}V#lyp6%iRFaSfnU~S&@p?h7btMY6TkrX`e!37T;RG zkdCJFCKWGI4-cO< zcHGK+j~+jEa6pg!278SprrV&lpCdBq+wX}d>m71e!g3AZjp$}9lV7|9NF8)?jL(P0 zM*wL27H?2;T5^sDpC{K9tDf9LoJ1l$I)8?_LE`UKOLkFEv*dJMHN4xR&M}euCG<?$8fhEw=JAq`h7{bpG7->7^w0A@u_>1GwvhU`HG@#uD?# zQ@hN*9Xyu0rQZqp3_xvtwh#DEd=8)gbr1cAb!iDDR$;=4_ohJZLiR(FptU34hkB|^ zXyLBJk(b%vmNnS;&WnO}1GpRJISk>01wL;%hu^7lb;UFVgdS#FHeJGjh~qr~Sq3iq z@Z_lvyxzNhznRuZ06>Z;DZyK(z8_sf;YconHoefoSHy6RT7(7-Mx* zA-zzvUpm2x?l<0xW>_q1*UK9(FYv~XP;B{=2yqLHm9GWIJa5bDQXOte|A?ol zsi|X35pLNU>C$go_@IHR7O3jMImb{A0w2(L6erV)r8-nco1mr2XIy+A8mVv9xOTIvsaCPqT!xP$R$gFg$Bt9N!IneB!@d!k<1y5nm_RU z!Hf+{@i840M#2&@tw2SKLuf+*gceRnLj{FBFv~Wi9ws%Bf3**&m8fUg zbv1{1g6F?B0L&qxowMWjKy}Fn&|m2w0L?#7n)M!9(zAQ_4xqRrgw+%}aQ3M~V8!uL zR5BBNZ~=GmFUFl_T*)@tO>zgo}y~yx7`M*6g;9y_BdT|Jc<-o zie<;3!k#LoNMPA0tMm4l)9zDnY0Sk3<8w!a1YUrB`hZ)%jEaDOz@HY!^T9oDaryl7 zoo{h&(RX2Cw{dh71Lk^~?43JV-I))RAdV1KHf@^kV33%@Xp#9B8im{J25gz{0I9HS z7y!AYSgi!yscwyxJ#(08{P;1VFrP)UQc9@k}wKH2i_#f0aPk>aB z?ichCDwD5yRJy_xf0qKn^D8av&`(gKtR)<`iOS;smdV!+lM9oehd@x#+nvvZOE199 zkn4)xiiYAt2-<<45ku<;_WY^f5T*z&Q+~k8+5S1X{{6QwzJZo}^6%Z>eHMbIYfU1k zR11;nx&U>I+WIsXJ)lV2`t_^Tt&ar-G{E7h1~bY9GtNf3{7i~5n`r_W)UqF9uaUP1 z>a-IstX?sqzmhWQi_1smzsqJp{WnBy46`kbcFrUn-;LH|MQ~bk+Tx ztUUjFzd4Q}5S}b(uJ&i7agN6K;F*IR$qNdHS-mjGVfuYq%4ETj#?EL9*s@%%snGR6 zR_=-d67uo_eS~8-ZHHcsSc6)&@`r7N3YI1PLPrWL8MmKMA2YbzQ)>_bjSgqn8TaOY zC{6Ht!~|G^xUB`>v;|eC4?GPUdNJ_p`}xmM77G0|SIqt$8LncQ%8J84wj6UKFPb1O z39LpsYU%8NVU(bm zrS*DTF`NSh|HW?|l}A;=Ed{9s>UjB74sOWG2d`XxFWey`z(8<8s2mq<_y&e>1DNBw zVAKSd=Zk#gKtf4`$lJKUh`mVfut<$WRMZ&tsuI(3O4C`bnLpc7MbEv?eO($+7Q2=H zrOPqy{STJ*eu0Aie2gsdu+ti5pQTrG9oFqH=Hp|WUvuh>6kGP!G+Ov=SIq4=r?FkI zW1g4arrTD}39OCdMS=$TDZg>SQ;4sRC6Kg%0-Fa6sviOVs&B>mmbXIHTeI6lK zF#a!}KQFc|Bqb#wUi0ztlJC%V(%@FsjSyp*-zFad~6f)fzQ9CUN25pj)Hl(J$ zqYHWTZ&RfIyaGD4;5ND(V8=o2%5qz5cjex3HhaRSuJv>>0VCjz%%~hA2I7B@83mwz zK}Zimz_Qfb3ZcGfX><$VvO+@2_ps7>E-u26Oy%a*3n#A^Z8(6m_4b43pf2vl}am9_T^6RhJVmBFTt3^|xraVlGa0Yixq7 
z9r=Wkpq2ndZ>4GoF|uWrZ8cf`vojuTi%?aJzN6}zB0-G9D$pVfPw5i=+6Cc4O6UcC zvI*x-G_%@Ywfo*gl4h&v8j@7wqS*qB@qOlzn^mEr;!3`OmZTo}aB{^GB4k#cLd|cX zkie}?vclswL~uSbJiO&~bZA2F75*C6_QC4FMgg?u3x>iOKTg0ACcN~EoHclHPV|&9 zln_IBXz_~?&9zG78t}HxvWXFG9;mRUhzNAy$HqD>AzmHjQuPm==bNAtBO1QZ&va;PO7#qr9g4683=D9MH21fj=2y%{n7FJ1DM5pW0% z459(!6B9v0ox7lg&D^;ZHeSai-DFq-v$a7~524L`1E5%7CE%x|C{&y*?c_*=m&~1# z(T!_G-W;DXEeN*H(sJQl&w6N-yD98oD2WJSY^I>aq(prV@xj~6n#**vw>H6fTeU+b zriSQD$}tuU)zW#MG=SYZpJ#_*4~PgInwpi%o%hKL(xtn?#)y}8+|JT@0fxNI{8e4} z>r4!VW9JPZVk+wnKqP^!m7p1prUISQIkU8L&XHOHB6We)_Y~+rXfd+Q0VLjl*O+v5 zTc!b)rkPRv5~l7od35n#?t`TQ=qqZ@1sVQ_iD}x-(?HH-a^ozh92@mzhDYoewf!cXDBi){VL?;!N)koFRR zv$XDIJ8Z-#qU_)xWPo_X<-NHewTSUG=7W+&_v-IE*>%<0BAaRsPr+PTT~-nBclN;W zXaU-WVi~1|5U5EWz;7g2SE)!9RfPEgh)e?-uKsjmw&1sa9>Zik-ELm#mEnTr6tFWW|DBdQM@--xRu1fy>f#8^(n-A z5aN7j{q-w;a@lUr&`eJw91gEbaoHRxtcxPkVV}v~8i3D;3&QZblr$+F)&~L0HRgzXgaO}@gD_y~9YKM!3fo(#vqm%l zjU~iDsuU^?37~^zA8jx~(RM{m;%WzGE6=_OO^^sXCUgNkP4seL|6wHlVKnY5n? z)T7lkjA0pZX2G1A8_@2-*blvW|FXpUSM?7r=R-E6+}~-Gl$6Z-vHK~&PwLMm#l+Oq zCpeoXYZWS(s>EBo$4B;as4&HgYN5lrfGlV(J7)VYdh)0|gWPtqkZVCNd3nB2{rbWU5YE%i*VcC7i28xmwhCGDc z1UIy7#M@?;xPg!~T@bNbs%{_TAQcZ{_Sq7i`07CvP_hoQ(2QtOJIS2PFFfa{o?MTwd#J@+)e;pV3B0fx8S(e`~X^QN=q>e)hJ@Ohn zgOj9s8E*WqWSG}|Mpp-7EpI%p=u9pVc~bjeeU zuybZg^|us{3o}@fw%Q_8#n ztAgA8xbS9RY{M{d_mIpL1Ifm7!;rlaYV;yO5r*A)nOICN(BVaMIC3HbZWo9H?rw_b zQ2~@7@XMKq@n46BgyZHum%uK-(XSi3s+(Sf@-xQ1eOkNzHs~s0#%Go3@#Dui0q4Kr zlb=H8w{~_eYaq=HA2SwNGD`y?7oaX-M>ebCp9=$Uffu}`FE4Mj#$0?&L~#@-cYYRZ zZXJ9VS83kL6+@St9E^4jrlNubQo-kGX7KR&m_)+N(a<7VceXQiAjHb*7C_5|bs_0H zYC|D>8hgsNVruu6yn*QO7KvahXuFNV6t3Akre~%d_u*ULpJ&%EdxqaUs>h9k$|QEg z%m{;%P;rrkGW=AQS11ltvkGJ@$GjO>>Pm=$&Y7^Mhd+5Us))dSrAQt!Vl@gK(;(Pk`;nkdgV|*7Pb~?jl+@(l~blE&+vX<$0s*@3wt`m2Tgcc119R079Y>i90CX6rER+7Y{O2 zXyd_oUN7*jtl|CGrIP1>IR-4Id_7?j2u31_<`XxFMZ2vX#uz{3E)_$>y&z>Kycn2< z{XLP@Z&|nRfu{#>^Z+o1iAE{$`DrQxSmBFyP@oh=EznRz1GJ=NUjzxOulI^60>;Y9 z@Ip`>)JV8eap&i;YW3|)yrebw@TEcU5)Z3O1xL92ixonOcpEvG-9557^eQBQ(K0RJ 
zW;_hB+w}y3KA4!DZmVgmg@G7A!(J7E8$QoSFA0jVTRKvnZle?o^Y#UoWdvOwVvc!> zDZNcBU!<3KMT@E&l&=T5KLzD3@LbjllOG%LZhY+cFTrT?RH{nBtvk&g7Q$qOqFx!Q zR!HG^ADIOJfFImbx@&NcSQVa36DiFvvLm##Ne?DOert(mK#(r2DlnsZ;A}xO)QXe+ zxOvaZx-r58p*&ckejbd)JZW8AZKeZ$YtOcdiKro#XFv*PVdJMz8SD@HXkI*!#Y|c( zt%D6oBVeXPL?iTZqbp$_&fRWrY!EjKt!r0!$?W)4;bfueA!xSzt4Fkdi@RD9TTWU4 zrOT0DfuiJKs6H~>u{KnXe34+7+}1CO6w~C{{ylZHG55^w&cpEQqhk9| zl>_g(U};O-u%^@S$Y2YLR7qRjNOlVDWbq0dCxTzly032j^8pP0_jZJa)m($dC9r!S z$!%)GZ6ej|e&#d4juDLu!Z;cLZJ;869owA`d6gacfjzZmiFxDKdH!P!s{VV8CksU9 z&pSy_H!2hN>}&YTDmAG(-9e%fZBSv00vX~3tOp|?-mKB781~l|Ar>Wlj2aW^6LJYZ zh3(P-BX!@xnJs>!b{jtj(tusoAvVHj9UaPJwF^FnZ}IITef&2}qxYWV|38%P-=(+U zPf{4K?GLvp@LBPVcqD3Hn08l#?F26$pU;8s!O|ZBxwMh7tJkG}W3&#oE7lO%cA)}lzN4V%yMyLy0q$7S;QQ@Y7x|KcriV2-kJshOBZ+>Umgi19W`WAr&CN#it;h6)Y zRbZI%)`ry{=RD~$U86wfc{tSjzLAj%pHY#G=@U=`0FrOGgY3CigI?!+o391QKg!zb zy~0!>xqlhS6#OT&`9BXmo{8xbuxl}lE{?4vLf{exqfZ!onn2y6+VZlkbg{J4l(^){ zTwIAC435_LO|!?@4?I`^D5`B?C;~4BsmA7MOYul($w|>Cr$WLUm0)jx_La~qP@)`@ z>T;DHk?}LR3C>XGlD~CL zpYT;wRPOsMiSdjTVOi*Lg}u%^bKZm<&rUfNR{oPX@^_UDp3;+-S8=LPGo^43hGUW^ z7wt+6^~@C7+LSNER+33lJ&~FjvNAy9K}Z$?Qh1{$i|GKlJ4u2fNamSTA5(G(ety^; zfrfUK2wYVXRhGqE)&OGlwcwY48X)Z45@bXR={Jn3bMWzXE%z--SW|k&UNx4vcfIB; zcb+Ft1zlB_mzx3}E8GxKI{+J<|Fx1ew>s4xCX zQN%Ux+*Iz|r0RsU&y#)|ui<1P)1|@)Qd|vS+#$9rGH#3)FMleSRz3YxOU#hxWX<#} z(#;yh?m_shs$Pp##z?V0^)Xm2a!ssc8ve_x20mOeGXV`guI^;SLFvh{*(M_L#&TIS3+p=zc_3l43#RftD zQN^Srj(xtncJ$J5y|jRx-|o7?C$vz^q<{|jKZXt(UH||9 literal 0 HcmV?d00001 diff --git a/manifest.json b/manifest.json new file mode 100644 index 00000000..716cc4fb --- /dev/null +++ b/manifest.json @@ -0,0 +1,121 @@ +{ + "dxt_version": "0.1", + "name": "linkedin-mcp-server", + "display_name": "LinkedIn MCP Server", + "version": "1.0.0", + "description": "A Model Context Protocol server for LinkedIn profile and company data scraping", + "long_description": "# LinkedIn MCP Server\n\nA comprehensive MCP server that enables AI assistants to interact with LinkedIn profiles, companies, and job postings. 
Features include profile scraping, company analysis, and job details extraction with automatic login handling.\n\n## Features\n- **Profile Scraping**: Extract detailed LinkedIn profile information including work history, education, and skills\n- **Company Analysis**: Get comprehensive company data and insights\n- **Job Details**: Retrieve job posting information using LinkedIn URLs\n- **Session Management**: Automatic login and session handling\n\n## Security & Privacy\n- Credentials stored securely in system keychain\n- Headless browser operation for privacy\n- Automatic session cleanup\n- Respects LinkedIn's rate limiting\n\nโš ๏ธ Use responsibly and in accordance with LinkedIn's Terms of Service.", + "author": { + "name": "Daniel Sticker", + "email": "daniel@sticker.name", + "url": "https://daniel.sticker.name/" + }, + "homepage": "https://github.com/stickerdaniel/linkedin-mcp-server", + "documentation": "https://github.com/stickerdaniel/linkedin-mcp-server#readme", + "support": "https://github.com/stickerdaniel/linkedin-mcp-server/issues", + "license": "MIT", + "keywords": ["linkedin", "scraping", "mcp", "profiles", "companies", "jobs"], + "icon": "assets/icons/linkedin.svg", + "screenshots": ["assets/screenshots/screenshot.png"], + "server": { + "type": "binary", + "entry_point": "docker", + "mcp_config": { + "command": "docker", + "args": [ + "run", "-i", "--rm", + "-e", "LINKEDIN_EMAIL={{user_config.linkedin_email}}", + "-e", "LINKEDIN_PASSWORD={{user_config.linkedin_password}}", + "stickerdaniel/linkedin-mcp-server" + ] + } + }, + "tools": [ + { + "name": "get_person_profile", + "description": "Get detailed information from a LinkedIn profile including work history, education, skills, and connections", + "parameters": { + "type": "object", + "properties": { + "profile_url": { + "type": "string", + "description": "LinkedIn profile URL (e.g., https://www.linkedin.com/in/username/)" + } + }, + "required": ["profile_url"] + } + }, + { + "name": 
"get_company_profile", + "description": "Extract comprehensive company information and details", + "parameters": { + "type": "object", + "properties": { + "company_url": { + "type": "string", + "description": "LinkedIn company URL (e.g., https://www.linkedin.com/company/company-name/)" + } + }, + "required": ["company_url"] + } + }, + { + "name": "get_job_details", + "description": "Retrieve specific job posting details using LinkedIn job URLs", + "parameters": { + "type": "object", + "properties": { + "job_url": { + "type": "string", + "description": "LinkedIn job URL (e.g., https://www.linkedin.com/jobs/view/123456789)" + } + }, + "required": ["job_url"] + } + }, + { + "name": "close_session", + "description": "Properly close browser session and clean up resources", + "parameters": { + "type": "object", + "properties": {}, + "required": [] + } + } + ], + "user_config": { + "linkedin_email": { + "title": "LinkedIn Email", + "description": "Your LinkedIn account email address", + "type": "string", + "required": true, + "sensitive": true + }, + "linkedin_password": { + "title": "LinkedIn Password", + "description": "Your LinkedIn account password", + "type": "string", + "required": true, + "sensitive": true + }, + "headless_mode": { + "title": "Headless Mode", + "description": "Run browser in headless mode (recommended for production)", + "type": "boolean", + "default": true, + "required": false + }, + "debug_mode": { + "title": "Debug Mode", + "description": "Enable detailed logging for debugging", + "type": "boolean", + "default": false, + "required": false + } + }, + "compatibility": { + "dxt_version": ">=0.1", + "docker_version": ">=20.0.0", + "platforms": ["darwin", "linux", "win32"] + } +} From 3bdcc7184aa1a2aa4b7a452283dbe33b88291ca4 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 30 Jun 2025 21:26:41 -0400 Subject: [PATCH 052/565] fix(readme): Update DXT extension installation link to the latest release --- README.md | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 789adec3..ddc1ca02 100644 --- a/README.md +++ b/README.md @@ -36,7 +36,7 @@ Get details about this job posting https://www.linkedin.com/jobs/view/123456789 Choose your preferred installation method: [![Docker Hub](https://img.shields.io/badge/Docker_Hub-Universal_MCP_Server-2496ED?style=for-the-badge&logo=docker)](https://hub.docker.com/r/stickerdaniel/linkedin-mcp-server) -[![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_Extension-purple?style=for-the-badge&logo=anthropic)](./linkedin-mcp-server-1.0.0.dxt) +[![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_Extension-purple?style=for-the-badge&logo=anthropic)](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest/download/linkedin-mcp-server.dxt) [![Development](https://img.shields.io/badge/Development-Local_Setup-green?style=for-the-badge&logo=github)](#%EF%B8%8F-local-setup-develop--contribute) --- From 0115447bcd431fac0fd0343b8a7899bf52c5007d Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 30 Jun 2025 21:26:56 -0400 Subject: [PATCH 053/565] fix(readme): Update DXT extension download link to the latest release --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ddc1ca02..6944b8d0 100644 --- a/README.md +++ b/README.md @@ -108,7 +108,7 @@ docker pull stickerdaniel/linkedin-mcp-server **Prerequisites:** [Claude Desktop](https://claude.ai/desktop) installed **One-click installation** for Claude Desktop users: -1. Download the [DXT extension](./linkedin-mcp-server-1.0.0.dxt) +1. Download the [DXT extension](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest/download/linkedin-mcp-server.dxt) 2. Double-click to install into Claude Desktop 3. Configure your LinkedIn credentials when prompted 4. 
Start using LinkedIn tools immediately From e3fcda8f1ebbe96db5e785207e25e451cd9cb7b7 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 30 Jun 2025 22:02:44 -0400 Subject: [PATCH 054/565] chore(docker): Update .dockerignore for DXT extension and modify README prerequisites --- .dockerignore | 10 ++++++++++ README.md | 2 +- manifest.json | 18 ++---------------- 3 files changed, 13 insertions(+), 17 deletions(-) diff --git a/.dockerignore b/.dockerignore index f36c54d1..a67e2756 100644 --- a/.dockerignore +++ b/.dockerignore @@ -152,3 +152,13 @@ cython_debug/ .github README.md .DS_Store + +# DXT Extension +*.dxt +assets/* + +# other dev files +.vscode +.claude +.github +.docker diff --git a/README.md b/README.md index 6944b8d0..0360606f 100644 --- a/README.md +++ b/README.md @@ -105,7 +105,7 @@ docker pull stickerdaniel/linkedin-mcp-server ## ๐Ÿ“ฆ Claude Desktop (DXT Extension) -**Prerequisites:** [Claude Desktop](https://claude.ai/desktop) installed +**Prerequisites:** [Claude Desktop](https://claude.ai/download) installed **One-click installation** for Claude Desktop users: 1. 
Download the [DXT extension](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest/download/linkedin-mcp-server.dxt) diff --git a/manifest.json b/manifest.json index 716cc4fb..0d877e9a 100644 --- a/manifest.json +++ b/manifest.json @@ -39,7 +39,7 @@ "properties": { "profile_url": { "type": "string", - "description": "LinkedIn profile URL (e.g., https://www.linkedin.com/in/username/)" + "description": "LinkedIn profile URL (e.g., https://www.linkedin.com/in/stickerdaniel/)" } }, "required": ["profile_url"] @@ -89,7 +89,7 @@ "description": "Your LinkedIn account email address", "type": "string", "required": true, - "sensitive": true + "sensitive": false }, "linkedin_password": { "title": "LinkedIn Password", @@ -97,20 +97,6 @@ "type": "string", "required": true, "sensitive": true - }, - "headless_mode": { - "title": "Headless Mode", - "description": "Run browser in headless mode (recommended for production)", - "type": "boolean", - "default": true, - "required": false - }, - "debug_mode": { - "title": "Debug Mode", - "description": "Enable detailed logging for debugging", - "type": "boolean", - "default": false, - "required": false } }, "compatibility": { From d59692ce7b7087265fac65dcda7dddc3150285e8 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Mon, 30 Jun 2025 22:21:49 -0400 Subject: [PATCH 055/565] chore(readme): Update badge colors for Docker Hub and DXT extension installation links refactor(smithery): Remove optional ChromeDriver configuration from YAML --- README.md | 4 ++-- smithery.yaml | 16 ---------------- 2 files changed, 2 insertions(+), 18 deletions(-) diff --git a/README.md b/README.md index 0360606f..02d1b15c 100644 --- a/README.md +++ b/README.md @@ -35,8 +35,8 @@ Get details about this job posting https://www.linkedin.com/jobs/view/123456789 Choose your preferred installation method: -[![Docker 
Hub](https://img.shields.io/badge/Docker_Hub-Universal_MCP_Server-2496ED?style=for-the-badge&logo=docker)](https://hub.docker.com/r/stickerdaniel/linkedin-mcp-server) -[![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_Extension-purple?style=for-the-badge&logo=anthropic)](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest/download/linkedin-mcp-server.dxt) +[![Docker Hub](https://img.shields.io/badge/Docker_Hub-Universal_MCP_Server-008fe2?style=for-the-badge&logo=docker)](https://hub.docker.com/r/stickerdaniel/linkedin-mcp-server) +[![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_Extension-d97757?style=for-the-badge&logo=anthropic)](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest/download/linkedin-mcp-server.dxt) [![Development](https://img.shields.io/badge/Development-Local_Setup-green?style=for-the-badge&logo=github)](#%EF%B8%8F-local-setup-develop--contribute) --- diff --git a/smithery.yaml b/smithery.yaml index 337fe9a8..4ece65eb 100644 --- a/smithery.yaml +++ b/smithery.yaml @@ -12,22 +12,6 @@ startCommand: LINKEDIN_PASSWORD: type: string description: Password for LinkedIn login - CHROMEDRIVER: - type: string - description: Path to the ChromeDriver binary. Optional if ChromeDriver is in PATH. - commandFunction: - # A JS function that produces the CLI command based on the given config to start the MCP on stdio. 
- |- - (config) => ({ - command: 'python', - args: ['main.py', '--no-setup'], - env: { - LINKEDIN_EMAIL: config.LINKEDIN_EMAIL || '', - LINKEDIN_PASSWORD: config.LINKEDIN_PASSWORD || '', - CHROMEDRIVER: config.CHROMEDRIVER || '' - } - }) exampleConfig: LINKEDIN_EMAIL: example.user@example.com LINKEDIN_PASSWORD: yourLinkedInPassword - CHROMEDRIVER: /usr/local/bin/chromedriver From ced0729f16a8a4baccc21084bf3b0e927b740f93 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 00:20:42 -0400 Subject: [PATCH 056/565] chore(vscode): add bundling task for DXT extension and remove .claude from version control --- .claude/settings.local.json | 11 -------- .gitignore | 3 +++ .vscode/tasks.json | 52 +++++++++++++++++++++++++++++++++---- 3 files changed, 50 insertions(+), 16 deletions(-) delete mode 100644 .claude/settings.local.json diff --git a/.claude/settings.local.json b/.claude/settings.local.json deleted file mode 100644 index eddd25c8..00000000 --- a/.claude/settings.local.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "permissions": { - "allow": [ - "WebFetch(domain:github.com)", - "WebFetch(domain:docs.astral.sh)", - "Bash(ty check:*)", - "Bash(./build.sh)" - ], - "deny": [] - } -} diff --git a/.gitignore b/.gitignore index ed938ce6..5f505b3a 100644 --- a/.gitignore +++ b/.gitignore @@ -193,3 +193,6 @@ cython_debug/ # DXT extension packages (too large for git) *.dxt + +# claude code settings +.claude diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 6f0ca7c4..7873fa08 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -6,7 +6,12 @@ "detail": "Run pre-commit hooks on all files", "type": "shell", "command": "uv", - "args": ["run", "pre-commit", "run", "--all-files"], + "args": [ + "run", + "pre-commit", + "run", + "--all-files" + ], "group": { "kind": "test", "isDefault": true @@ -23,7 +28,13 @@ "detail": "Run main.py in debug mode with visible window and login immediately", "type": "shell", "command": "uv", - "args": ["run", "main.py", 
"--debug", "--no-headless", "--no-lazy-init"], + "args": [ + "run", + "main.py", + "--debug", + "--no-headless", + "--no-lazy-init" + ], "group": { "kind": "build", "isDefault": false @@ -34,12 +45,18 @@ "focus": true }, "problemMatcher": [] - }, { + }, + { "label": "uv run main.py --no-headless --no-lazy-init", "detail": "Run main.py with visible window and login immediately", "type": "shell", "command": "uv", - "args": ["run", "main.py", "--no-headless", "--no-lazy-init"], + "args": [ + "run", + "main.py", + "--no-headless", + "--no-lazy-init" + ], "group": { "kind": "build", "isDefault": true @@ -56,7 +73,12 @@ "detail": "Follow Claude Desktop logs", "type": "shell", "command": "tail", - "args": ["-n", "20", "-F", "~/Library/Logs/Claude/mcp*.log"], + "args": [ + "-n", + "20", + "-F", + "~/Library/Logs/Claude/mcp*.log" + ], "isBackground": true, "presentation": { "reveal": "always", @@ -64,6 +86,26 @@ "focus": false }, "problemMatcher": [] + }, + { + "label": "bunx @anthropic-ai/dxt pack", + "detail": "Bundle the DXT extension using bunx", + "type": "shell", + "command": "bunx", + "args": [ + "@anthropic-ai/dxt", + "pack" + ], + "group": { + "kind": "build", + "isDefault": false + }, + "presentation": { + "reveal": "always", + "panel": "new", + "focus": true + }, + "problemMatcher": [] } ] } From de0f3b8170af438cefd541cf4f5905e438410255 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 04:21:41 -0400 Subject: [PATCH 057/565] fix(dxt): push version 1.0.0 and fix environment variable syntax in manifest --- linkedin_mcp_server/__init__.py | 2 +- manifest.json | 5 +++-- pyproject.toml | 2 +- uv.lock | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/linkedin_mcp_server/__init__.py b/linkedin_mcp_server/__init__.py index 512ac925..230ec7ec 100644 --- a/linkedin_mcp_server/__init__.py +++ b/linkedin_mcp_server/__init__.py @@ -1,4 +1,4 @@ # src/linkedin_mcp_server/__init__.py """LinkedIn MCP Server package.""" -__version__ = "0.1.0" 
+__version__ = "1.0.0" diff --git a/manifest.json b/manifest.json index 0d877e9a..ac6b211c 100644 --- a/manifest.json +++ b/manifest.json @@ -24,8 +24,8 @@ "command": "docker", "args": [ "run", "-i", "--rm", - "-e", "LINKEDIN_EMAIL={{user_config.linkedin_email}}", - "-e", "LINKEDIN_PASSWORD={{user_config.linkedin_password}}", + "-e", "LINKEDIN_EMAIL=${user_config.linkedin_email}", + "-e", "LINKEDIN_PASSWORD=${user_config.linkedin_password}", "stickerdaniel/linkedin-mcp-server" ] } @@ -100,6 +100,7 @@ } }, "compatibility": { + "claude_desktop": ">=0.10.0", "dxt_version": ">=0.1", "docker_version": ">=20.0.0", "platforms": ["darwin", "linux", "win32"] diff --git a/pyproject.toml b/pyproject.toml index 6453b6a6..149879f3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "linkedin-mcp-server" -version = "0.1.0" +version = "1.0.0" description = "MCP server for LinkedIn profile, company, and job scraping with Claude AI integration. Supports direct profile/company/job URL scraping with secure credential storage." readme = "README.md" requires-python = ">=3.12" diff --git a/uv.lock b/uv.lock index 5a5635e3..4aaa6cb0 100644 --- a/uv.lock +++ b/uv.lock @@ -422,7 +422,7 @@ wheels = [ [[package]] name = "linkedin-mcp-server" -version = "0.1.0" +version = "1.0.0" source = { virtual = "." 
} dependencies = [ { name = "httpx" }, From db4be7c731c180ef7c8801a5d3e690dc33d14cca Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 04:29:11 -0400 Subject: [PATCH 058/565] chore(readme): update badge colors for Docker Hub and Development links --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 02d1b15c..30f5be81 100644 --- a/README.md +++ b/README.md @@ -35,9 +35,9 @@ Get details about this job posting https://www.linkedin.com/jobs/view/123456789 Choose your preferred installation method: -[![Docker Hub](https://img.shields.io/badge/Docker_Hub-Universal_MCP_Server-008fe2?style=for-the-badge&logo=docker)](https://hub.docker.com/r/stickerdaniel/linkedin-mcp-server) +[![Docker Hub](https://img.shields.io/badge/Docker_Hub-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](https://hub.docker.com/r/stickerdaniel/linkedin-mcp-server) [![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_Extension-d97757?style=for-the-badge&logo=anthropic)](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest/download/linkedin-mcp-server.dxt) -[![Development](https://img.shields.io/badge/Development-Local_Setup-green?style=for-the-badge&logo=github)](#%EF%B8%8F-local-setup-develop--contribute) +[![Development](https://img.shields.io/badge/Development-Local_Setup-ffd343?style=for-the-badge&logo=python&logoColor=ffd343)](#%EF%B8%8F-local-setup-develop--contribute) --- From f0a6b7323dbb9307831a7b6e04d8bb7bcc647d62 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 04:37:59 -0400 Subject: [PATCH 059/565] fix(smithery): update startCommand to use Docker with LinkedIn credentials --- smithery.yaml | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/smithery.yaml b/smithery.yaml index 4ece65eb..6a3ffbc0 100644 --- a/smithery.yaml +++ b/smithery.yaml @@ -1,7 +1,16 @@ # Smithery configuration file: 
https://smithery.ai/docs/config#smitheryyaml startCommand: - type: stdio + command: docker + args: + - run + - -i + - --rm + - -e + - LINKEDIN_EMAIL=${LINKEDIN_EMAIL} + - -e + - LINKEDIN_PASSWORD=${LINKEDIN_PASSWORD} + - stickerdaniel/linkedin-mcp-server configSchema: # JSON Schema defining the configuration options for the MCP. type: object From 6c74488a7722a70b87c73a5b55fb82672af7fbf0 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 04:47:20 -0400 Subject: [PATCH 060/565] fix(docker): remove --no-lazy-init option from CMD in Dockerfile to prevent unnecessary logins --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 7c7b2e4c..003c7c49 100644 --- a/Dockerfile +++ b/Dockerfile @@ -25,4 +25,4 @@ RUN adduser -D -u 1000 mcpuser && chown -R mcpuser:mcpuser /app USER mcpuser # Default command -CMD ["uv", "run", "python", "main.py", "--no-setup", "--no-lazy-init"] +CMD ["uv", "run", "python", "main.py", "--no-setup"] From 7b51896b4842e50834db29e563695946e131ea98 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 04:58:27 -0400 Subject: [PATCH 061/565] fix(smithery): update smithery.yaml to use correct v1 format --- smithery.yaml | 39 +++++++++++++++++++++------------------ 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/smithery.yaml b/smithery.yaml index 6a3ffbc0..548429bd 100644 --- a/smithery.yaml +++ b/smithery.yaml @@ -1,8 +1,8 @@ -# Smithery configuration file: https://smithery.ai/docs/config#smitheryyaml - -startCommand: - command: docker - args: +# Smithery configuration file: https://smithery.ai/docs/build/project-config +version: 1 +start: + command: + - docker - run - -i - --rm @@ -11,16 +11,19 @@ startCommand: - -e - LINKEDIN_PASSWORD=${LINKEDIN_PASSWORD} - stickerdaniel/linkedin-mcp-server - configSchema: - # JSON Schema defining the configuration options for the MCP. 
- type: object - properties: - LINKEDIN_EMAIL: - type: string - description: Email for LinkedIn login - LINKEDIN_PASSWORD: - type: string - description: Password for LinkedIn login - exampleConfig: - LINKEDIN_EMAIL: example.user@example.com - LINKEDIN_PASSWORD: yourLinkedInPassword +configSchema: + # JSON Schema defining the configuration options for the MCP. + type: object + properties: + LINKEDIN_EMAIL: + type: string + description: Email for LinkedIn login + LINKEDIN_PASSWORD: + type: string + description: Password for LinkedIn login + required: + - LINKEDIN_EMAIL + - LINKEDIN_PASSWORD +exampleConfig: + LINKEDIN_EMAIL: example.user@example.com + LINKEDIN_PASSWORD: yourLinkedInPassword From 70fcd9d66a7d0dfb5f45135fc30d1c82f7671776 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 05:04:59 -0400 Subject: [PATCH 062/565] docs(readme): update prerequisites to include Docker installation --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 30f5be81..a18618a2 100644 --- a/README.md +++ b/README.md @@ -105,7 +105,7 @@ docker pull stickerdaniel/linkedin-mcp-server ## ๐Ÿ“ฆ Claude Desktop (DXT Extension) -**Prerequisites:** [Claude Desktop](https://claude.ai/download) installed +**Prerequisites:** [Claude Desktop](https://claude.ai/download) and [Docker](https://www.docker.com/get-started/) installed **One-click installation** for Claude Desktop users: 1. 
Download the [DXT extension](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest/download/linkedin-mcp-server.dxt) From 7c176762b0b48e00f8e2aa8989cb36c97492d0b0 Mon Sep 17 00:00:00 2001 From: Daniel Sticker Date: Tue, 1 Jul 2025 05:23:14 -0400 Subject: [PATCH 063/565] docs(readme): reorganize sections and update feature descriptions for clarity --- README.md | 50 ++++++++++++++++++++++---------------------------- 1 file changed, 22 insertions(+), 28 deletions(-) diff --git a/README.md b/README.md index a18618a2..8ab41c26 100644 --- a/README.md +++ b/README.md @@ -2,24 +2,15 @@ A Model Context Protocol (MCP) server that enables interaction with LinkedIn through Claude and other AI assistants. This server allows you to scrape LinkedIn profiles, companies, jobs, and perform job searches. +## Installation Methods -https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 - - -## Features & Tool Status +[![Docker](https://img.shields.io/badge/Docker_Hub-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](#-docker-setup-recommended-universal) +[![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_Extension-d97757?style=for-the-badge&logo=anthropic)](#-claude-desktop-dxt-extension) +[![Development](https://img.shields.io/badge/Development-Local_Setup-ffd343?style=for-the-badge&logo=python&logoColor=ffd343)](#-local-setup-develop--contribute) -### Working Tools -- **Profile Scraping** (`get_person_profile`): Get detailed information from LinkedIn profiles including work history, education, skills, and connections -- **Company Analysis** (`get_company_profile`): Extract company information with comprehensive details -- **Job Details** (`get_job_details`): Retrieve specific job posting details using direct LinkedIn job URLs -- **Session Management** (`close_session`): Properly close browser sessions and clean up resources - -### Tools with Known Issues -- **Job Search** (`search_jobs`): Currently 
experiencing ChromeDriver compatibility issues with LinkedIn's search interface -- **Recommended Jobs** (`get_recommended_jobs`): Has Selenium method compatibility issues due to outdated scraping methods -- **Company Profiles**: Some companies may have restricted access or may return empty results (need further investigation) +https://github.com/user-attachments/assets/eb84419a-6eaf-47bd-ac52-37bc59c83680 -## ๐ŸŽฏ Usage Examples +## Usage Examples ``` Get Daniel's profile https://www.linkedin.com/in/stickerdaniel/ @@ -31,13 +22,18 @@ Analyze this company https://www.linkedin.com/company/docker/ Get details about this job posting https://www.linkedin.com/jobs/view/123456789 ``` -## Installation Methods +## Features & Tool Status -Choose your preferred installation method: +### Working Tools +- **Profile Scraping** (`get_person_profile`): Get detailed information from LinkedIn profiles including work history, education, skills, and connections +- **Company Analysis** (`get_company_profile`): Extract company information with comprehensive details +- **Job Details** (`get_job_details`): Retrieve specific job posting details using direct LinkedIn job URLs +- **Session Management** (`close_session`): Properly close browser sessions and clean up resources -[![Docker Hub](https://img.shields.io/badge/Docker_Hub-Universal_MCP-008fe2?style=for-the-badge&logo=docker&logoColor=008fe2)](https://hub.docker.com/r/stickerdaniel/linkedin-mcp-server) -[![Install DXT Extension](https://img.shields.io/badge/Claude_Desktop_Extension-d97757?style=for-the-badge&logo=anthropic)](https://github.com/stickerdaniel/linkedin-mcp-server/releases/latest/download/linkedin-mcp-server.dxt) -[![Development](https://img.shields.io/badge/Development-Local_Setup-ffd343?style=for-the-badge&logo=python&logoColor=ffd343)](#%EF%B8%8F-local-setup-develop--contribute) +### Tools with Known Issues +- **Job Search** (`search_jobs`): Currently experiencing ChromeDriver compatibility issues with LinkedIn's 
search interface +- **Recommended Jobs** (`get_recommended_jobs`): Has Selenium method compatibility issues due to outdated scraping methods +- **Company Profiles**: Some companies may have restricted access or may return empty results (need further investigation) --- @@ -83,7 +79,7 @@ docker run -i --rm \