diff --git a/.basedpyright/baseline.json b/.basedpyright/baseline.json index 1008fd244b..3e0d959d54 100644 --- a/.basedpyright/baseline.json +++ b/.basedpyright/baseline.json @@ -1,5 +1,23 @@ { "files": { + "./build/py3_8/generate_docstubs.py": [ + { + "code": "reportUnusedCallResult", + "range": { + "startColumn": 8, + "endColumn": 71, + "lineCount": 1 + } + }, + { + "code": "reportUnusedCallResult", + "range": { + "startColumn": 8, + "endColumn": 70, + "lineCount": 1 + } + } + ], "./build/py_latest/docs_macros.py": [ { "code": "reportAny", @@ -10,7 +28,49 @@ } } ], - "./build/py_latest/generateUnicodeTables.py": [ + "./build/py_latest/localization_helper.py": [ + { + "code": "reportDeprecated", + "range": { + "startColumn": 44, + "endColumn": 48, + "lineCount": 1 + } + }, + { + "code": "reportDeprecated", + "range": { + "startColumn": 61, + "endColumn": 66, + "lineCount": 1 + } + }, + { + "code": "reportDeprecated", + "range": { + "startColumn": 14, + "endColumn": 18, + "lineCount": 1 + } + }, + { + "code": "reportDeprecated", + "range": { + "startColumn": 24, + "endColumn": 29, + "lineCount": 1 + } + }, + { + "code": "reportIncompatibleUnannotatedOverride", + "range": { + "startColumn": 13, + "endColumn": 22, + "lineCount": 1 + } + } + ], + "./build/py_latest/upstream/generateUnicodeTables.py": [ { "code": "reportUnusedCallResult", "range": { @@ -124,54 +184,100 @@ } } ], - "./build/py_latest/localization_helper.py": [ + "./build/py_latest/upstream/updateTypeshed.py": [ { - "code": "reportDeprecated", + "code": "reportUnusedImport", "range": { - "startColumn": 44, - "endColumn": 48, + "startColumn": 7, + "endColumn": 9, "lineCount": 1 } }, { - "code": "reportDeprecated", + "code": "reportUnknownParameterType", "range": { - "startColumn": 61, - "endColumn": 66, + "startColumn": 4, + "endColumn": 19, "lineCount": 1 } }, { - "code": "reportDeprecated", + "code": "reportMissingTypeArgument", "range": { - "startColumn": 14, - "endColumn": 18, + "startColumn": 51, + 
"endColumn": 78, "lineCount": 1 } }, { - "code": "reportDeprecated", + "code": "reportUnusedCallResult", "range": { - "startColumn": 24, - "endColumn": 29, + "startColumn": 8, + "endColumn": 88, "lineCount": 1 } }, { - "code": "reportIncompatibleUnannotatedOverride", + "code": "reportUnusedCallResult", "range": { - "startColumn": 13, - "endColumn": 22, + "startColumn": 8, + "endColumn": 61, "lineCount": 1 } - } - ], - "./build/py3_8/generate_docstubs.py": [ + }, { "code": "reportUnusedCallResult", "range": { "startColumn": 8, - "endColumn": 71, + "endColumn": 104, + "lineCount": 1 + } + }, + { + "code": "reportUnknownVariableType", + "range": { + "startColumn": 4, + "endColumn": 10, + "lineCount": 1 + } + }, + { + "code": "reportUnknownVariableType", + "range": { + "startColumn": 4, + "endColumn": 15, + "lineCount": 1 + } + }, + { + "code": "reportUnknownMemberType", + "range": { + "startColumn": 18, + "endColumn": 31, + "lineCount": 1 + } + }, + { + "code": "reportUnknownMemberType", + "range": { + "startColumn": 18, + "endColumn": 37, + "lineCount": 1 + } + }, + { + "code": "reportUnknownVariableType", + "range": { + "startColumn": 11, + "endColumn": 22, + "lineCount": 1 + } + }, + { + "code": "reportUnusedCallResult", + "range": { + "startColumn": 12, + "endColumn": 44, "lineCount": 1 } }, @@ -179,7 +285,87 @@ "code": "reportUnusedCallResult", "range": { "startColumn": 8, - "endColumn": 70, + "endColumn": 40, + "lineCount": 1 + } + }, + { + "code": "reportUnusedCallResult", + "range": { + "startColumn": 4, + "endColumn": 46, + "lineCount": 1 + } + }, + { + "code": "reportUnusedCallResult", + "range": { + "startColumn": 4, + "endColumn": 5, + "lineCount": 7 + } + }, + { + "code": "reportUnusedCallResult", + "range": { + "startColumn": 4, + "endColumn": 5, + "lineCount": 6 + } + }, + { + "code": "reportAny", + "range": { + "startColumn": 7, + "endColumn": 19, + "lineCount": 1 + } + }, + { + "code": "reportAny", + "range": { + "startColumn": 11, + "endColumn": 22, 
+ "lineCount": 1 + } + }, + { + "code": "reportAny", + "range": { + "startColumn": 43, + "endColumn": 54, + "lineCount": 1 + } + }, + { + "code": "reportAny", + "range": { + "startColumn": 61, + "endColumn": 72, + "lineCount": 1 + } + }, + { + "code": "reportAny", + "range": { + "startColumn": 61, + "endColumn": 72, + "lineCount": 1 + } + }, + { + "code": "reportAny", + "range": { + "startColumn": 29, + "endColumn": 37, + "lineCount": 1 + } + }, + { + "code": "reportAny", + "range": { + "startColumn": 29, + "endColumn": 37, "lineCount": 1 } } diff --git a/.vscode/settings.json b/.vscode/settings.json index 25f7e7d489..0639d28640 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -71,7 +71,8 @@ "pylint.ignorePatterns": [ "**/pyright-internal/src/tests/samples/*", "**/pyright-internal/typeshed-fallback/*", - "**/docstubs/*" + "**/docstubs/*", + "**/build/py_latest/upstream/*", ], // even though python.terminal.activateEnvInCurrentTerminal is enabled, we still need to add the venv's script folder to the PATH // because vscode's npm tasks do not run with the activated venv diff --git a/build/azuredevops/azure-pipelines-release.yml b/build/azuredevops/azure-pipelines-release.yml index ce2e4598f7..2337adf0fb 100644 --- a/build/azuredevops/azure-pipelines-release.yml +++ b/build/azuredevops/azure-pipelines-release.yml @@ -246,7 +246,7 @@ extends: - task: ManualValidation@0 timeoutInMinutes: 120 # task times out in 2 hours inputs: - notifyUsers: 'plseng@microsoft.com,eric@traut.com,rchiodo@microsoft.com,bschnurr@microsoft.com,graham.wheeler@microsoft.com' + notifyUsers: '[DevDiv]\Python.Language Server,eric@traut.com' instructions: 'In the next 2 hours please test the latest draft release of Pyright, then Publish the release in GitHub.' 
onTimeout: 'reject' # require sign-off diff --git a/build/py_latest/upstream/__init__.py b/build/py_latest/upstream/__init__.py new file mode 100644 index 0000000000..af3d943e49 --- /dev/null +++ b/build/py_latest/upstream/__init__.py @@ -0,0 +1,5 @@ +""" +scripts that come from upstream that we don't care about. they are only used by pyright maintainers so we never need to use them + +we only keep these scripts to reduce upstream conflicts. +""" \ No newline at end of file diff --git a/build/py_latest/generateUnicodeTables.py b/build/py_latest/upstream/generateUnicodeTables.py similarity index 98% rename from build/py_latest/generateUnicodeTables.py rename to build/py_latest/upstream/generateUnicodeTables.py index bbb89a88ea..6923302fe5 100644 --- a/build/py_latest/generateUnicodeTables.py +++ b/build/py_latest/upstream/generateUnicodeTables.py @@ -5,9 +5,6 @@ # Generates the content of unicode.ts based on the official Unicode # character database. -# this file comes from upstream and we don't use it so we disable the linters & baseline its basedpyright errors -# pylint: skip-file - import sys import urllib.request from io import TextIOWrapper diff --git a/build/py_latest/upstream/updateTypeshed.py b/build/py_latest/upstream/updateTypeshed.py new file mode 100644 index 0000000000..02491e19b9 --- /dev/null +++ b/build/py_latest/upstream/updateTypeshed.py @@ -0,0 +1,251 @@ +#!/usr/bin/env python3 +""" +Script to update the typeshed-fallback folder with the latest files from +the typeshed repository (https://github.com/python/typeshed). + +This script: +1. Clones/downloads the typeshed repository to a temporary directory +2. Copies the stdlib/ and stubs/ folders to typeshed-fallback +3. Copies the LICENSE and README.md files +4. 
Updates commit.txt with the current commit hash +""" + +import argparse +import os +import shutil +import subprocess +import sys +import tempfile +from pathlib import Path + + +def get_script_dir() -> Path: + """Get the directory where this script is located.""" + return Path(__file__).parent.resolve() + + +def get_typeshed_fallback_dir() -> Path: + """Get the path to the typeshed-fallback directory.""" + script_dir = get_script_dir() + return script_dir.parent / "packages" / "pyright-internal" / "typeshed-fallback" + + +def run_git_command(args: list[str], cwd: Path) -> subprocess.CompletedProcess: + """Run a git command and return the result.""" + return subprocess.run( + ["git"] + args, + cwd=cwd, + capture_output=True, + text=True, + check=True, + ) + + +def clone_typeshed(target_dir: Path, commit: str | None = None) -> str: + """ + Clone the typeshed repository to the target directory. + + Args: + target_dir: Directory to clone into + commit: Optional specific commit hash to checkout + + Returns: + The commit hash that was checked out + """ + typeshed_url = "https://github.com/python/typeshed.git" + + print(f"Cloning typeshed repository to {target_dir}...") + + # Clone with depth 1 for faster download (unless we need a specific commit) + if commit: + # Full clone needed for specific commit + run_git_command(["clone", typeshed_url, str(target_dir)], cwd=target_dir.parent) + run_git_command(["checkout", commit], cwd=target_dir) + else: + # Shallow clone for latest + run_git_command(["clone", "--depth", "1", typeshed_url, str(target_dir)], cwd=target_dir.parent) + + # Get the current commit hash + result = run_git_command(["rev-parse", "HEAD"], cwd=target_dir) + commit_hash = result.stdout.strip() + + print(f"Checked out commit: {commit_hash}") + return commit_hash + + +def remove_directory_contents(dir_path: Path) -> None: + """Remove all contents of a directory but keep the directory itself.""" + if dir_path.exists(): + shutil.rmtree(dir_path) + 
dir_path.mkdir(parents=True, exist_ok=True) + + +def should_copy_file(file_path: Path) -> bool: + """Check if a file should be copied based on its extension or name.""" + allowed_extensions = {".pyi", ".toml"} + allowed_names = {"VERSIONS"} + return file_path.suffix.lower() in allowed_extensions or file_path.name in allowed_names + + +def is_in_excluded_folder(file_path: Path, base_folder: Path) -> bool: + """Check if the file is inside a folder that starts with '@'.""" + rel_path = file_path.relative_to(base_folder) + for part in rel_path.parts: + if part.startswith("@"): + return True + return False + + +def copy_tree_filtered(src_folder: Path, dst_folder: Path) -> None: + """ + Copy a directory tree, only including .pyi and VERSIONS files. + Skips any folder starting with '@'. + + Args: + src_folder: Source directory + dst_folder: Destination directory + """ + for src_path in src_folder.rglob("*"): + if src_path.is_file() and should_copy_file(src_path): + # Skip files in folders starting with '@' + if is_in_excluded_folder(src_path, src_folder): + continue + + # Calculate relative path and destination + rel_path = src_path.relative_to(src_folder) + dst_path = dst_folder / rel_path + + # Create parent directories if needed + dst_path.parent.mkdir(parents=True, exist_ok=True) + + # Copy the file + shutil.copy2(src_path, dst_path) + + +def copy_typeshed_files(source_dir: Path, dest_dir: Path) -> None: + """ + Copy the relevant typeshed files to the destination directory. + Only .pyi and VERSIONS files are copied from folders. 
+ + Args: + source_dir: The cloned typeshed repository directory + dest_dir: The typeshed-fallback directory + """ + # Folders to copy + folders_to_copy = ["stdlib", "stubs"] + + # Copy folders (only .py and .pyi files) + for folder in folders_to_copy: + src_folder = source_dir / folder + dst_folder = dest_dir / folder + + if not src_folder.exists(): + print(f"Warning: Source folder {src_folder} does not exist, skipping...") + continue + + print(f"Copying {folder}/ (only .pyi and VERSIONS files)...") + + # Remove existing folder contents + remove_directory_contents(dst_folder) + + # Copy the folder with filtering + copy_tree_filtered(src_folder, dst_folder) + + # Files to copy + files_to_copy = ["LICENSE", "README.md"] + + # Copy files + for file in files_to_copy: + src_file = source_dir / file + dst_file = dest_dir / file + + if not src_file.exists(): + print(f"Warning: Source file {src_file} does not exist, skipping...") + continue + + print(f"Copying {file}...") + shutil.copy2(src_file, dst_file) + + +def update_commit_file(dest_dir: Path, commit_hash: str) -> None: + """Update the commit.txt file with the new commit hash.""" + commit_file = dest_dir / "commit.txt" + print(f"Updating commit.txt with {commit_hash}...") + commit_file.write_text(commit_hash + "\n") + + +def main() -> int: + parser = argparse.ArgumentParser( + description="Update typeshed-fallback with the latest typeshed files" + ) + parser.add_argument( + "--commit", + "-c", + type=str, + default=None, + help="Specific commit hash to checkout (default: latest main branch)", + ) + parser.add_argument( + "--dry-run", + "-n", + action="store_true", + help="Show what would be done without making changes", + ) + + args = parser.parse_args() + + typeshed_fallback_dir = get_typeshed_fallback_dir() + + if not typeshed_fallback_dir.exists(): + print(f"Error: typeshed-fallback directory not found at {typeshed_fallback_dir}") + return 1 + + print(f"Typeshed fallback directory: {typeshed_fallback_dir}") + + 
if args.dry_run: + print("\n*** DRY RUN - No changes will be made ***\n") + print("Would perform the following actions:") + print(" 1. Clone typeshed repository to a temporary directory") + if args.commit: + print(f" 2. Checkout commit: {args.commit}") + else: + print(" 2. Use latest commit from main branch") + print(" 3. Copy stdlib/ folder (only .pyi and VERSIONS files)") + print(" 4. Copy stubs/ folder (only .pyi and VERSIONS files)") + print(" 5. Copy LICENSE file") + print(" 6. Copy README.md file") + print(" 7. Update commit.txt with new commit hash") + return 0 + + # Create a temporary directory for cloning + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + typeshed_clone_dir = temp_path / "typeshed" + + try: + # Clone typeshed + commit_hash = clone_typeshed(typeshed_clone_dir, args.commit) + + # Copy files + copy_typeshed_files(typeshed_clone_dir, typeshed_fallback_dir) + + # Update commit.txt + update_commit_file(typeshed_fallback_dir, commit_hash) + + print("\nTypeshed update complete!") + print(f"Updated to commit: {commit_hash}") + + except subprocess.CalledProcessError as e: + print(f"Error running git command: {e}") + print(f"stdout: {e.stdout}") + print(f"stderr: {e.stderr}") + return 1 + except Exception as e: + print(f"Error: {e}") + return 1 + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/package-lock.json b/package-lock.json index 76144829fa..b23ef3b427 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,13 +11,13 @@ "devDependencies": { "@detachhead/ts-helpers": "^16.2.0", "@types/glob": "^8.1.0", - "@types/node": "^22.16.0", + "@types/node": "^22.18.12", "@types/yargs": "^16.0.9", "@typescript-eslint/eslint-plugin": "^6.21.0", "@typescript-eslint/parser": "^6.21.0", - "cross-env": "^7.0.3", + "cross-env": "^10.1.0", "eslint": "^8.57.1", - "eslint-config-prettier": "^8.10.0", + "eslint-config-prettier": "^8.10.2", "eslint-plugin-simple-import-sort": "^10.0.0", "glob": "^8.1.0", 
"jsonc-parser": "^3.3.1", @@ -62,19 +62,6 @@ "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==" }, - "node_modules/@ampproject/remapping": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", - "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", - "dev": true, - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, "node_modules/@azure/abort-controller": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", @@ -290,46 +277,47 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.26.2", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", - "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.25.9", + "@babel/helper-validator-identifier": "^7.27.1", "js-tokens": "^4.0.0", - "picocolors": "^1.0.0" + "picocolors": "^1.1.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/compat-data": { - "version": "7.26.8", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.8.tgz", - "integrity": "sha512-oH5UPLMWR3L2wEFLnFJ1TZXqHufiTKAiLfqw5zkhS4dKXLJ10yVztfil/twG8EDTA4F/tvVNw9nOl4ZMslB8rQ==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.5.tgz", + "integrity": 
"sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.26.10", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.10.tgz", - "integrity": "sha512-vMqyb7XCDMPvJFFOaT9kxtiRh42GwlZEg1/uIgtZshS5a/8OaduUfCi7kynKgc3Tw/6Uo2D+db9qBttghhmxwQ==", - "dev": true, - "peer": true, - "dependencies": { - "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.26.2", - "@babel/generator": "^7.26.10", - "@babel/helper-compilation-targets": "^7.26.5", - "@babel/helper-module-transforms": "^7.26.0", - "@babel/helpers": "^7.26.10", - "@babel/parser": "^7.26.10", - "@babel/template": "^7.26.9", - "@babel/traverse": "^7.26.10", - "@babel/types": "^7.26.10", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz", + "integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.5", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-module-transforms": "^7.28.3", + "@babel/helpers": "^7.28.4", + "@babel/parser": "^7.28.5", + "@babel/template": "^7.27.2", + "@babel/traverse": "^7.28.5", + "@babel/types": "^7.28.5", + "@jridgewell/remapping": "^2.3.5", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -354,15 +342,16 @@ } }, "node_modules/@babel/generator": { - "version": "7.26.10", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.10.tgz", - "integrity": "sha512-rRHT8siFIXQrAYOYqZQVsAr8vJ+cBNqcVAY6m5V8/4QqzaPl+zDBe6cLEPRDuNOUf3ww8RfJVlOyQMoSI+5Ang==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz", + "integrity": 
"sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/parser": "^7.26.10", - "@babel/types": "^7.26.10", - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.25", + "@babel/parser": "^7.28.5", + "@babel/types": "^7.28.5", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" }, "engines": { @@ -370,13 +359,14 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.26.5.tgz", - "integrity": "sha512-IXuyn5EkouFJscIDuFF5EsiSolseme1s0CZB+QxVugqJLYmKdxI1VfIBOst0SUu4rnk2Z7kqTwmoO1lp3HIfnA==", + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", + "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/compat-data": "^7.26.5", - "@babel/helper-validator-option": "^7.25.9", + "@babel/compat-data": "^7.27.2", + "@babel/helper-validator-option": "^7.27.1", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", "semver": "^6.3.1" @@ -390,6 +380,7 @@ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "dev": true, + "license": "ISC", "dependencies": { "yallist": "^3.0.2" } @@ -399,6 +390,7 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -407,30 +399,43 @@ "version": "3.1.1", "resolved": 
"https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "dev": true + "dev": true, + "license": "ISC" + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } }, "node_modules/@babel/helper-module-imports": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", - "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", + "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/traverse": "^7.25.9", - "@babel/types": "^7.25.9" + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", - "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz", + "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/helper-module-imports": "^7.25.9", - 
"@babel/helper-validator-identifier": "^7.25.9", - "@babel/traverse": "^7.25.9" + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1", + "@babel/traverse": "^7.28.3" }, "engines": { "node": ">=6.9.0" @@ -440,27 +445,29 @@ } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.26.5", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.26.5.tgz", - "integrity": "sha512-RS+jZcRdZdRFzMyr+wcsaqOmld1/EqTghfaBGQQd/WnRdzdlvSZ//kF7U8VQTxf1ynZ4cjUcYgjVGx13ewNPMg==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", + "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", - "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", - "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": 
"sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", "dev": true, "license": "MIT", "engines": { @@ -468,34 +475,37 @@ } }, "node_modules/@babel/helper-validator-option": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", - "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helpers": { - "version": "7.26.10", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.10.tgz", - "integrity": "sha512-UPYc3SauzZ3JGgj87GgZ89JVdC5dj0AoetR5Bw6wj4niittNyFh6+eOGonYvJ1ao6B8lEa3Q3klS7ADZ53bc5g==", + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", + "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/template": "^7.26.9", - "@babel/types": "^7.26.10" + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/parser": { - "version": "7.26.10", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.10.tgz", - "integrity": "sha512-6aQR2zGE/QFi8JpDLjUZEPYOs7+mhKXm86VaKFiLP35JQwQb6bwUE+XbvkH0EptsYhbNBSUGaUBLKqxH1xSgsA==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", + "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/types": 
"^7.26.10" + "@babel/types": "^7.28.5" }, "bin": { "parser": "bin/babel-parser.js" @@ -595,12 +605,13 @@ } }, "node_modules/@babel/plugin-syntax-jsx": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.25.9.tgz", - "integrity": "sha512-ld6oezHQMZsZfp6pWtbjaNDF2tiiCYYDqQszHt5VV437lewP9aSi2Of99CK0D0XB21k7FLgnLcmQKyKzynfeAA==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz", + "integrity": "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.25.9" + "@babel/helper-plugin-utils": "^7.27.1" }, "engines": { "node": ">=6.9.0" @@ -712,12 +723,13 @@ } }, "node_modules/@babel/plugin-syntax-typescript": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.25.9.tgz", - "integrity": "sha512-hjMgRy5hb8uJJjUcdWunWVcoi9bGpJp8p5Ol1229PoN6aytsLwNMgmdftO23wnCLMfVmTwZDWMPNq/D1SY60JQ==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz", + "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.25.9" + "@babel/helper-plugin-utils": "^7.27.1" }, "engines": { "node": ">=6.9.0" @@ -727,54 +739,48 @@ } }, "node_modules/@babel/template": { - "version": "7.26.9", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.26.9.tgz", - "integrity": "sha512-qyRplbeIpNZhmzOysF/wFMuP9sctmh2cFzRAZOn1YapxBsE1i9bJIY586R/WBLfLcmcBlM8ROBiQURnnNy+zfA==", + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", + "integrity": 
"sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.26.2", - "@babel/parser": "^7.26.9", - "@babel/types": "^7.26.9" + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.26.10", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.10.tgz", - "integrity": "sha512-k8NuDrxr0WrPH5Aupqb2LCVURP/S0vBEn5mK6iH+GIYob66U5EtoZvcdudR2jQ4cmTwhEwW1DLB+Yyas9zjF6A==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz", + "integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.26.2", - "@babel/generator": "^7.26.10", - "@babel/parser": "^7.26.10", - "@babel/template": "^7.26.9", - "@babel/types": "^7.26.10", - "debug": "^4.3.1", - "globals": "^11.1.0" + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.5", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.5", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.5", + "debug": "^4.3.1" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/traverse/node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/@babel/types": { - "version": "7.26.10", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.10.tgz", - "integrity": "sha512-emqcG3vHrpxUKTrxcblR36dcrcoRDvKmnL/dCL6ZsHaShW80qxCAcNhzQZrpeM765VzEos+xOi4s+r4IXzTwdQ==", + "version": "7.28.5", + "resolved": 
"https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", + "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9" + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" }, "engines": { "node": ">=6.9.0" @@ -891,372 +897,487 @@ "fast-check": "^3.12.0" } }, + "node_modules/@emnapi/core": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.8.1.tgz", + "integrity": "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.1.0", + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.1.tgz", + "integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/wasi-threads": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.1.0.tgz", + "integrity": "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@epic-web/invariant": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@epic-web/invariant/-/invariant-1.0.0.tgz", + "integrity": "sha512-lrTPqgvfFQtR/eY/qkIzp98OGdNJu0m5ji3q/nJI8v3SXkRKEnWiOxMmbvcSoAIzv/cGiuvRy57k4suKQSAdwA==", + "dev": true, + "license": "MIT" + }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.19.12", - "resolved": 
"https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.19.12.tgz", - "integrity": "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.2.tgz", + "integrity": "sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==", "cpu": [ "ppc64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "aix" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/android-arm": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.19.12.tgz", - "integrity": "sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.2.tgz", + "integrity": "sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==", "cpu": [ "arm" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "android" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/android-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.19.12.tgz", - "integrity": "sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.2.tgz", + "integrity": "sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==", "cpu": [ "arm64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "android" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/android-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.19.12.tgz", - 
"integrity": "sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.2.tgz", + "integrity": "sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==", "cpu": [ "x64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "android" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.12.tgz", - "integrity": "sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.2.tgz", + "integrity": "sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==", "cpu": [ "arm64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "darwin" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.19.12.tgz", - "integrity": "sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.2.tgz", + "integrity": "sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==", "cpu": [ "x64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "darwin" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.12.tgz", - "integrity": 
"sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.2.tgz", + "integrity": "sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==", "cpu": [ "arm64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "freebsd" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.19.12.tgz", - "integrity": "sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.2.tgz", + "integrity": "sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==", "cpu": [ "x64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "freebsd" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-arm": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.19.12.tgz", - "integrity": "sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.2.tgz", + "integrity": "sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==", "cpu": [ "arm" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.19.12.tgz", - "integrity": 
"sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.2.tgz", + "integrity": "sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==", "cpu": [ "arm64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.19.12.tgz", - "integrity": "sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.2.tgz", + "integrity": "sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==", "cpu": [ "ia32" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.19.12.tgz", - "integrity": "sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.2.tgz", + "integrity": "sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==", "cpu": [ "loong64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.19.12.tgz", - "integrity": 
"sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.2.tgz", + "integrity": "sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==", "cpu": [ "mips64el" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.19.12.tgz", - "integrity": "sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.2.tgz", + "integrity": "sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==", "cpu": [ "ppc64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.19.12.tgz", - "integrity": "sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.2.tgz", + "integrity": "sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==", "cpu": [ "riscv64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.19.12.tgz", - "integrity": 
"sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.2.tgz", + "integrity": "sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==", "cpu": [ "s390x" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/linux-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.19.12.tgz", - "integrity": "sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.2.tgz", + "integrity": "sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==", "cpu": [ "x64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ], "engines": { - "node": ">=12" + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.2.tgz", + "integrity": "sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.19.12.tgz", - "integrity": "sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.2.tgz", + "integrity": 
"sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==", "cpu": [ "x64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "netbsd" ], "engines": { - "node": ">=12" + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.2.tgz", + "integrity": "sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.19.12.tgz", - "integrity": "sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.2.tgz", + "integrity": "sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==", "cpu": [ "x64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "openbsd" ], "engines": { - "node": ">=12" + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.2.tgz", + "integrity": "sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.19.12.tgz", - "integrity": 
"sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.2.tgz", + "integrity": "sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==", "cpu": [ "x64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "sunos" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.19.12.tgz", - "integrity": "sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.2.tgz", + "integrity": "sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==", "cpu": [ "arm64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "win32" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.19.12.tgz", - "integrity": "sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.2.tgz", + "integrity": "sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==", "cpu": [ "ia32" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "win32" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@esbuild/win32-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.19.12.tgz", - "integrity": "sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==", + 
"version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.2.tgz", + "integrity": "sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==", "cpu": [ "x64" ], "dev": true, + "license": "MIT", "optional": true, "os": [ "win32" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@eslint-community/eslint-utils": { @@ -1423,6 +1544,27 @@ "dev": true, "license": "BSD-3-Clause" }, + "node_modules/@isaacs/balanced-match": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz", + "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==", + "license": "MIT", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/@isaacs/brace-expansion": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz", + "integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==", + "license": "MIT", + "dependencies": { + "@isaacs/balanced-match": "^4.0.1" + }, + "engines": { + "node": "20 || >=22" + } + }, "node_modules/@isaacs/cliui": { "version": "8.0.2", "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", @@ -1645,6 +1787,7 @@ "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", "dev": true, + "peer": true, "dependencies": { "@jest/types": "^29.6.3", "@types/node": "*", @@ -1662,6 +1805,7 @@ "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", "dev": true, + "peer": true, "dependencies": { "@jest/console": "^29.7.0", "@jest/reporters": "^29.7.0", @@ -1704,11 +1848,22 @@ } } }, + 
"node_modules/@jest/diff-sequences": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/@jest/diff-sequences/-/diff-sequences-30.0.1.tgz", + "integrity": "sha512-n5H8QLDJ47QqbCNn5SuFjCRDrOLEZ0h8vAHCK5RL9Ls7Xa8AQLa/YxAc9UjFqoEDM48muwtBGjtMY5cr0PLDCw==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, "node_modules/@jest/environment": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", "dev": true, + "peer": true, "dependencies": { "@jest/fake-timers": "^29.7.0", "@jest/types": "^29.6.3", @@ -1724,6 +1879,7 @@ "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", "dev": true, + "peer": true, "dependencies": { "expect": "^29.7.0", "jest-snapshot": "^29.7.0" @@ -1737,6 +1893,7 @@ "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", "dev": true, + "peer": true, "dependencies": { "jest-get-type": "^29.6.3" }, @@ -1749,6 +1906,7 @@ "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", "dev": true, + "peer": true, "dependencies": { "@jest/types": "^29.6.3", "@sinonjs/fake-timers": "^10.0.2", @@ -1761,11 +1919,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/@jest/get-type": { + "version": "30.1.0", + "resolved": "https://registry.npmjs.org/@jest/get-type/-/get-type-30.1.0.tgz", + "integrity": "sha512-eMbZE2hUnx1WV0pmURZY9XoXPkUYjpc55mb0CrhtdWLtzMQPFvu/rZkTLZFTsdaVQa+Tr4eWAteqcUzoawq/uA==", + 
"dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, "node_modules/@jest/globals": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", "dev": true, + "peer": true, "dependencies": { "@jest/environment": "^29.7.0", "@jest/expect": "^29.7.0", @@ -1776,11 +1945,36 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/@jest/pattern": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/@jest/pattern/-/pattern-30.0.1.tgz", + "integrity": "sha512-gWp7NfQW27LaBQz3TITS8L7ZCQ0TLvtmI//4OwlQRx4rnWxcPNIYjxZpDcN4+UlGxgm3jS5QPz8IPTCkb59wZA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "jest-regex-util": "30.0.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/pattern/node_modules/jest-regex-util": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, "node_modules/@jest/reporters": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", "dev": true, + "peer": true, "dependencies": { "@bcoe/v8-coverage": "^0.2.3", "@jest/console": "^29.7.0", @@ -1825,6 +2019,7 @@ "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -1837,6 +2032,7 
@@ "deprecated": "Glob versions prior to v9 are no longer supported", "dev": true, "license": "ISC", + "peer": true, "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -1858,6 +2054,7 @@ "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, "license": "ISC", + "peer": true, "dependencies": { "brace-expansion": "^1.1.7" }, @@ -1871,6 +2068,7 @@ "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@sinclair/typebox": "^0.27.8" }, @@ -1878,40 +2076,108 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/@jest/source-map": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", - "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", + "node_modules/@jest/snapshot-utils": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/snapshot-utils/-/snapshot-utils-30.2.0.tgz", + "integrity": "sha512-0aVxM3RH6DaiLcjj/b0KrIBZhSX1373Xci4l3cW5xiUWPctZ59zQ7jj4rqcJQ/Z8JuN/4wX3FpJSa3RssVvCug==", "dev": true, + "license": "MIT", "dependencies": { - "@jridgewell/trace-mapping": "^0.3.18", - "callsites": "^3.0.0", - "graceful-fs": "^4.2.9" + "@jest/types": "30.2.0", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "natural-compare": "^1.4.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, - "node_modules/@jest/test-result": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", - "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", + "node_modules/@jest/snapshot-utils/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": 
"https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", "dev": true, + "license": "MIT", "dependencies": { - "@jest/console": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/istanbul-lib-coverage": "^2.0.0", - "collect-v8-coverage": "^1.0.0" + "@sinclair/typebox": "^0.34.0" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, - "node_modules/@jest/test-sequencer": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", - "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", + "node_modules/@jest/snapshot-utils/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/snapshot-utils/node_modules/@sinclair/typebox": { + "version": "0.34.47", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.47.tgz", + "integrity": "sha512-ZGIBQ+XDvO5JQku9wmwtabcVTHJsgSWAHYtVuM9pBNNR5E88v6Jcj/llpmsjivig5X8A8HHOb4/mbEKPS5EvAw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jest/snapshot-utils/node_modules/@types/yargs": { + "version": "17.0.35", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.35.tgz", + "integrity": 
"sha512-qUHkeCyQFxMXg79wQfTtfndEC+N9ZZg76HJftDJp+qH2tV7Gj4OJi7l+PiWwJ+pWtW8GwSmqsDj/oymhrTWXjg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@jest/source-map": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", + "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", + "dev": true, + "peer": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.18", + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-result": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", + "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", + "dev": true, + "peer": true, + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "collect-v8-coverage": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-sequencer": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", + "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", + "dev": true, + "peer": true, "dependencies": { "@jest/test-result": "^29.7.0", "graceful-fs": "^4.2.9", @@ -1954,6 +2220,7 @@ "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", "dev": true, + "peer": true, "dependencies": { "imurmurhash": "^0.1.4", "signal-exit": "^3.0.7" @@ -1985,22 +2252,31 @@ "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", 
"integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", "dev": true, + "peer": true, "dependencies": { "@types/yargs-parser": "*" } }, "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.8", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", - "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", "dev": true, + "license": "MIT", "dependencies": { - "@jridgewell/set-array": "^1.2.1", - "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" } }, "node_modules/@jridgewell/resolve-uri": { @@ -2012,20 +2288,12 @@ "node": ">=6.0.0" } }, - "node_modules/@jridgewell/set-array": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", - "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", - "dev": true, - "engines": { - "node": ">=6.0.0" - } - }, "node_modules/@jridgewell/source-map": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.6.tgz", - "integrity": 
"sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ==", + "version": "0.3.11", + "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.11.tgz", + "integrity": "sha512-ZMp1V8ZFcPG5dIWnQLr3NSI1MiCU7UETdS/A0G8V/XWHvJv3ZsFqutJn1Y5RPmAPX6F3BiE397OqveU/9NCuIA==", "dev": true, + "license": "MIT", "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25" @@ -2038,10 +2306,11 @@ "dev": true }, "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.25", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", - "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", "dev": true, + "license": "MIT", "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" @@ -2068,6 +2337,19 @@ "@lumino/algorithm": "^2.0.2" } }, + "node_modules/@napi-rs/wasm-runtime": { + "version": "0.2.12", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.12.tgz", + "integrity": "sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.4.3", + "@emnapi/runtime": "^1.4.3", + "@tybys/wasm-util": "^0.10.0" + } + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -2172,6 +2454,19 @@ "node": ">=14" } }, + "node_modules/@pkgr/core": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.9.tgz", + "integrity": 
"sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/pkgr" + } + }, "node_modules/@pnpm/config.env-replace": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@pnpm/config.env-replace/-/config.env-replace-1.1.0.tgz", @@ -2222,7 +2517,8 @@ "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", "dev": true, - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/@sindresorhus/is": { "version": "5.6.0", @@ -2237,6 +2533,19 @@ "url": "https://github.com/sindresorhus/is?sponsor=1" } }, + "node_modules/@sindresorhus/merge-streams": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", + "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/@sinonjs/commons": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", @@ -2251,6 +2560,7 @@ "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", "dev": true, + "peer": true, "dependencies": { "@sinonjs/commons": "^3.0.0" } @@ -2278,6 +2588,17 @@ "node": ">= 10" } }, + "node_modules/@tybys/wasm-util": { + "version": "0.10.1", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.1.tgz", + "integrity": 
"sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@types/babel__core": { "version": "7.20.5", "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", @@ -2368,10 +2689,11 @@ } }, "node_modules/@types/estree": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", - "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", - "dev": true + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" }, "node_modules/@types/fs-extra": { "version": "11.0.4", @@ -2399,6 +2721,7 @@ "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", "dev": true, + "peer": true, "dependencies": { "@types/node": "*" } @@ -2445,13 +2768,237 @@ } }, "node_modules/@types/jest": { - "version": "29.5.14", - "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", - "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", + "version": "30.0.0", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-30.0.0.tgz", + "integrity": "sha512-XTYugzhuwqWjws0CVz8QpM36+T+Dz5mTEBKhNs/esGLnCIlGdRy+Dq78NRjd7ls7r8BC8ZRMOrKlkO1hU0JOwA==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "^30.0.0", + "pretty-format": "^30.0.0" + } + }, + "node_modules/@types/jest/node_modules/@jest/expect-utils": { + "version": "30.2.0", + "resolved": 
"https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-30.2.0.tgz", + "integrity": "sha512-1JnRfhqpD8HGpOmQp180Fo9Zt69zNtC+9lR+kT7NVL05tNXIi+QC8Csz7lfidMoVLPD3FnOtcmp0CEFnxExGEA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@types/jest/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@types/jest/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@types/jest/node_modules/@sinclair/typebox": { + "version": "0.34.47", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.47.tgz", + "integrity": "sha512-ZGIBQ+XDvO5JQku9wmwtabcVTHJsgSWAHYtVuM9pBNNR5E88v6Jcj/llpmsjivig5X8A8HHOb4/mbEKPS5EvAw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/jest/node_modules/@types/yargs": { + "version": "17.0.35", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.35.tgz", + "integrity": 
"sha512-qUHkeCyQFxMXg79wQfTtfndEC+N9ZZg76HJftDJp+qH2tV7Gj4OJi7l+PiWwJ+pWtW8GwSmqsDj/oymhrTWXjg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/jest/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@types/jest/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@types/jest/node_modules/expect": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-30.2.0.tgz", + "integrity": "sha512-u/feCi0GPsI+988gU2FLcsHyAHTU0MX1Wg68NhAnN7z/+C5wqG+CY8J53N9ioe8RXgaoz0nBR/TYMf3AycUuPw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/expect-utils": "30.2.0", + "@jest/get-type": "30.1.0", + "jest-matcher-utils": "30.2.0", + "jest-message-util": "30.2.0", + "jest-mock": "30.2.0", + "jest-util": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@types/jest/node_modules/jest-diff": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-30.2.0.tgz", + "integrity": "sha512-dQHFo3Pt4/NLlG5z4PxZ/3yZTZ1C7s9hveiOj+GCN+uT109NC2QgsoVZsVOAvbJ3RgKkvyLGXZV9+piDpWbm6A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/diff-sequences": "30.0.1", + 
"@jest/get-type": "30.1.0", + "chalk": "^4.1.2", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@types/jest/node_modules/jest-matcher-utils": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-30.2.0.tgz", + "integrity": "sha512-dQ94Nq4dbzmUWkQ0ANAWS9tBRfqCrn0bV9AMYdOi/MHW726xn7eQmMeRTpX2ViC00bpNaWXq+7o4lIQ3AX13Hg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0", + "chalk": "^4.1.2", + "jest-diff": "30.2.0", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@types/jest/node_modules/jest-message-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-30.2.0.tgz", + "integrity": "sha512-y4DKFLZ2y6DxTWD4cDe07RglV88ZiNEdlRfGtqahfbIjfsw1nMCPx49Uev4IA/hWn3sDKyAnSPwoYSsAEdcimw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@jest/types": "30.2.0", + "@types/stack-utils": "^2.0.3", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "micromatch": "^4.0.8", + "pretty-format": "30.2.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.6" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@types/jest/node_modules/jest-mock": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-30.2.0.tgz", + "integrity": "sha512-JNNNl2rj4b5ICpmAcq+WbLH83XswjPbjH4T7yvGzfAGCPh1rw+xVNbtk+FnRslvt9lkCcdn9i1oAoKUuFsOxRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "jest-util": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@types/jest/node_modules/jest-util": { + "version": "30.2.0", + "resolved": 
"https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@types/jest/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/@types/jest/node_modules/pretty-format": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-30.2.0.tgz", + "integrity": "sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==", + "dev": true, + "license": "MIT", "dependencies": { - "expect": "^29.0.0", - "pretty-format": "^29.0.0" + "@jest/schemas": "30.0.5", + "ansi-styles": "^5.2.0", + "react-is": "^18.3.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, "node_modules/@types/json-schema": { @@ -2484,9 +3031,9 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "22.18.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.0.tgz", - "integrity": "sha512-m5ObIqwsUp6BZzyiy4RdZpzWGub9bqLJMvZDD0QMXhxjqMHMENlj+SqF5QxoUwaQNFe+8kz8XM8ZQhqkQPTgMQ==", + "version": "22.19.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.3.tgz", + "integrity": "sha512-1N9SBnWYOJTrNZCdh/yJE+t910Y128BoyY+zBLWhL3r0TYzlTmFdXrPwHL9DyFZmlEXNQQolTZh3KHV31QDhyA==", "dev": true, "license": "MIT", 
"dependencies": { @@ -2585,7 +3132,6 @@ "integrity": "sha512-tbsV1jPne5CkFQCgPBcDOt30ItF7aJoZL997JSF7MhGQqOeT3svWRYxiqlfA5RUdlHN6Fi+EI9bxqbdyAUZjYQ==", "dev": true, "license": "BSD-2-Clause", - "peer": true, "dependencies": { "@typescript-eslint/scope-manager": "6.21.0", "@typescript-eslint/types": "6.21.0", @@ -2743,25 +3289,294 @@ } }, "node_modules/@ungap/structured-clone": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.1.tgz", - "integrity": "sha512-fEzPV3hSkSMltkw152tJKNARhOupqbH96MZWyRjNaYZOMIzbrTeQDG+MTc6Mr2pgzFQzFxAfmhGDNP5QK++2ZA==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", "dev": true, "license": "ISC" }, - "node_modules/@vscode/python-extension": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@vscode/python-extension/-/python-extension-1.0.5.tgz", - "integrity": "sha512-uYhXUrL/gn92mfqhjAwH2+yGOpjloBxj9ekoL4BhUsKcyJMpEg6WlNf3S3si+5x9zlbHHe7FYQNjZEbz1ymI9Q==", - "engines": { - "node": ">=16.17.1", - "vscode": "^1.78.0" - } - }, - "node_modules/@vscode/vsce": { - "version": "2.32.0", - "resolved": "https://registry.npmjs.org/@vscode/vsce/-/vsce-2.32.0.tgz", - "integrity": "sha512-3EFJfsgrSftIqt3EtdRcAygy/OJ3hstyI1cDmIgkU9CFZW5C+3djr6mfosndCUqcVYuyjmxOK1xmFp/Bq7+NIg==", + "node_modules/@unrs/resolver-binding-android-arm-eabi": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-android-arm-eabi/-/resolver-binding-android-arm-eabi-1.11.1.tgz", + "integrity": "sha512-ppLRUgHVaGRWUx0R0Ut06Mjo9gBaBkg3v/8AxusGLhsIotbBLuRk51rAzqLC8gq6NyyAojEXglNjzf6R948DNw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@unrs/resolver-binding-android-arm64": { + "version": "1.11.1", + 
"resolved": "https://registry.npmjs.org/@unrs/resolver-binding-android-arm64/-/resolver-binding-android-arm64-1.11.1.tgz", + "integrity": "sha512-lCxkVtb4wp1v+EoN+HjIG9cIIzPkX5OtM03pQYkG+U5O/wL53LC4QbIeazgiKqluGeVEeBlZahHalCaBvU1a2g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@unrs/resolver-binding-darwin-arm64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-darwin-arm64/-/resolver-binding-darwin-arm64-1.11.1.tgz", + "integrity": "sha512-gPVA1UjRu1Y/IsB/dQEsp2V1pm44Of6+LWvbLc9SDk1c2KhhDRDBUkQCYVWe6f26uJb3fOK8saWMgtX8IrMk3g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/resolver-binding-darwin-x64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-darwin-x64/-/resolver-binding-darwin-x64-1.11.1.tgz", + "integrity": "sha512-cFzP7rWKd3lZaCsDze07QX1SC24lO8mPty9vdP+YVa3MGdVgPmFc59317b2ioXtgCMKGiCLxJ4HQs62oz6GfRQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/resolver-binding-freebsd-x64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-freebsd-x64/-/resolver-binding-freebsd-x64-1.11.1.tgz", + "integrity": "sha512-fqtGgak3zX4DCB6PFpsH5+Kmt/8CIi4Bry4rb1ho6Av2QHTREM+47y282Uqiu3ZRF5IQioJQ5qWRV6jduA+iGw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm-gnueabihf": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm-gnueabihf/-/resolver-binding-linux-arm-gnueabihf-1.11.1.tgz", + "integrity": "sha512-u92mvlcYtp9MRKmP+ZvMmtPN34+/3lMHlyMj7wXJDeXxuM0Vgzz0+PPJNsro1m3IZPYChIkn944wW8TYgGKFHw==", + "cpu": [ + "arm" + ], + "dev": true, + 
"license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm-musleabihf": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm-musleabihf/-/resolver-binding-linux-arm-musleabihf-1.11.1.tgz", + "integrity": "sha512-cINaoY2z7LVCrfHkIcmvj7osTOtm6VVT16b5oQdS4beibX2SYBwgYLmqhBjA1t51CarSaBuX5YNsWLjsqfW5Cw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm64-gnu/-/resolver-binding-linux-arm64-gnu-1.11.1.tgz", + "integrity": "sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm64-musl/-/resolver-binding-linux-arm64-musl-1.11.1.tgz", + "integrity": "sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-ppc64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-ppc64-gnu/-/resolver-binding-linux-ppc64-gnu-1.11.1.tgz", + "integrity": "sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-riscv64-gnu": { + "version": "1.11.1", + "resolved": 
"https://registry.npmjs.org/@unrs/resolver-binding-linux-riscv64-gnu/-/resolver-binding-linux-riscv64-gnu-1.11.1.tgz", + "integrity": "sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-riscv64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-riscv64-musl/-/resolver-binding-linux-riscv64-musl-1.11.1.tgz", + "integrity": "sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-s390x-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-s390x-gnu/-/resolver-binding-linux-s390x-gnu-1.11.1.tgz", + "integrity": "sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-x64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-x64-gnu/-/resolver-binding-linux-x64-gnu-1.11.1.tgz", + "integrity": "sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-x64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-x64-musl/-/resolver-binding-linux-x64-musl-1.11.1.tgz", + "integrity": "sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==", + "cpu": [ + "x64" + ], 
+ "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-wasm32-wasi": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-wasm32-wasi/-/resolver-binding-wasm32-wasi-1.11.1.tgz", + "integrity": "sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^0.2.11" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@unrs/resolver-binding-win32-arm64-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-arm64-msvc/-/resolver-binding-win32-arm64-msvc-1.11.1.tgz", + "integrity": "sha512-nRcz5Il4ln0kMhfL8S3hLkxI85BXs3o8EYoattsJNdsX4YUU89iOkVn7g0VHSRxFuVMdM4Q1jEpIId1Ihim/Uw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@unrs/resolver-binding-win32-ia32-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-ia32-msvc/-/resolver-binding-win32-ia32-msvc-1.11.1.tgz", + "integrity": "sha512-DCEI6t5i1NmAZp6pFonpD5m7i6aFrpofcp4LA2i8IIq60Jyo28hamKBxNrZcyOwVOZkgsRp9O2sXWBWP8MnvIQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@unrs/resolver-binding-win32-x64-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-x64-msvc/-/resolver-binding-win32-x64-msvc-1.11.1.tgz", + "integrity": "sha512-lrW200hZdbfRtztbygyaq/6jP6AKE8qQN2KvPcJ+x7wiD038YtnYtZ82IMNJ69GJibV7bwL3y9FgK+5w/pYt6g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@vscode/python-extension": { + "version": "1.0.5", + "resolved": 
"https://registry.npmjs.org/@vscode/python-extension/-/python-extension-1.0.5.tgz", + "integrity": "sha512-uYhXUrL/gn92mfqhjAwH2+yGOpjloBxj9ekoL4BhUsKcyJMpEg6WlNf3S3si+5x9zlbHHe7FYQNjZEbz1ymI9Q==", + "engines": { + "node": ">=16.17.1", + "vscode": "^1.78.0" + } + }, + "node_modules/@vscode/vsce": { + "version": "2.32.0", + "resolved": "https://registry.npmjs.org/@vscode/vsce/-/vsce-2.32.0.tgz", + "integrity": "sha512-3EFJfsgrSftIqt3EtdRcAygy/OJ3hstyI1cDmIgkU9CFZW5C+3djr6mfosndCUqcVYuyjmxOK1xmFp/Bq7+NIg==", "dev": true, "dependencies": { "@azure/identity": "^4.1.0", @@ -3331,12 +4146,11 @@ "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" }, "node_modules/acorn": { - "version": "8.14.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", - "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "dev": true, "license": "MIT", - "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -3344,6 +4158,19 @@ "node": ">=0.4.0" } }, + "node_modules/acorn-import-phases": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/acorn-import-phases/-/acorn-import-phases-1.0.4.tgz", + "integrity": "sha512-wKmbr/DDiIXzEOiWrTTUcDm24kQ2vGfZQvM2fwg2vXqR5uW6aapr7ObPtj1th32b9u90/Pf4AItvdTh42fBmVQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.13.0" + }, + "peerDependencies": { + "acorn": "^8.14.0" + } + }, "node_modules/acorn-jsx": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", @@ -3595,13 +4422,6 @@ "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.1.tgz", "integrity": "sha512-k8TVBiPkPJT9uHLdOKfFpqcfprwBFOAAXXozRubr7R7PfIuKvQlzcI4M0pALeqXN09vdaMbUdUj+pass+uULAg==" 
}, - "node_modules/async": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", - "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", - "dev": true, - "license": "MIT" - }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -3660,6 +4480,7 @@ "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", "dev": true, + "peer": true, "dependencies": { "@babel/helper-plugin-utils": "^7.0.0", "@istanbuljs/load-nyc-config": "^1.0.0", @@ -3676,6 +4497,7 @@ "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", "dev": true, + "peer": true, "dependencies": { "@babel/core": "^7.12.3", "@babel/parser": "^7.14.7", @@ -3692,6 +4514,7 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "peer": true, "bin": { "semver": "bin/semver.js" } @@ -3701,6 +4524,7 @@ "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", "dev": true, + "peer": true, "dependencies": { "@babel/template": "^7.3.3", "@babel/types": "^7.3.3", @@ -3712,10 +4536,11 @@ } }, "node_modules/babel-preset-current-node-syntax": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.1.0.tgz", - "integrity": 
"sha512-ldYss8SbBlWva1bs28q78Ju5Zq1F+8BrqBZZ0VFhLBvhh6lCpC2o3gDJi/5DRLs9FgYZCnmPYIVFU4lRXCkyUw==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.2.0.tgz", + "integrity": "sha512-E/VlAEzRrsLEb2+dv8yp3bo4scof3l9nR4lrld+Iy5NyVqgVYUJnDAmunkhPMisRI32Qc4iRiz425d8vM++2fg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/plugin-syntax-async-generators": "^7.8.4", "@babel/plugin-syntax-bigint": "^7.8.3", @@ -3734,7 +4559,7 @@ "@babel/plugin-syntax-top-level-await": "^7.14.5" }, "peerDependencies": { - "@babel/core": "^7.0.0" + "@babel/core": "^7.0.0 || ^8.0.0-0" } }, "node_modules/babel-preset-jest": { @@ -3742,6 +4567,7 @@ "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", "dev": true, + "peer": true, "dependencies": { "babel-plugin-jest-hoist": "^29.6.3", "babel-preset-current-node-syntax": "^1.0.0" @@ -3757,6 +4583,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, "license": "MIT" }, "node_modules/base64-js": { @@ -3783,6 +4610,16 @@ "resolved": "packages/pyright", "link": true }, + "node_modules/baseline-browser-mapping": { + "version": "2.9.12", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.12.tgz", + "integrity": "sha512-Mij6Lij93pTAIsSYy5cyBQ975Qh9uLEc5rwGTpomiZeXZL9yIS6uORJakb3ScHgfs0serMMfIbXzokPMuEiRyw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" + } + }, "node_modules/big.js": { "version": "5.2.2", "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", @@ -3977,6 +4814,7 @@ "version": "2.0.1", "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" @@ -4103,9 +4941,9 @@ "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, "node_modules/browserslist": { - "version": "4.24.4", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz", - "integrity": "sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", "dev": true, "funding": [ { @@ -4121,12 +4959,13 @@ "url": "https://github.com/sponsors/ai" } ], - "peer": true, + "license": "MIT", "dependencies": { - "caniuse-lite": "^1.0.30001688", - "electron-to-chromium": "^1.5.73", - "node-releases": "^2.0.19", - "update-browserslist-db": "^1.1.1" + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" }, "bin": { "browserslist": "cli.js" @@ -4329,9 +5168,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001706", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001706.tgz", - "integrity": "sha512-3ZczoTApMAZwPKYWmwVbQMFpXBDds3/0VciVoUwPUbldlYyVLmRVuRs/PcUZtHpbLRpzzDvrvnFuREsGt6lUug==", + "version": "1.0.30001763", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001763.tgz", + "integrity": "sha512-mh/dGtq56uN98LlNX9qdbKnzINhX0QzhiWBFEkFfsFO4QyCvL8YegrJAazCwXIeqkIob8BlZPGM3xdnY+sgmvQ==", "dev": true, "funding": [ { @@ -4346,7 +5185,8 @@ "type": "github", "url": 
"https://github.com/sponsors/ai" } - ] + ], + "license": "CC-BY-4.0" }, "node_modules/chalk": { "version": "4.1.2", @@ -4509,7 +5349,8 @@ "version": "1.4.3", "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", - "dev": true + "dev": true, + "peer": true }, "node_modules/clean-stack": { "version": "2.2.0", @@ -4918,20 +5759,21 @@ "dev": true }, "node_modules/copy-webpack-plugin": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-11.0.0.tgz", - "integrity": "sha512-fX2MWpamkW0hZxMEg0+mYnA40LTosOSa5TqZ9GYIBzyJa9C3QUaMPSE2xAi/buNr8u89SfD9wHSQVBzrRa/SOQ==", + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-12.0.2.tgz", + "integrity": "sha512-SNwdBeHyII+rWvee/bTnAYyO8vfVdcSTud4EIb6jcZ8inLeWucJE0DnxXQBjlQ5zlteuuvooGQy3LIyGxhvlOA==", "dev": true, + "license": "MIT", "dependencies": { - "fast-glob": "^3.2.11", + "fast-glob": "^3.3.2", "glob-parent": "^6.0.1", - "globby": "^13.1.1", + "globby": "^14.0.0", "normalize-path": "^3.0.0", - "schema-utils": "^4.0.0", - "serialize-javascript": "^6.0.0" + "schema-utils": "^4.2.0", + "serialize-javascript": "^6.0.2" }, "engines": { - "node": ">= 14.15.0" + "node": ">= 18.12.0" }, "funding": { "type": "opencollective", @@ -4942,31 +5784,57 @@ } }, "node_modules/copy-webpack-plugin/node_modules/globby": { - "version": "13.2.2", - "resolved": "https://registry.npmjs.org/globby/-/globby-13.2.2.tgz", - "integrity": "sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w==", + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-14.1.0.tgz", + "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==", "dev": true, + "license": "MIT", "dependencies": { - "dir-glob": 
"^3.0.1", - "fast-glob": "^3.3.0", - "ignore": "^5.2.4", - "merge2": "^1.4.1", - "slash": "^4.0.0" + "@sindresorhus/merge-streams": "^2.1.0", + "fast-glob": "^3.3.3", + "ignore": "^7.0.3", + "path-type": "^6.0.0", + "slash": "^5.1.0", + "unicorn-magic": "^0.3.0" }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/copy-webpack-plugin/node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/copy-webpack-plugin/node_modules/path-type": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-6.0.0.tgz", + "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/copy-webpack-plugin/node_modules/slash": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", - "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", + "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", "dev": true, + "license": "MIT", "engines": { - "node": ">=12" + "node": ">=14.16" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -5022,6 +5890,7 @@ "resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz", "integrity": 
"sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==", "dev": true, + "peer": true, "dependencies": { "@jest/types": "^29.6.3", "chalk": "^4.0.0", @@ -5039,22 +5908,21 @@ } }, "node_modules/cross-env": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz", - "integrity": "sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==", + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-10.1.0.tgz", + "integrity": "sha512-GsYosgnACZTADcmEyJctkJIoqAhHjttw7RsFrVoJNXbsWWqaq6Ym+7kZjq6mS45O0jij6vtiReppKQEtqWy6Dw==", "dev": true, "license": "MIT", "dependencies": { - "cross-spawn": "^7.0.1" + "@epic-web/invariant": "^1.0.0", + "cross-spawn": "^7.0.6" }, "bin": { - "cross-env": "src/bin/cross-env.js", - "cross-env-shell": "src/bin/cross-env-shell.js" + "cross-env": "dist/bin/cross-env.js", + "cross-env-shell": "dist/bin/cross-env-shell.js" }, "engines": { - "node": ">=10.14", - "npm": ">=6", - "yarn": ">=1" + "node": ">=20" } }, "node_modules/cross-spawn": { @@ -5215,9 +6083,9 @@ } }, "node_modules/dedent": { - "version": "1.5.3", - "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.5.3.tgz", - "integrity": "sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==", + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.7.1.tgz", + "integrity": "sha512-9JmrhGZpOlEgOLdQgSm0zxFaYoQon408V1v49aqTWuXENVlnCuY9JBZcXZiCsZQWDjTm5Qf/nIvAy77mXDAjEg==", "dev": true, "license": "MIT", "peerDependencies": { @@ -5380,6 +6248,7 @@ "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", "dev": true, "license": "MIT", + "peer": true, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } @@ -5509,27 +6378,12 @@ "safe-buffer": "^5.0.1" } }, - "node_modules/ejs": { - "version": "3.1.10", - "resolved": 
"https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", - "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "jake": "^10.8.5" - }, - "bin": { - "ejs": "bin/cli.js" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/electron-to-chromium": { - "version": "1.5.120", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.120.tgz", - "integrity": "sha512-oTUp3gfX1gZI+xfD2djr2rzQdHCwHzPQrrK0CD7WpTdF0nPdQ/INcRVjWgLdCT4a9W3jFObR9DAfsuyFQnI8CQ==", - "dev": true + "version": "1.5.267", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.267.tgz", + "integrity": "sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==", + "dev": true, + "license": "ISC" }, "node_modules/elliptic": { "version": "6.6.1", @@ -5634,7 +6488,6 @@ "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", "dev": true, "license": "MIT", - "optional": true, "dependencies": { "once": "^1.4.0" } @@ -5721,10 +6574,11 @@ } }, "node_modules/es-module-lexer": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.6.0.tgz", - "integrity": "sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==", - "dev": true + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-2.0.0.tgz", + "integrity": "sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==", + "dev": true, + "license": "MIT" }, "node_modules/es-object-atoms": { "version": "1.1.1", @@ -5738,56 +6592,61 @@ } }, "node_modules/esbuild": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.19.12.tgz", - "integrity": 
"sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.2.tgz", + "integrity": "sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==", "dev": true, "hasInstallScript": true, + "license": "MIT", "bin": { "esbuild": "bin/esbuild" }, "engines": { - "node": ">=12" + "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.19.12", - "@esbuild/android-arm": "0.19.12", - "@esbuild/android-arm64": "0.19.12", - "@esbuild/android-x64": "0.19.12", - "@esbuild/darwin-arm64": "0.19.12", - "@esbuild/darwin-x64": "0.19.12", - "@esbuild/freebsd-arm64": "0.19.12", - "@esbuild/freebsd-x64": "0.19.12", - "@esbuild/linux-arm": "0.19.12", - "@esbuild/linux-arm64": "0.19.12", - "@esbuild/linux-ia32": "0.19.12", - "@esbuild/linux-loong64": "0.19.12", - "@esbuild/linux-mips64el": "0.19.12", - "@esbuild/linux-ppc64": "0.19.12", - "@esbuild/linux-riscv64": "0.19.12", - "@esbuild/linux-s390x": "0.19.12", - "@esbuild/linux-x64": "0.19.12", - "@esbuild/netbsd-x64": "0.19.12", - "@esbuild/openbsd-x64": "0.19.12", - "@esbuild/sunos-x64": "0.19.12", - "@esbuild/win32-arm64": "0.19.12", - "@esbuild/win32-ia32": "0.19.12", - "@esbuild/win32-x64": "0.19.12" + "@esbuild/aix-ppc64": "0.27.2", + "@esbuild/android-arm": "0.27.2", + "@esbuild/android-arm64": "0.27.2", + "@esbuild/android-x64": "0.27.2", + "@esbuild/darwin-arm64": "0.27.2", + "@esbuild/darwin-x64": "0.27.2", + "@esbuild/freebsd-arm64": "0.27.2", + "@esbuild/freebsd-x64": "0.27.2", + "@esbuild/linux-arm": "0.27.2", + "@esbuild/linux-arm64": "0.27.2", + "@esbuild/linux-ia32": "0.27.2", + "@esbuild/linux-loong64": "0.27.2", + "@esbuild/linux-mips64el": "0.27.2", + "@esbuild/linux-ppc64": "0.27.2", + "@esbuild/linux-riscv64": "0.27.2", + "@esbuild/linux-s390x": "0.27.2", + "@esbuild/linux-x64": "0.27.2", + "@esbuild/netbsd-arm64": "0.27.2", + 
"@esbuild/netbsd-x64": "0.27.2", + "@esbuild/openbsd-arm64": "0.27.2", + "@esbuild/openbsd-x64": "0.27.2", + "@esbuild/openharmony-arm64": "0.27.2", + "@esbuild/sunos-x64": "0.27.2", + "@esbuild/win32-arm64": "0.27.2", + "@esbuild/win32-ia32": "0.27.2", + "@esbuild/win32-x64": "0.27.2" } }, "node_modules/esbuild-loader": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/esbuild-loader/-/esbuild-loader-3.2.0.tgz", - "integrity": "sha512-lnIdRMQpk50alCa0QoW0ozc0D3rjJXl02mtMsk9INIcW25RPZhDja332bu85ixwVNbhQ7VfBRcQyZ/qza8mWiA==", + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/esbuild-loader/-/esbuild-loader-4.4.2.tgz", + "integrity": "sha512-8LdoT9sC7fzfvhxhsIAiWhzLJr9yT3ggmckXxsgvM07wgrRxhuT98XhLn3E7VczU5W5AFsPKv9DdWcZIubbWkQ==", "dev": true, + "license": "MIT", "dependencies": { - "esbuild": "^0.19.0", - "get-tsconfig": "^4.6.2", + "esbuild": "^0.27.1", + "get-tsconfig": "^4.10.1", "loader-utils": "^2.0.4", "webpack-sources": "^1.4.3" }, "funding": { - "url": "https://github.com/esbuild-kit/esbuild-loader?sponsor=1" + "url": "https://github.com/privatenumber/esbuild-loader?sponsor=1" }, "peerDependencies": { "webpack": "^4.40.0 || ^5.0.0" @@ -5836,7 +6695,6 @@ "deprecated": "This version is no longer supported. 
Please see https://eslint.org/version-support for other options.", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", @@ -5888,9 +6746,9 @@ } }, "node_modules/eslint-config-prettier": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.10.0.tgz", - "integrity": "sha512-SM8AMJdeQqRYT9O9zguiruQZaN7+z+E4eAP9oiLNGKMtomwaB1E9dcgUD6ZAn/eQAb52USbvezbiljfZUhbJcg==", + "version": "8.10.2", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.10.2.tgz", + "integrity": "sha512-/IGJ6+Dka158JnP5n5YFMOszjDWrXggGz1LaK/guZq9vZTmniaKlHcsscvkAhn9y4U+BU3JuUdYvtAMcv30y4A==", "dev": true, "license": "MIT", "bin": { @@ -6068,9 +6926,9 @@ } }, "node_modules/execa": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.0.0.tgz", - "integrity": "sha512-ov6w/2LCiuyO4RLYGdpFGjkcs0wMTgGE8PrkTHikeUy5iJekXyPIKUjifk5CsE0pt7sMCrMZ3YNqoCj6idQOnQ==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", "dev": true, "license": "MIT", "dependencies": { @@ -6096,6 +6954,17 @@ "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", "dev": true, + "peer": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/exit-x": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/exit-x/-/exit-x-0.2.2.tgz", + "integrity": "sha512-+I6B/IkJc1o/2tiURyz/ivu/O0nKNEArIUB5O7zBrlDVJr22SCLH3xTeEry428LvFhRzIA1g8izguxJ/gbNcVQ==", + "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8.0" } @@ -6115,6 +6984,7 @@ "resolved": "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz", "integrity": 
"sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==", "dev": true, + "peer": true, "dependencies": { "@jest/expect-utils": "^29.7.0", "jest-get-type": "^29.6.3", @@ -6280,29 +7150,6 @@ "node": "^10.12.0 || >=12.0.0" } }, - "node_modules/filelist": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", - "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "minimatch": "^5.0.1" - } - }, - "node_modules/filelist/node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/fill-range": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", @@ -6651,10 +7498,11 @@ } }, "node_modules/get-tsconfig": { - "version": "4.10.0", - "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.10.0.tgz", - "integrity": "sha512-kGzZ3LWWQcGIAmg6iWvXn0ei6WDtV26wzHRMwDSzmAbcXrTEXxHy6IehI6/4eT6VRKyMP1eF1VqwrVUmE/LR7A==", + "version": "4.13.0", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.0.tgz", + "integrity": "sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ==", "dev": true, + "license": "MIT", "dependencies": { "resolve-pkg-maps": "^1.0.0" }, @@ -6707,7 +7555,8 @@ "version": "0.4.1", "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", - "dev": true + "dev": true, + "license": "BSD-2-Clause" 
}, "node_modules/glob/node_modules/minimatch": { "version": "5.1.6", @@ -6849,6 +7698,28 @@ "dev": true, "license": "MIT" }, + "node_modules/handlebars": { + "version": "4.7.8", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", + "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.5", + "neo-async": "^2.6.2", + "source-map": "^0.6.1", + "wordwrap": "^1.0.0" + }, + "bin": { + "handlebars": "bin/handlebars" + }, + "engines": { + "node": ">=0.4.7" + }, + "optionalDependencies": { + "uglify-js": "^3.1.4" + } + }, "node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -7135,9 +8006,9 @@ } }, "node_modules/import-local": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", - "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", + "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", "dev": true, "license": "MIT", "dependencies": { @@ -7211,6 +8082,7 @@ "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.10" } @@ -7606,6 +8478,7 @@ "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", "dev": true, + "peer": true, "dependencies": { "debug": "^4.1.1", "istanbul-lib-coverage": "^3.0.0", @@ -7644,49 +8517,6 @@ "@pkgjs/parseargs": 
"^0.11.0" } }, - "node_modules/jake": { - "version": "10.9.2", - "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz", - "integrity": "sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "async": "^3.2.3", - "chalk": "^4.0.2", - "filelist": "^1.0.4", - "minimatch": "^3.1.2" - }, - "bin": { - "jake": "bin/cli.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/jake/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/jake/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, "node_modules/jest": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", @@ -7719,6 +8549,7 @@ "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz", "integrity": "sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==", "dev": true, + "peer": true, "dependencies": { "execa": "^5.0.0", "jest-util": "^29.7.0", @@ -7733,6 +8564,7 @@ "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", "dev": true, + "peer": true, "dependencies": { "@jest/environment": "^29.7.0", 
"@jest/expect": "^29.7.0", @@ -7764,6 +8596,7 @@ "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz", "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==", "dev": true, + "peer": true, "dependencies": { "@jest/core": "^29.7.0", "@jest/test-result": "^29.7.0", @@ -7797,6 +8630,7 @@ "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", "dev": true, + "peer": true, "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", @@ -7815,6 +8649,7 @@ "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz", "integrity": "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==", "dev": true, + "peer": true, "dependencies": { "@babel/core": "^7.11.6", "@jest/test-sequencer": "^29.7.0", @@ -7861,6 +8696,7 @@ "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -7873,6 +8709,7 @@ "deprecated": "Glob versions prior to v9 are no longer supported", "dev": true, "license": "ISC", + "peer": true, "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -7894,6 +8731,7 @@ "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, "license": "ISC", + "peer": true, "dependencies": { "brace-expansion": "^1.1.7" }, @@ -7907,6 +8745,7 @@ "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "chalk": "^4.0.0", "diff-sequences": "^29.6.3", @@ -7922,6 +8761,7 @@ "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz", "integrity": 
"sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==", "dev": true, + "peer": true, "dependencies": { "detect-newline": "^3.0.0" }, @@ -7934,6 +8774,7 @@ "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz", "integrity": "sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==", "dev": true, + "peer": true, "dependencies": { "@jest/types": "^29.6.3", "chalk": "^4.0.0", @@ -7950,6 +8791,7 @@ "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz", "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==", "dev": true, + "peer": true, "dependencies": { "@jest/environment": "^29.7.0", "@jest/fake-timers": "^29.7.0", @@ -7968,6 +8810,7 @@ "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", "dev": true, "license": "MIT", + "peer": true, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } @@ -7977,6 +8820,7 @@ "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", "dev": true, + "peer": true, "dependencies": { "@jest/types": "^29.6.3", "@types/graceful-fs": "^4.1.3", @@ -8026,6 +8870,7 @@ "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==", "dev": true, + "peer": true, "dependencies": { "jest-get-type": "^29.6.3", "pretty-format": "^29.7.0" @@ -8039,6 +8884,7 @@ "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz", "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==", "dev": true, + "peer": true, "dependencies": { "chalk": 
"^4.0.0", "jest-diff": "^29.7.0", @@ -8054,6 +8900,7 @@ "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz", "integrity": "sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==", "dev": true, + "peer": true, "dependencies": { "@babel/code-frame": "^7.12.13", "@jest/types": "^29.6.3", @@ -8074,6 +8921,7 @@ "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz", "integrity": "sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==", "dev": true, + "peer": true, "dependencies": { "@jest/types": "^29.6.3", "@types/node": "*", @@ -8105,6 +8953,7 @@ "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", "dev": true, + "peer": true, "engines": { "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } @@ -8114,6 +8963,7 @@ "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz", "integrity": "sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==", "dev": true, + "peer": true, "dependencies": { "chalk": "^4.0.0", "graceful-fs": "^4.2.9", @@ -8134,6 +8984,7 @@ "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz", "integrity": "sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==", "dev": true, + "peer": true, "dependencies": { "jest-regex-util": "^29.6.3", "jest-snapshot": "^29.7.0" @@ -8147,6 +8998,7 @@ "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz", "integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==", "dev": true, + "peer": true, "dependencies": { "@jest/console": "^29.7.0", "@jest/environment": "^29.7.0", @@ -8179,6 +9031,7 @@ "resolved": 
"https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", "dev": true, + "peer": true, "dependencies": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" @@ -8189,6 +9042,7 @@ "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz", "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==", "dev": true, + "peer": true, "dependencies": { "@jest/environment": "^29.7.0", "@jest/fake-timers": "^29.7.0", @@ -8223,6 +9077,7 @@ "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -8235,6 +9090,7 @@ "deprecated": "Glob versions prior to v9 are no longer supported", "dev": true, "license": "ISC", + "peer": true, "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -8256,6 +9112,7 @@ "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, "license": "ISC", + "peer": true, "dependencies": { "brace-expansion": "^1.1.7" }, @@ -8268,6 +9125,7 @@ "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz", "integrity": "sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==", "dev": true, + "peer": true, "dependencies": { "@babel/core": "^7.11.6", "@babel/generator": "^7.7.2", @@ -8299,6 +9157,7 @@ "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", "dev": true, + "peer": true, "dependencies": { "@jest/types": "^29.6.3", "@types/node": "*", @@ -8316,6 +9175,7 @@ "resolved": 
"https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz", "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==", "dev": true, + "peer": true, "dependencies": { "@jest/types": "^29.6.3", "camelcase": "^6.2.0", @@ -8333,6 +9193,7 @@ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", "dev": true, + "peer": true, "engines": { "node": ">=10" }, @@ -8345,6 +9206,7 @@ "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz", "integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==", "dev": true, + "peer": true, "dependencies": { "@jest/test-result": "^29.7.0", "@jest/types": "^29.6.3", @@ -8364,6 +9226,7 @@ "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", "dev": true, + "peer": true, "dependencies": { "@types/node": "*", "jest-util": "^29.7.0", @@ -8379,6 +9242,7 @@ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", "dev": true, + "peer": true, "dependencies": { "has-flag": "^4.0.0" }, @@ -8680,12 +9544,17 @@ } }, "node_modules/loader-runner": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz", - "integrity": "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.1.tgz", + "integrity": "sha512-IWqP2SCPhyVFTBtRcgMHdzlf9ul25NwaFx4wCEH/KjAXuuHY4yNjvPXsBokp8jCB936PyWRaPKUNh8NvylLp2Q==", "dev": true, + "license": "MIT", 
"engines": { "node": ">=6.11.5" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" } }, "node_modules/loader-utils": { @@ -9242,6 +10111,22 @@ "dev": true, "optional": true }, + "node_modules/napi-postinstall": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/napi-postinstall/-/napi-postinstall-0.3.4.tgz", + "integrity": "sha512-PHI5f1O0EP5xJ9gQmFGMS6IZcrVvTjpXjz7Na41gTE7eE2hK11lg04CECCYEEjdc17EV4DO+fkGEtt7TpTaTiQ==", + "dev": true, + "license": "MIT", + "bin": { + "napi-postinstall": "lib/cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/napi-postinstall" + } + }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", @@ -9266,6 +10151,13 @@ "dev": true, "license": "MIT" }, + "node_modules/nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true, + "license": "MIT" + }, "node_modules/node-abi": { "version": "3.74.0", "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.74.0.tgz", @@ -9293,10 +10185,11 @@ "dev": true }, "node_modules/node-releases": { - "version": "2.0.19", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", - "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", - "dev": true + "version": "2.0.27", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", + "dev": true, + "license": "MIT" }, "node_modules/normalize-path": { "version": "3.0.0", @@ -10888,10 +11781,11 @@ } }, "node_modules/pirates": { - "version": 
"4.0.6", - "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", - "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", "dev": true, + "license": "MIT", "engines": { "node": ">= 6" } @@ -11032,6 +11926,7 @@ "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@jest/schemas": "^29.6.3", "ansi-styles": "^5.0.0", @@ -11047,6 +11942,7 @@ "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", "dev": true, "license": "MIT", + "peer": true, "engines": { "node": ">=10" }, @@ -11159,7 +12055,6 @@ "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.2.tgz", "integrity": "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==", "dev": true, - "optional": true, "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" @@ -11613,6 +12508,7 @@ "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", "dev": true, + "license": "MIT", "funding": { "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" } @@ -11623,6 +12519,7 @@ "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==", "dev": true, "license": "MIT", + "peer": true, "engines": { "node": ">=10" } @@ -11836,10 +12733,11 @@ "dev": true }, "node_modules/schema-utils": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz", - "integrity": 
"sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==", + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.3.tgz", + "integrity": "sha512-eflK8wEtyOE6+hsaRVPxvUKYCpRgzLqDTb8krvAsRIwOGlHoSgYLgBXoubGgLd2fT41/OUYdb48v4k4WWHQurA==", "dev": true, + "license": "MIT", "dependencies": { "@types/json-schema": "^7.0.9", "ajv": "^8.9.0", @@ -11859,7 +12757,6 @@ "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", "dev": true, - "peer": true, "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", @@ -11890,9 +12787,9 @@ "dev": true }, "node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", "license": "ISC", "bin": { "semver": "bin/semver.js" @@ -12005,12 +12902,14 @@ } }, "node_modules/shelljs": { - "version": "0.8.5", - "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", - "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.9.2.tgz", + "integrity": "sha512-S3I64fEiKgTZzKCC46zT/Ib9meqofLrQVbpSswtjFfAVDW+AZ54WTnAM/3/yENoxz/V1Cy6u3kiiEbQ4DNphvw==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { - "glob": "^7.0.0", + "execa": "^1.0.0", + "fast-glob": "^3.3.2", "interpret": "^1.0.0", "rechoir": "^0.6.2" }, @@ -12018,81 +12917,164 @@ "shjs": "bin/shjs" }, "engines": { - "node": ">=4" + "node": ">=18" } }, - 
"node_modules/shelljs/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "node_modules/shelljs/node_modules/cross-spawn": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.6.tgz", + "integrity": "sha512-VqCUuhcd1iB+dsv8gxPttb5iZh/D0iubSP21g36KXdEuf6I5JiioesUVjpCdHV9MZRUfVFlvwtIUyPfxo5trtw==", "dev": true, "license": "MIT", "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/shelljs/node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" }, "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "node": ">=4.8" } }, - "node_modules/shelljs/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "node_modules/shelljs/node_modules/execa": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", + "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", "dev": true, - "license": "ISC", + "license": "MIT", 
"dependencies": { - "brace-expansion": "^1.1.7" + "cross-spawn": "^6.0.0", + "get-stream": "^4.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" }, "engines": { - "node": "*" + "node": ">=6" } }, - "node_modules/shx": { - "version": "0.3.4", - "resolved": "https://registry.npmjs.org/shx/-/shx-0.3.4.tgz", - "integrity": "sha512-N6A9MLVqjxZYcVn8hLmtneQWIJtp8IKzMP4eMnx+nqkvXoqinUPCbUFLp2UcWTEIUONhlk0ewxr/jaVGlc+J+g==", + "node_modules/shelljs/node_modules/get-stream": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", + "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", "dev": true, + "license": "MIT", "dependencies": { - "minimist": "^1.2.3", - "shelljs": "^0.8.5" - }, - "bin": { - "shx": "lib/cli.js" + "pump": "^3.0.0" }, "engines": { "node": ">=6" } }, - "node_modules/side-channel": { + "node_modules/shelljs/node_modules/is-stream": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", - "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", "dev": true, - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3", - "side-channel-list": "^1.0.0", - "side-channel-map": "^1.0.1", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/shelljs/node_modules/npm-run-path": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", + "integrity": "sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"path-key": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/shelljs/node_modules/path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/shelljs/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/shelljs/node_modules/shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/shelljs/node_modules/shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/shelljs/node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, + "node_modules/shx": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/shx/-/shx-0.4.0.tgz", + 
"integrity": "sha512-Z0KixSIlGPpijKgcH6oCMCbltPImvaKy0sGH8AkLRXw1KyzpKtaCTizP2xen+hNDqVF4xxgvA0KXSb9o4Q6hnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.8", + "shelljs": "^0.9.2" + }, + "bin": { + "shx": "lib/cli.js" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" }, "engines": { @@ -12238,9 +13220,10 @@ } }, "node_modules/smol-toml": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/smol-toml/-/smol-toml-1.3.1.tgz", - "integrity": "sha512-tEYNll18pPKHroYSmLLrksq233j021G0giwW7P3D24jC54pQ5W5BXMsQ/Mvw1OJCmEYDgY+lrzT+3nNUtoNfXQ==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/smol-toml/-/smol-toml-1.6.0.tgz", + "integrity": "sha512-4zemZi0HvTnYwLfrpk/CF9LOd9Lt87kAt50GnqhMpyF9U3poDAP2+iukq2bZsO/ufegbYehBkqINbsWxj4l4cw==", + "license": "BSD-3-Clause", "engines": { "node": ">= 18" }, @@ -12492,6 +13475,16 @@ "node": ">=8" } }, + "node_modules/strip-eof": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", + "integrity": "sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/strip-final-newline": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", @@ -12540,6 +13533,22 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/synckit": { + "version": "0.11.11", + "resolved": 
"https://registry.npmjs.org/synckit/-/synckit-0.11.11.tgz", + "integrity": "sha512-MeQTA1r0litLUf0Rp/iisCaL8761lKAZHaimlbGK4j0HysC4PLfqygQj9srcs0m2RdtDYnF8UuYyKpbjHYp7Jw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@pkgr/core": "^0.2.9" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/synckit" + } + }, "node_modules/syncpack": { "version": "10.9.3", "resolved": "https://registry.npmjs.org/syncpack/-/syncpack-10.9.3.tgz", @@ -12749,12 +13758,17 @@ } }, "node_modules/tapable": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", - "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", + "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" } }, "node_modules/tar": { @@ -12850,13 +13864,14 @@ } }, "node_modules/terser": { - "version": "5.39.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.39.0.tgz", - "integrity": "sha512-LBAhFyLho16harJoWMg/nZsQYgTrg5jXOn2nCYjRUcZZEdE3qa2zb8QEDRUGVZBW4rlazf2fxkg8tztybTaqWw==", + "version": "5.44.1", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.44.1.tgz", + "integrity": "sha512-t/R3R/n0MSwnnazuPpPNVO60LX0SKL45pyl9YlvxIdkH0Of7D5qM2EVe+yASRIlY5pZ73nclYJfNANGWPwFDZw==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "@jridgewell/source-map": "^0.3.3", - "acorn": "^8.8.2", + "acorn": "^8.15.0", "commander": "^2.20.0", "source-map-support": "~0.5.20" }, @@ -12868,10 +13883,11 @@ } }, "node_modules/terser-webpack-plugin": { - "version": "5.3.14", - "resolved": 
"https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.14.tgz", - "integrity": "sha512-vkZjpUjb6OMS7dhV+tILUW6BhpDR7P2L/aQSAv+Uwk+m8KATX9EccViHTJR2qDtACKPIYndLGCyl3FMo+r2LMw==", + "version": "5.3.16", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.16.tgz", + "integrity": "sha512-h9oBFCWrq78NyWWVcSwZarJkZ01c2AyGrzs1crmHZO3QUg9D61Wu4NPjBy69n7JqylFF5y+CsUZYmYEIZ3mR+Q==", "dev": true, + "license": "MIT", "dependencies": { "@jridgewell/trace-mapping": "^0.3.25", "jest-worker": "^27.4.5", @@ -12906,6 +13922,7 @@ "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", "dev": true, + "license": "MIT", "dependencies": { "@types/node": "*", "merge-stream": "^2.0.0", @@ -12920,6 +13937,7 @@ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", "dev": true, + "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -12934,7 +13952,8 @@ "version": "2.20.3", "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/test-exclude": { "version": "6.0.0", @@ -13018,9 +14037,9 @@ } }, "node_modules/tmp": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz", - "integrity": "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==", + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", + "integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", "license": "MIT", "engines": { "node": ">=14.14" @@ 
-13078,19 +14097,20 @@ "dev": true }, "node_modules/ts-jest": { - "version": "29.2.6", - "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.2.6.tgz", - "integrity": "sha512-yTNZVZqc8lSixm+QGVFcPe6+yj7+TWZwIesuOWvfcn4B9bz5x4NDzVCQQjOs7Hfouu36aEqfEbo9Qpo+gq8dDg==", + "version": "29.4.6", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.4.6.tgz", + "integrity": "sha512-fSpWtOO/1AjSNQguk43hb/JCo16oJDnMJf3CdEGNkqsEX3t0KX96xvyX1D7PfLCpVoKu4MfVrqUkFyblYoY4lA==", "dev": true, + "license": "MIT", "dependencies": { "bs-logger": "^0.2.6", - "ejs": "^3.1.10", "fast-json-stable-stringify": "^2.1.0", - "jest-util": "^29.0.0", + "handlebars": "^4.7.8", "json5": "^2.2.3", "lodash.memoize": "^4.1.2", "make-error": "^1.3.6", - "semver": "^7.7.1", + "semver": "^7.7.3", + "type-fest": "^4.41.0", "yargs-parser": "^21.1.1" }, "bin": { @@ -13101,10 +14121,11 @@ }, "peerDependencies": { "@babel/core": ">=7.0.0-beta.0 <8", - "@jest/transform": "^29.0.0", - "@jest/types": "^29.0.0", - "babel-jest": "^29.0.0", - "jest": "^29.0.0", + "@jest/transform": "^29.0.0 || ^30.0.0", + "@jest/types": "^29.0.0 || ^30.0.0", + "babel-jest": "^29.0.0 || ^30.0.0", + "jest": "^29.0.0 || ^30.0.0", + "jest-util": "^29.0.0 || ^30.0.0", "typescript": ">=4.3 <6" }, "peerDependenciesMeta": { @@ -13122,26 +14143,31 @@ }, "esbuild": { "optional": true + }, + "jest-util": { + "optional": true } } }, - "node_modules/ts-jest/node_modules/semver": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", - "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "node_modules/ts-jest/node_modules/type-fest": { + "version": "4.41.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", + "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", "dev": true, - "bin": { - "semver": "bin/semver.js" - }, + "license": "(MIT OR 
CC0-1.0)", "engines": { - "node": ">=10" + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/ts-loader": { - "version": "9.5.2", - "resolved": "https://registry.npmjs.org/ts-loader/-/ts-loader-9.5.2.tgz", - "integrity": "sha512-Qo4piXvOTWcMGIgRiuFa6nHNm+54HbYaZCKqc9eeZCLRy3XqafQgwX2F7mofrbJG3g7EEb+lkiR+z2Lic2s3Zw==", + "version": "9.5.4", + "resolved": "https://registry.npmjs.org/ts-loader/-/ts-loader-9.5.4.tgz", + "integrity": "sha512-nCz0rEwunlTZiy6rXFByQU1kVVpCIgUpc/psFiKVrUwrizdnIbRFu8w7bxhUF0X613DYwT4XzrZHpVyMe758hQ==", "dev": true, + "license": "MIT", "dependencies": { "chalk": "^4.1.0", "enhanced-resolve": "^5.0.0", @@ -13278,7 +14304,6 @@ "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", "dev": true, "license": "Apache-2.0", - "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -13301,6 +14326,20 @@ "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", "dev": true }, + "node_modules/uglify-js": { + "version": "3.19.3", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz", + "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", + "dev": true, + "license": "BSD-2-Clause", + "optional": true, + "bin": { + "uglifyjs": "bin/uglifyjs" + }, + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/underscore": { "version": "1.13.7", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.7.tgz", @@ -13325,6 +14364,19 @@ "dev": true, "license": "MIT" }, + "node_modules/unicorn-magic": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz", + "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + 
"funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/unique-filename": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz", @@ -13377,6 +14429,41 @@ "node": ">= 10.0.0" } }, + "node_modules/unrs-resolver": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/unrs-resolver/-/unrs-resolver-1.11.1.tgz", + "integrity": "sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "napi-postinstall": "^0.3.0" + }, + "funding": { + "url": "https://opencollective.com/unrs-resolver" + }, + "optionalDependencies": { + "@unrs/resolver-binding-android-arm-eabi": "1.11.1", + "@unrs/resolver-binding-android-arm64": "1.11.1", + "@unrs/resolver-binding-darwin-arm64": "1.11.1", + "@unrs/resolver-binding-darwin-x64": "1.11.1", + "@unrs/resolver-binding-freebsd-x64": "1.11.1", + "@unrs/resolver-binding-linux-arm-gnueabihf": "1.11.1", + "@unrs/resolver-binding-linux-arm-musleabihf": "1.11.1", + "@unrs/resolver-binding-linux-arm64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-arm64-musl": "1.11.1", + "@unrs/resolver-binding-linux-ppc64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-riscv64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-riscv64-musl": "1.11.1", + "@unrs/resolver-binding-linux-s390x-gnu": "1.11.1", + "@unrs/resolver-binding-linux-x64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-x64-musl": "1.11.1", + "@unrs/resolver-binding-wasm32-wasi": "1.11.1", + "@unrs/resolver-binding-win32-arm64-msvc": "1.11.1", + "@unrs/resolver-binding-win32-ia32-msvc": "1.11.1", + "@unrs/resolver-binding-win32-x64-msvc": "1.11.1" + } + }, "node_modules/untildify": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", @@ -13388,9 +14475,9 @@ } }, "node_modules/update-browserslist-db": { - "version": "1.1.3", - "resolved": 
"https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", - "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", "dev": true, "funding": [ { @@ -13406,6 +14493,7 @@ "url": "https://github.com/sponsors/ai" } ], + "license": "MIT", "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" @@ -13543,32 +14631,35 @@ "integrity": "sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==" }, "node_modules/vscode-jsonrpc": { - "version": "9.0.0-next.7", - "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-9.0.0-next.7.tgz", - "integrity": "sha512-7SgnbbbJfYr3off0T2KV/RCMYhVsuLeFPw8l3bkxSiavtoTLsOdu1jyxK3yWbdQuO8QOJC7+no0TXmYjRWSC+g==", + "version": "9.0.0-next.11", + "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-9.0.0-next.11.tgz", + "integrity": "sha512-u6LElQNbSiE9OugEEmrUKwH6+8BpPz2S5MDHvQUqHL//I4Q8GPikKLOUf856UnbLkZdhxaPrExac1lA3XwpIPA==", + "license": "MIT", "engines": { "node": ">=14.0.0" } }, "node_modules/vscode-languageclient": { - "version": "10.0.0-next.14", - "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-10.0.0-next.14.tgz", - "integrity": "sha512-4m/cpNocRgrAkWc8IH4wd3zllAs16NvMmeGcQxFa6xt+mGXJASIeqp0NAFWKZERKg6ClVgBph+SDSZSVvNZ2oA==", + "version": "10.0.0-next.19", + "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-10.0.0-next.19.tgz", + "integrity": "sha512-sJtO8y0Dxs4ue/DK0QgO/ATBfZwQdee3TqvCsoqUej/GZrBA01DTf4pbfswRxHsTxN2yH0haImUnMafWHtE4CQ==", + "license": "MIT", "dependencies": { - "minimatch": "^10.0.1", - "semver": "^7.6.3", - "vscode-languageserver-protocol": 
"3.17.6-next.12" + "minimatch": "^10.0.3", + "semver": "^7.7.1", + "vscode-languageserver-protocol": "3.17.6-next.16" }, "engines": { "vscode": "^1.91.0" } }, "node_modules/vscode-languageclient/node_modules/minimatch": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.1.tgz", - "integrity": "sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==", + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz", + "integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==", + "license": "BlueOak-1.0.0", "dependencies": { - "brace-expansion": "^2.0.1" + "@isaacs/brace-expansion": "^5.0.0" }, "engines": { "node": "20 || >=22" @@ -13578,22 +14669,24 @@ } }, "node_modules/vscode-languageserver": { - "version": "10.0.0-next.12", - "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-10.0.0-next.12.tgz", - "integrity": "sha512-6lT2CJhH93YFmdDrFTwWvuG0/yzEN2Zbw/DfPaRF91sylZ3TSD0NkJU5jug6t/3NLoDh9VjfJZkgkKr6e3UmRw==", + "version": "10.0.0-next.16", + "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-10.0.0-next.16.tgz", + "integrity": "sha512-RbsYDOhddv1NtBCAR7+oVxxCmOpQUHhrtgUE0xz6J+BJGSCkfOqBCyLUIwSjKk2rK9llxUj/pR5aL8QCsXrxow==", + "license": "MIT", "dependencies": { - "vscode-languageserver-protocol": "3.17.6-next.12" + "vscode-languageserver-protocol": "3.17.6-next.16" }, "bin": { "installServerIntoExtension": "bin/installServerIntoExtension" } }, "node_modules/vscode-languageserver-protocol": { - "version": "3.17.6-next.12", - "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.6-next.12.tgz", - "integrity": "sha512-EqrbwF0glTWD2HiDpFc32pJOr6/bJvyKSfCpRQrKy3XsfdloH4p3o/rNJYcpujM0OVLmPZgl1i9g57z9g2YRJA==", + "version": "3.17.6-next.16", + "resolved": 
"https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.6-next.16.tgz", + "integrity": "sha512-kQTjXEuyxMbdmmZ3U+Lib3oUl12xEKNc73RtWxPSDS3TFtjVwt98Q1CUzfDA9EUpsA24M46Bl6q3sLe9AUOKyw==", + "license": "MIT", "dependencies": { - "vscode-jsonrpc": "9.0.0-next.7", + "vscode-jsonrpc": "9.0.0-next.11", "vscode-languageserver-types": "3.17.6-next.6" } }, @@ -13626,10 +14719,11 @@ } }, "node_modules/watchpack": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.2.tgz", - "integrity": "sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.5.0.tgz", + "integrity": "sha512-e6vZvY6xboSwLz2GD36c16+O/2Z6fKvIf4pOXptw2rY9MVwE/TXc6RGqxD3I3x0a28lwBY7DE+76uTPSsBrrCA==", "dev": true, + "license": "MIT", "dependencies": { "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.1.2" @@ -13649,35 +14743,37 @@ } }, "node_modules/webpack": { - "version": "5.98.0", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.98.0.tgz", - "integrity": "sha512-UFynvx+gM44Gv9qFgj0acCQK2VE1CtdfwFdimkapco3hlPCJ/zeq73n2yVKimVbtm+TnApIugGhLJnkU6gjYXA==", + "version": "5.104.1", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.104.1.tgz", + "integrity": "sha512-Qphch25abbMNtekmEGJmeRUhLDbe+QfiWTiqpKYkpCOWY64v9eyl+KRRLmqOFA2AvKPpc9DC6+u2n76tQLBoaA==", "dev": true, - "peer": true, + "license": "MIT", "dependencies": { "@types/eslint-scope": "^3.7.7", - "@types/estree": "^1.0.6", + "@types/estree": "^1.0.8", + "@types/json-schema": "^7.0.15", "@webassemblyjs/ast": "^1.14.1", "@webassemblyjs/wasm-edit": "^1.14.1", "@webassemblyjs/wasm-parser": "^1.14.1", - "acorn": "^8.14.0", - "browserslist": "^4.24.0", + "acorn": "^8.15.0", + "acorn-import-phases": "^1.0.3", + "browserslist": "^4.28.1", "chrome-trace-event": "^1.0.2", - "enhanced-resolve": "^5.17.1", - "es-module-lexer": "^1.2.1", + 
"enhanced-resolve": "^5.17.4", + "es-module-lexer": "^2.0.0", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.2.11", "json-parse-even-better-errors": "^2.3.1", - "loader-runner": "^4.2.0", + "loader-runner": "^4.3.1", "mime-types": "^2.1.27", "neo-async": "^2.6.2", - "schema-utils": "^4.3.0", - "tapable": "^2.1.1", - "terser-webpack-plugin": "^5.3.11", - "watchpack": "^2.4.1", - "webpack-sources": "^3.2.3" + "schema-utils": "^4.3.3", + "tapable": "^2.3.0", + "terser-webpack-plugin": "^5.3.16", + "watchpack": "^2.4.4", + "webpack-sources": "^3.3.3" }, "bin": { "webpack": "bin/webpack.js" @@ -13700,7 +14796,6 @@ "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-5.1.4.tgz", "integrity": "sha512-pIDJHIEI9LR0yxHXQ+Qh95k2EvXpWzZ5l+d+jIo+RdSm9MiHfzazIxwwni/p7+x4eJZuvG1AJwgC4TNQ7NRgsg==", "dev": true, - "peer": true, "dependencies": { "@discoveryjs/json-ext": "^0.5.0", "@webpack-cli/configtest": "^2.1.1", @@ -13821,10 +14916,11 @@ "dev": true }, "node_modules/webpack/node_modules/webpack-sources": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", - "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.3.3.tgz", + "integrity": "sha512-yd1RBzSGanHkitROoPFd6qsrxt+oFhg/129YzheDGqeustzX0vTZJZsSsQjVQC4yzBQ56K55XU8gaNCtIzOnTg==", "dev": true, + "license": "MIT", "engines": { "node": ">=10.13.0" } @@ -13994,6 +15090,13 @@ "node": ">=0.10.0" } }, + "node_modules/wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", + "dev": true, + "license": "MIT" + }, "node_modules/wordwrapjs": { "version": "4.0.1", "resolved": 
"https://registry.npmjs.org/wordwrapjs/-/wordwrapjs-4.0.1.tgz", @@ -14040,6 +15143,33 @@ "dev": true, "license": "ISC" }, + "node_modules/write-file-atomic": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz", + "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/write-file-atomic/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/xdg-basedir": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-5.1.0.tgz", @@ -14211,16 +15341,16 @@ "stream-browserify": "^3.0.0", "util": "^0.12.5", "vm-browserify": "^1.1.2", - "vscode-languageserver": "^10.0.0-next.10" + "vscode-languageserver": "^10.0.0-next.13" }, "devDependencies": { "@types/copy-webpack-plugin": "^8.0.1", - "@types/node": "^22.16.0", - "copy-webpack-plugin": "^11.0.0", - "shx": "^0.3.4", - "ts-loader": "^9.5.1", + "@types/node": "^22.18.12", + "copy-webpack-plugin": "^12.0.2", + "shx": "^0.4.0", + "ts-loader": "^9.5.4", "typescript": "~5.5.4", - "webpack": "^5.97.1", + "webpack": "^5.102.1", "webpack-cli": "^5.1.4", "webpack-virtual-modules": "^0.6.2" }, @@ -14253,7 +15383,7 @@ }, "packages/pyright": { "name": "basedpyright", - "version": "1.1.407", + "version": "1.1.408", "license": "MIT", "bin": { "basedpyright": "index.js", @@ -14263,13 +15393,13 @@ }, "devDependencies": { "@types/is-ci": "^3.0.4", - 
"@types/node": "^22.16.0", - "copy-webpack-plugin": "^11.0.0", - "esbuild-loader": "^3.2.0", - "shx": "^0.3.4", - "ts-loader": "^9.5.1", + "@types/node": "^22.18.12", + "copy-webpack-plugin": "^12.0.2", + "esbuild-loader": "^4.4.0", + "shx": "^0.4.0", + "ts-loader": "^9.5.4", "typescript": "~5.5.4", - "webpack": "^5.97.1", + "webpack": "^5.102.1", "webpack-cli": "^5.1.4" }, "engines": { @@ -14280,7 +15410,7 @@ } }, "packages/pyright-internal": { - "version": "1.1.407", + "version": "1.1.408", "license": "MIT", "dependencies": { "@actions/core": "^1.10.1", @@ -14293,66 +15423,1363 @@ "diff": "^7.0.0", "jsonc-parser": "^3.3.1", "pyright-to-gitlab-ci": "^0.1.3", - "smol-toml": "^1.3.1", + "smol-toml": "^1.4.2", "source-map-support": "^0.5.21", - "tmp": "^0.2.3", - "vscode-jsonrpc": "^9.0.0-next.5", - "vscode-languageserver": "^10.0.0-next.10", - "vscode-languageserver-protocol": "^3.17.6-next.10", + "tmp": "^0.2.5", + "vscode-jsonrpc": "^9.0.0-next.8", + "vscode-languageserver": "^10.0.0-next.13", + "vscode-languageserver-protocol": "^3.17.6-next.13", "vscode-languageserver-textdocument": "^1.0.11", "vscode-languageserver-types": "^3.17.6-next.5", - "vscode-uri": "^3.0.8" + "vscode-uri": "^3.1.0" }, "devDependencies": { "@types/command-line-args": "^5.2.3", "@types/diff": "^5.2.2", "@types/fs-extra": "^11.0.4", - "@types/jest": "^29.5.14", - "@types/lodash": "^4.17.14", - "@types/node": "^22.16.0", + "@types/jest": "^30.0.0", + "@types/lodash": "^4.17.20", + "@types/node": "^22.18.12", "@types/tmp": "^0.2.6", - "copy-webpack-plugin": "^11.0.0", - "esbuild-loader": "^3.2.0", + "copy-webpack-plugin": "^12.0.2", + "esbuild-loader": "^4.4.0", "fs-extra": "^11.2.0", - "jest": "^29.7.0", + "jest": "^30.2.0", "jest-junit": "^16.0.0", - "shx": "^0.3.4", - "ts-jest": "^29.2.5", - "ts-loader": "^9.5.1", + "shx": "^0.4.0", + "ts-jest": "^29.4.5", + "ts-loader": "^9.5.4", "typed-jest-expect": "^1.0.1", "typescript": "~5.5.4", - "webpack": "^5.97.1", + "webpack": "^5.102.1", 
"webpack-cli": "^5.1.4", "word-wrap": "1.2.5" } }, - "packages/pyright-internal/node_modules/@types/lodash": { - "version": "4.17.16", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.16.tgz", - "integrity": "sha512-HX7Em5NYQAXKW+1T+FiuG27NGwzJfCX3s1GjOa7ujxZa52kjJLOr4FUxT+giF6Tgxv1e+/czV/iTtBw27WTU9g==", - "dev": true + "packages/pyright-internal/node_modules/@jest/console": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-30.2.0.tgz", + "integrity": "sha512-+O1ifRjkvYIkBqASKWgLxrpEhQAAE7hY77ALLUufSk5717KfOShg6IbqLmdsLMPdUiFvA2kTs0R7YZy+l0IzZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "jest-message-util": "30.2.0", + "jest-util": "30.2.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/@jest/console/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "packages/pyright-internal/node_modules/@jest/core": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-30.2.0.tgz", + "integrity": "sha512-03W6IhuhjqTlpzh/ojut/pDB2LPRygyWX8ExpgHtQA8H/3K7+1vKmcINx5UzeOX1se6YEsBsOHQ1CRzf3fOwTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "30.2.0", + "@jest/pattern": "30.0.1", + "@jest/reporters": "30.2.0", + "@jest/test-result": "30.2.0", + "@jest/transform": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "ansi-escapes": "^4.3.2", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "exit-x": "^0.2.2", + "graceful-fs": "^4.2.11", + "jest-changed-files": "30.2.0", + "jest-config": "30.2.0", + "jest-haste-map": "30.2.0", 
+ "jest-message-util": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-resolve": "30.2.0", + "jest-resolve-dependencies": "30.2.0", + "jest-runner": "30.2.0", + "jest-runtime": "30.2.0", + "jest-snapshot": "30.2.0", + "jest-util": "30.2.0", + "jest-validate": "30.2.0", + "jest-watcher": "30.2.0", + "micromatch": "^4.0.8", + "pretty-format": "30.2.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "packages/pyright-internal/node_modules/@jest/core/node_modules/jest-config": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-30.2.0.tgz", + "integrity": "sha512-g4WkyzFQVWHtu6uqGmQR4CQxz/CH3yDSlhzXMWzNjDx843gYjReZnMRanjRCq5XZFuQrGDxgUaiYWE8BRfVckA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@jest/get-type": "30.1.0", + "@jest/pattern": "30.0.1", + "@jest/test-sequencer": "30.2.0", + "@jest/types": "30.2.0", + "babel-jest": "30.2.0", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "deepmerge": "^4.3.1", + "glob": "^10.3.10", + "graceful-fs": "^4.2.11", + "jest-circus": "30.2.0", + "jest-docblock": "30.2.0", + "jest-environment-node": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-resolve": "30.2.0", + "jest-runner": "30.2.0", + "jest-util": "30.2.0", + "jest-validate": "30.2.0", + "micromatch": "^4.0.8", + "parse-json": "^5.2.0", + "pretty-format": "30.2.0", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "@types/node": "*", + "esbuild-register": ">=3.4.0", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "esbuild-register": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + 
"packages/pyright-internal/node_modules/@jest/core/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "packages/pyright-internal/node_modules/@jest/environment": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-30.2.0.tgz", + "integrity": "sha512-/QPTL7OBJQ5ac09UDRa3EQes4gt1FTEG/8jZ/4v5IVzx+Cv7dLxlVIvfvSVRiiX2drWyXeBjkMSR8hvOWSog5g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/fake-timers": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "jest-mock": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/@jest/expect": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-30.2.0.tgz", + "integrity": "sha512-V9yxQK5erfzx99Sf+7LbhBwNWEZ9eZay8qQ9+JSC0TrMR1pMDHLMY+BnVPacWU6Jamrh252/IKo4F1Xn/zfiqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "30.2.0", + "jest-snapshot": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/@jest/expect-utils": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-30.2.0.tgz", + "integrity": "sha512-1JnRfhqpD8HGpOmQp180Fo9Zt69zNtC+9lR+kT7NVL05tNXIi+QC8Csz7lfidMoVLPD3FnOtcmp0CEFnxExGEA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/@jest/fake-timers": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-30.2.0.tgz", + "integrity": 
"sha512-HI3tRLjRxAbBy0VO8dqqm7Hb2mIa8d5bg/NJkyQcOk7V118ObQML8RC5luTF/Zsg4474a+gDvhce7eTnP4GhYw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@sinonjs/fake-timers": "^13.0.0", + "@types/node": "*", + "jest-message-util": "30.2.0", + "jest-mock": "30.2.0", + "jest-util": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/@jest/globals": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-30.2.0.tgz", + "integrity": "sha512-b63wmnKPaK+6ZZfpYhz9K61oybvbI1aMcIs80++JI1O1rR1vaxHUCNqo3ITu6NU0d4V34yZFoHMn/uoKr/Rwfw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "30.2.0", + "@jest/expect": "30.2.0", + "@jest/types": "30.2.0", + "jest-mock": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/@jest/reporters": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-30.2.0.tgz", + "integrity": "sha512-DRyW6baWPqKMa9CzeiBjHwjd8XeAyco2Vt8XbcLFjiwCOEKOvy82GJ8QQnJE9ofsxCMPjH4MfH8fCWIHHDKpAQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "30.2.0", + "@jest/test-result": "30.2.0", + "@jest/transform": "30.2.0", + "@jest/types": "30.2.0", + "@jridgewell/trace-mapping": "^0.3.25", + "@types/node": "*", + "chalk": "^4.1.2", + "collect-v8-coverage": "^1.0.2", + "exit-x": "^0.2.2", + "glob": "^10.3.10", + "graceful-fs": "^4.2.11", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^6.0.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^5.0.0", + "istanbul-reports": "^3.1.3", + "jest-message-util": "30.2.0", + "jest-util": "30.2.0", + "jest-worker": "30.2.0", + "slash": "^3.0.0", + "string-length": "^4.0.2", + "v8-to-istanbul": "^9.0.1" + }, + "engines": { + "node": 
"^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "packages/pyright-internal/node_modules/@jest/reporters/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "packages/pyright-internal/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/@jest/source-map": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-30.0.1.tgz", + "integrity": "sha512-MIRWMUUR3sdbP36oyNyhbThLHyJ2eEDClPCiHVbrYAe5g3CHRArIVpBw7cdSB5fr+ofSfIb2Tnsw8iEHL0PYQg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "callsites": "^3.1.0", + "graceful-fs": "^4.2.11" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/@jest/test-result": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-30.2.0.tgz", + "integrity": "sha512-RF+Z+0CCHkARz5HT9mcQCBulb1wgCP3FBvl9VFokMX27acKphwyQsNuWH3c+ojd1LeWBLoTYoxF0zm6S/66mjg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "30.2.0", + "@jest/types": "30.2.0", + "@types/istanbul-lib-coverage": "^2.0.6", + 
"collect-v8-coverage": "^1.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/@jest/test-sequencer": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-30.2.0.tgz", + "integrity": "sha512-wXKgU/lk8fKXMu/l5Hog1R61bL4q5GCdT6OJvdAFz1P+QrpoFuLU68eoKuVc4RbrTtNnTL5FByhWdLgOPSph+Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "30.2.0", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/@jest/test-sequencer/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "packages/pyright-internal/node_modules/@jest/transform": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-30.2.0.tgz", + "integrity": "sha512-XsauDV82o5qXbhalKxD7p4TZYYdwcaEXC77PPD2HixEFF+6YGppjrAAQurTl2ECWcEomHBMMNS9AH3kcCFx8jA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@jest/types": "30.2.0", + "@jridgewell/trace-mapping": "^0.3.25", + "babel-plugin-istanbul": "^7.0.1", + "chalk": "^4.1.2", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "micromatch": "^4.0.8", + "pirates": "^4.0.7", + "slash": "^3.0.0", + "write-file-atomic": "^5.0.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/@jest/transform/node_modules/slash": { + "version": "3.0.0", 
+ "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "packages/pyright-internal/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/@sinclair/typebox": { + "version": "0.34.47", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.47.tgz", + "integrity": "sha512-ZGIBQ+XDvO5JQku9wmwtabcVTHJsgSWAHYtVuM9pBNNR5E88v6Jcj/llpmsjivig5X8A8HHOb4/mbEKPS5EvAw==", + "dev": true, + "license": "MIT" + }, + "packages/pyright-internal/node_modules/@sinonjs/fake-timers": { + "version": "13.0.5", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-13.0.5.tgz", + "integrity": "sha512-36/hTbH2uaWuGVERyC6da9YwGWnzUZXuPro/F2LfsdOsLnCojz/iSH8MxUt/FD2S5XBSVPhmArFUXcpCQ2Hkiw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@sinonjs/commons": "^3.0.1" + } + }, + "packages/pyright-internal/node_modules/@types/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-FOvQ0YPD5NOfPgMzJihoT+Za5pdkDJWcbpuj1DjaKZIr/gxodQjY/uWEFlTNqW2ugXHUiL8lRQgw63dzKHZdeQ==", + "dev": true, + "license": "MIT" + }, + "packages/pyright-internal/node_modules/@types/yargs": { + "version": 
"17.0.35", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.35.tgz", + "integrity": "sha512-qUHkeCyQFxMXg79wQfTtfndEC+N9ZZg76HJftDJp+qH2tV7Gj4OJi7l+PiWwJ+pWtW8GwSmqsDj/oymhrTWXjg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "packages/pyright-internal/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "packages/pyright-internal/node_modules/babel-jest": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-30.2.0.tgz", + "integrity": "sha512-0YiBEOxWqKkSQWL9nNGGEgndoeL0ZpWrbLMNL5u/Kaxrli3Eaxlt3ZtIDktEvXt4L/R9r3ODr2zKwGM/2BjxVw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/transform": "30.2.0", + "@types/babel__core": "^7.20.5", + "babel-plugin-istanbul": "^7.0.1", + "babel-preset-jest": "30.2.0", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "slash": "^3.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.11.0 || ^8.0.0-0" + } + }, + "packages/pyright-internal/node_modules/babel-jest/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "packages/pyright-internal/node_modules/babel-plugin-istanbul": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-7.0.1.tgz", + "integrity": 
"sha512-D8Z6Qm8jCvVXtIRkBnqNHX0zJ37rQcFJ9u8WOS6tkYOsRdHBzypCstaxWiu5ZIlqQtviRYbgnRLSoCEvjqcqbA==", + "dev": true, + "license": "BSD-3-Clause", + "workspaces": [ + "test/babel-8" + ], + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-instrument": "^6.0.2", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "packages/pyright-internal/node_modules/babel-plugin-jest-hoist": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-30.2.0.tgz", + "integrity": "sha512-ftzhzSGMUnOzcCXd6WHdBGMyuwy15Wnn0iyyWGKgBDLxf9/s5ABuraCSpBX2uG0jUg4rqJnxsLc5+oYBqoxVaA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/babel__core": "^7.20.5" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/babel-preset-jest": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-30.2.0.tgz", + "integrity": "sha512-US4Z3NOieAQumwFnYdUWKvUKh8+YSnS/gB3t6YBiz0bskpu7Pine8pPCheNxlPEW4wnUkma2a94YuW2q3guvCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-plugin-jest-hoist": "30.2.0", + "babel-preset-current-node-syntax": "^1.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.11.0 || ^8.0.0-beta.1" + } + }, + "packages/pyright-internal/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "packages/pyright-internal/node_modules/ci-info": { + "version": 
"4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "packages/pyright-internal/node_modules/cjs-module-lexer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-2.2.0.tgz", + "integrity": "sha512-4bHTS2YuzUvtoLjdy+98ykbNB5jS0+07EvFNXerqZQJ89F7DI6ET7OQo/HJuW6K0aVsKA9hj9/RVb2kQVOrPDQ==", + "dev": true, + "license": "MIT" + }, + "packages/pyright-internal/node_modules/expect": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-30.2.0.tgz", + "integrity": "sha512-u/feCi0GPsI+988gU2FLcsHyAHTU0MX1Wg68NhAnN7z/+C5wqG+CY8J53N9ioe8RXgaoz0nBR/TYMf3AycUuPw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/expect-utils": "30.2.0", + "@jest/get-type": "30.1.0", + "jest-matcher-utils": "30.2.0", + "jest-message-util": "30.2.0", + "jest-mock": "30.2.0", + "jest-util": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/glob": { + "version": "10.5.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz", + "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "packages/pyright-internal/node_modules/istanbul-lib-source-maps": { + "version": "5.0.6", + "resolved": 
"https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.6.tgz", + "integrity": "sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.23", + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "packages/pyright-internal/node_modules/jest": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest/-/jest-30.2.0.tgz", + "integrity": "sha512-F26gjC0yWN8uAA5m5Ss8ZQf5nDHWGlN/xWZIh8S5SRbsEKBovwZhxGd6LJlbZYxBgCYOtreSUyb8hpXyGC5O4A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "30.2.0", + "@jest/types": "30.2.0", + "import-local": "^3.2.0", + "jest-cli": "30.2.0" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "packages/pyright-internal/node_modules/jest-changed-files": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-30.2.0.tgz", + "integrity": "sha512-L8lR1ChrRnSdfeOvTrwZMlnWV8G/LLjQ0nG9MBclwWZidA2N5FviRki0Bvh20WRMOX31/JYvzdqTJrk5oBdydQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "execa": "^5.1.1", + "jest-util": "30.2.0", + "p-limit": "^3.1.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-circus": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-30.2.0.tgz", + "integrity": "sha512-Fh0096NC3ZkFx05EP2OXCxJAREVxj1BcW/i6EWqqymcgYKWjyyDpral3fMxVcHXg6oZM7iULer9wGRFvfpl+Tg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "30.2.0", + "@jest/expect": 
"30.2.0", + "@jest/test-result": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "co": "^4.6.0", + "dedent": "^1.6.0", + "is-generator-fn": "^2.1.0", + "jest-each": "30.2.0", + "jest-matcher-utils": "30.2.0", + "jest-message-util": "30.2.0", + "jest-runtime": "30.2.0", + "jest-snapshot": "30.2.0", + "jest-util": "30.2.0", + "p-limit": "^3.1.0", + "pretty-format": "30.2.0", + "pure-rand": "^7.0.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.6" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-circus/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "packages/pyright-internal/node_modules/jest-cli": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-30.2.0.tgz", + "integrity": "sha512-Os9ukIvADX/A9sLt6Zse3+nmHtHaE6hqOsjQtNiugFTbKRHYIYtZXNGNK9NChseXy7djFPjndX1tL0sCTlfpAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "30.2.0", + "@jest/test-result": "30.2.0", + "@jest/types": "30.2.0", + "chalk": "^4.1.2", + "exit-x": "^0.2.2", + "import-local": "^3.2.0", + "jest-config": "30.2.0", + "jest-util": "30.2.0", + "jest-validate": "30.2.0", + "yargs": "^17.7.2" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "packages/pyright-internal/node_modules/jest-cli/node_modules/jest-config": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-30.2.0.tgz", + "integrity": 
"sha512-g4WkyzFQVWHtu6uqGmQR4CQxz/CH3yDSlhzXMWzNjDx843gYjReZnMRanjRCq5XZFuQrGDxgUaiYWE8BRfVckA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@jest/get-type": "30.1.0", + "@jest/pattern": "30.0.1", + "@jest/test-sequencer": "30.2.0", + "@jest/types": "30.2.0", + "babel-jest": "30.2.0", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "deepmerge": "^4.3.1", + "glob": "^10.3.10", + "graceful-fs": "^4.2.11", + "jest-circus": "30.2.0", + "jest-docblock": "30.2.0", + "jest-environment-node": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-resolve": "30.2.0", + "jest-runner": "30.2.0", + "jest-util": "30.2.0", + "jest-validate": "30.2.0", + "micromatch": "^4.0.8", + "parse-json": "^5.2.0", + "pretty-format": "30.2.0", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "@types/node": "*", + "esbuild-register": ">=3.4.0", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "esbuild-register": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "packages/pyright-internal/node_modules/jest-cli/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "packages/pyright-internal/node_modules/jest-diff": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-30.2.0.tgz", + "integrity": "sha512-dQHFo3Pt4/NLlG5z4PxZ/3yZTZ1C7s9hveiOj+GCN+uT109NC2QgsoVZsVOAvbJ3RgKkvyLGXZV9+piDpWbm6A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/diff-sequences": "30.0.1", + "@jest/get-type": "30.1.0", + "chalk": "^4.1.2", + "pretty-format": "30.2.0" + }, + "engines": { + "node": 
"^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-docblock": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-30.2.0.tgz", + "integrity": "sha512-tR/FFgZKS1CXluOQzZvNH3+0z9jXr3ldGSD8bhyuxvlVUwbeLOGynkunvlTMxchC5urrKndYiwCFC0DLVjpOCA==", + "dev": true, + "license": "MIT", + "dependencies": { + "detect-newline": "^3.1.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-each": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-30.2.0.tgz", + "integrity": "sha512-lpWlJlM7bCUf1mfmuqTA8+j2lNURW9eNafOy99knBM01i5CQeY5UH1vZjgT9071nDJac1M4XsbyI44oNOdhlDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0", + "@jest/types": "30.2.0", + "chalk": "^4.1.2", + "jest-util": "30.2.0", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-environment-node": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-30.2.0.tgz", + "integrity": "sha512-ElU8v92QJ9UrYsKrxDIKCxu6PfNj4Hdcktcn0JX12zqNdqWHB0N+hwOnnBBXvjLd2vApZtuLUGs1QSY+MsXoNA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "30.2.0", + "@jest/fake-timers": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "jest-mock": "30.2.0", + "jest-util": "30.2.0", + "jest-validate": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-haste-map": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-30.2.0.tgz", + "integrity": "sha512-sQA/jCb9kNt+neM0anSj6eZhLZUIhQgwDt7cPGjumgLM4rXsfb9kpnlacmvZz3Q5tb80nS+oG/if+NBKrHC+Xw==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "anymatch": "^3.1.3", + "fb-watchman": "^2.0.2", + "graceful-fs": "^4.2.11", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "jest-worker": "30.2.0", + "micromatch": "^4.0.8", + "walker": "^1.0.8" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.3" + } + }, + "packages/pyright-internal/node_modules/jest-leak-detector": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-30.2.0.tgz", + "integrity": "sha512-M6jKAjyzjHG0SrQgwhgZGy9hFazcudwCNovY/9HPIicmNSBuockPSedAP9vlPK6ONFJ1zfyH/M2/YYJxOz5cdQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-matcher-utils": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-30.2.0.tgz", + "integrity": "sha512-dQ94Nq4dbzmUWkQ0ANAWS9tBRfqCrn0bV9AMYdOi/MHW726xn7eQmMeRTpX2ViC00bpNaWXq+7o4lIQ3AX13Hg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0", + "chalk": "^4.1.2", + "jest-diff": "30.2.0", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-message-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-30.2.0.tgz", + "integrity": "sha512-y4DKFLZ2y6DxTWD4cDe07RglV88ZiNEdlRfGtqahfbIjfsw1nMCPx49Uev4IA/hWn3sDKyAnSPwoYSsAEdcimw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@jest/types": "30.2.0", + "@types/stack-utils": "^2.0.3", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "micromatch": "^4.0.8", + "pretty-format": 
"30.2.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.6" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-message-util/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "packages/pyright-internal/node_modules/jest-mock": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-30.2.0.tgz", + "integrity": "sha512-JNNNl2rj4b5ICpmAcq+WbLH83XswjPbjH4T7yvGzfAGCPh1rw+xVNbtk+FnRslvt9lkCcdn9i1oAoKUuFsOxRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "jest-util": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-regex-util": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-resolve": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-30.2.0.tgz", + "integrity": "sha512-TCrHSxPlx3tBY3hWNtRQKbtgLhsXa1WmbJEqBlTBrGafd5fiQFByy2GNCEoGR+Tns8d15GaL9cxEzKOO3GEb2A==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "jest-pnp-resolver": "^1.2.3", + "jest-util": "30.2.0", + "jest-validate": "30.2.0", + "slash": "^3.0.0", + "unrs-resolver": "^1.7.11" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 
|| >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-resolve-dependencies": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-30.2.0.tgz", + "integrity": "sha512-xTOIGug/0RmIe3mmCqCT95yO0vj6JURrn1TKWlNbhiAefJRWINNPgwVkrVgt/YaerPzY3iItufd80v3lOrFJ2w==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-regex-util": "30.0.1", + "jest-snapshot": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-resolve/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "packages/pyright-internal/node_modules/jest-runner": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-30.2.0.tgz", + "integrity": "sha512-PqvZ2B2XEyPEbclp+gV6KO/F1FIFSbIwewRgmROCMBo/aZ6J1w8Qypoj2pEOcg3G2HzLlaP6VUtvwCI8dM3oqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "30.2.0", + "@jest/environment": "30.2.0", + "@jest/test-result": "30.2.0", + "@jest/transform": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "emittery": "^0.13.1", + "exit-x": "^0.2.2", + "graceful-fs": "^4.2.11", + "jest-docblock": "30.2.0", + "jest-environment-node": "30.2.0", + "jest-haste-map": "30.2.0", + "jest-leak-detector": "30.2.0", + "jest-message-util": "30.2.0", + "jest-resolve": "30.2.0", + "jest-runtime": "30.2.0", + "jest-util": "30.2.0", + "jest-watcher": "30.2.0", + "jest-worker": "30.2.0", + "p-limit": "^3.1.0", + "source-map-support": "0.5.13" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + 
"packages/pyright-internal/node_modules/jest-runner/node_modules/source-map-support": { + "version": "0.5.13", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", + "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "packages/pyright-internal/node_modules/jest-runtime": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-30.2.0.tgz", + "integrity": "sha512-p1+GVX/PJqTucvsmERPMgCPvQJpFt4hFbM+VN3n8TMo47decMUcJbt+rgzwrEme0MQUA/R+1de2axftTHkKckg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "30.2.0", + "@jest/fake-timers": "30.2.0", + "@jest/globals": "30.2.0", + "@jest/source-map": "30.0.1", + "@jest/test-result": "30.2.0", + "@jest/transform": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "cjs-module-lexer": "^2.1.0", + "collect-v8-coverage": "^1.0.2", + "glob": "^10.3.10", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "jest-message-util": "30.2.0", + "jest-mock": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-resolve": "30.2.0", + "jest-snapshot": "30.2.0", + "jest-util": "30.2.0", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-runtime/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "packages/pyright-internal/node_modules/jest-snapshot": { + "version": "30.2.0", + "resolved": 
"https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-30.2.0.tgz", + "integrity": "sha512-5WEtTy2jXPFypadKNpbNkZ72puZCa6UjSr/7djeecHWOu7iYhSXSnHScT8wBz3Rn8Ena5d5RYRcsyKIeqG1IyA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@babel/generator": "^7.27.5", + "@babel/plugin-syntax-jsx": "^7.27.1", + "@babel/plugin-syntax-typescript": "^7.27.1", + "@babel/types": "^7.27.3", + "@jest/expect-utils": "30.2.0", + "@jest/get-type": "30.1.0", + "@jest/snapshot-utils": "30.2.0", + "@jest/transform": "30.2.0", + "@jest/types": "30.2.0", + "babel-preset-current-node-syntax": "^1.2.0", + "chalk": "^4.1.2", + "expect": "30.2.0", + "graceful-fs": "^4.2.11", + "jest-diff": "30.2.0", + "jest-matcher-utils": "30.2.0", + "jest-message-util": "30.2.0", + "jest-util": "30.2.0", + "pretty-format": "30.2.0", + "semver": "^7.7.2", + "synckit": "^0.11.8" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-validate": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-30.2.0.tgz", + "integrity": "sha512-FBGWi7dP2hpdi8nBoWxSsLvBFewKAg0+uSQwBaof4Y4DPgBabXgpSYC5/lR7VmnIlSpASmCi/ntRWPbv7089Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0", + "@jest/types": "30.2.0", + "camelcase": "^6.3.0", + "chalk": "^4.1.2", + "leven": "^3.1.0", + 
"pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-watcher": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-30.2.0.tgz", + "integrity": "sha512-PYxa28dxJ9g777pGm/7PrbnMeA0Jr7osHP9bS7eJy9DuAjMgdGtxgf0uKMyoIsTWAkIbUW5hSDdJ3urmgXBqxg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "ansi-escapes": "^4.3.2", + "chalk": "^4.1.2", + "emittery": "^0.13.1", + "jest-util": "30.2.0", + "string-length": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/jest-worker": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-30.2.0.tgz", + "integrity": "sha512-0Q4Uk8WF7BUwqXHuAjc23vmopWJw5WH7w2tqBoUOZpOjW/ZnR44GXXd1r82RvnmI2GZge3ivrYXk/BE2+VtW2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@ungap/structured-clone": "^1.3.0", + "jest-util": "30.2.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "packages/pyright-internal/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": 
"sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "packages/pyright-internal/node_modules/pretty-format": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-30.2.0.tgz", + "integrity": "sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "30.0.5", + "ansi-styles": "^5.2.0", + "react-is": "^18.3.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "packages/pyright-internal/node_modules/pure-rand": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-7.0.1.tgz", + "integrity": "sha512-oTUZM/NAZS8p7ANR3SHh30kXB+zK2r2BPcEn/awJIbOvq82WoMN4p62AWWp3Hhw50G0xMsw1mhIBLqHw64EcNQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT" + }, + "packages/pyright-internal/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "packages/pyright-internal/node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": 
true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } }, "packages/vscode-pyright": { - "version": "1.1.407", + "version": "1.1.408", "license": "MIT", "dependencies": { "@vscode/python-extension": "^1.0.5", - "vscode-jsonrpc": "^9.0.0-next.5", - "vscode-languageclient": "^10.0.0-next.12", - "vscode-languageserver": "^10.0.0-next.10", - "vscode-languageserver-protocol": "^3.17.6-next.10" + "vscode-jsonrpc": "^9.0.0-next.8", + "vscode-languageclient": "^10.0.0-next.16", + "vscode-languageserver": "^10.0.0-next.13", + "vscode-languageserver-protocol": "^3.17.6-next.13" }, "devDependencies": { - "@types/node": "^22.16.0", + "@types/node": "^22.18.12", "@types/vscode": "^1.101.0", "@vscode/vsce": "^2.32.0", - "copy-webpack-plugin": "^11.0.0", - "esbuild-loader": "^3.2.0", + "copy-webpack-plugin": "^12.0.2", + "esbuild-loader": "^4.4.0", "ovsx": "^0.8.3", - "shx": "^0.3.4", - "ts-loader": "^9.5.1", + "shx": "^0.4.0", + "ts-loader": "^9.5.4", "typescript": "~5.5.4", - "webpack": "^5.97.1", + "webpack": "^5.102.1", "webpack-cli": "^5.1.4" }, "engines": { diff --git a/package.json b/package.json index 991142fde5..f01dd7a1e8 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,7 @@ "watch:testserver": "cd packages/pyright-internal && npm run webpack:testserver:watch", "check": "npm run check:syncpack && npm run check:eslint && npm run check:prettier", "check:syncpack": "syncpack list-mismatches", - "fix:syncpack": "syncpack fix-mismatches --indent \" \" && npm run install:all", + "fix:syncpack": "syncpack fix-mismatches --indent \" \" && npm install", "check:eslint": "eslint .", "fix:eslint": "eslint --fix .", "check:prettier": "prettier -c .", @@ -23,7 +23,7 @@ "typecheck-python": "uv run --no-sync basedpyright", "ruff-check": "uv run --no-sync ruff check 
--no-cache && uv run --no-sync ruff format --check --diff", "ruff-fix": "uv run --no-sync ruff check --no-cache --fix && uv run --no-sync ruff format", - "pylint": "uv run --no-sync pylint basedpyright build pdm_build.py", + "pylint": "uv run --no-sync pylint basedpyright build pdm_build.py --ignore-paths build/py_latest/upstream", "test-python": "uv run --no-sync pytest tests", "generate-docstubs": "uv run --no-sync build/py3_8/generate_docstubs.py", "localization-helper": "uv run --no-sync build/py_latest/localization_helper.py", @@ -35,13 +35,13 @@ "devDependencies": { "@detachhead/ts-helpers": "^16.2.0", "@types/glob": "^8.1.0", - "@types/node": "^22.16.0", + "@types/node": "^22.18.12", "@types/yargs": "^16.0.9", "@typescript-eslint/eslint-plugin": "^6.21.0", "@typescript-eslint/parser": "^6.21.0", - "cross-env": "^7.0.3", + "cross-env": "^10.1.0", "eslint": "^8.57.1", - "eslint-config-prettier": "^8.10.0", + "eslint-config-prettier": "^8.10.2", "eslint-plugin-simple-import-sort": "^10.0.0", "glob": "^8.1.0", "jsonc-parser": "^3.3.1", @@ -54,5 +54,7 @@ "word-wrap": "1.2.5", "yargs": "^16.2.0" }, - "workspaces": ["packages/*"] + "workspaces": [ + "packages/*" + ] } diff --git a/packages/browser-pyright/package.json b/packages/browser-pyright/package.json index 232905ef0d..d85e06427b 100644 --- a/packages/browser-pyright/package.json +++ b/packages/browser-pyright/package.json @@ -26,12 +26,12 @@ }, "devDependencies": { "@types/copy-webpack-plugin": "^8.0.1", - "@types/node": "^22.16.0", - "copy-webpack-plugin": "^11.0.0", - "shx": "^0.3.4", - "ts-loader": "^9.5.1", + "@types/node": "^22.18.12", + "copy-webpack-plugin": "^12.0.2", + "shx": "^0.4.0", + "ts-loader": "^9.5.4", "typescript": "~5.5.4", - "webpack": "^5.97.1", + "webpack": "^5.102.1", "webpack-cli": "^5.1.4", "webpack-virtual-modules": "^0.6.2" }, @@ -48,6 +48,6 @@ "stream-browserify": "^3.0.0", "util": "^0.12.5", "vm-browserify": "^1.1.2", - "vscode-languageserver": "^10.0.0-next.10" + 
"vscode-languageserver": "^10.0.0-next.13" } } diff --git a/packages/pyright-internal/jest.config.js b/packages/pyright-internal/jest.config.js index 9fd7c0fd92..c97b52d0e3 100644 --- a/packages/pyright-internal/jest.config.js +++ b/packages/pyright-internal/jest.config.js @@ -12,6 +12,9 @@ module.exports = { 'ts-jest', { tsconfig: 'src/tests/tsconfig.json', + diagnostics: { + ignoreCodes: [151002], + }, }, ], }, diff --git a/packages/pyright-internal/package.json b/packages/pyright-internal/package.json index c90d52e1b5..6a4f012e9d 100644 --- a/packages/pyright-internal/package.json +++ b/packages/pyright-internal/package.json @@ -2,7 +2,7 @@ "name": "pyright-internal", "displayName": "pyright", "description": "Type checker for the Python language", - "version": "1.1.407", + "version": "1.1.408", "license": "MIT", "private": true, "files": [ @@ -13,10 +13,10 @@ "clean": "shx rm -rf ./dist ./out", "webpack:testserver": "webpack --config ./src/tests/lsp/webpack.testserver.config.js --mode=development", "webpack:testserver:watch": "npm run clean && webpack --config ./src/tests/lsp/webpack.testserver.config.js --mode development --watch --progress", - "test": "npm run webpack:testserver && jest --forceExit", - "test:norebuild": "jest --forceExit", - "test:coverage": "jest --forceExit --reporters=jest-junit --reporters=default --coverage --coverageReporters=cobertura --coverageReporters=html --coverageReporters=json", - "test:imports": "jest importResolver.test --forceExit --runInBand" + "test": "npm run webpack:testserver && node --max-old-space-size=8192 --expose-gc ./node_modules/jest/bin/jest --forceExit", + "test:norebuild": "node --max-old-space-size=8192 --expose-gc ./node_modules/jest/bin/jest --forceExit", + "test:coverage": "node --max-old-space-size=8192 --expose-gc ./node_modules/jest/bin/jest --forceExit --reporters=jest-junit --reporters=default --coverage --coverageReporters=cobertura --coverageReporters=html --coverageReporters=json", + "test:imports": 
"node --max-old-space-size=8192 --expose-gc ./node_modules/jest/bin/jest importResolver.test --forceExit --runInBand" }, "dependencies": { "@actions/core": "^1.10.1", @@ -29,35 +29,35 @@ "diff": "^7.0.0", "jsonc-parser": "^3.3.1", "pyright-to-gitlab-ci": "^0.1.3", - "smol-toml": "^1.3.1", + "smol-toml": "^1.4.2", "source-map-support": "^0.5.21", - "tmp": "^0.2.3", - "vscode-jsonrpc": "^9.0.0-next.5", - "vscode-languageserver": "^10.0.0-next.10", - "vscode-languageserver-protocol": "^3.17.6-next.10", + "tmp": "^0.2.5", + "vscode-jsonrpc": "^9.0.0-next.8", + "vscode-languageserver": "^10.0.0-next.13", + "vscode-languageserver-protocol": "^3.17.6-next.13", "vscode-languageserver-textdocument": "^1.0.11", "vscode-languageserver-types": "^3.17.6-next.5", - "vscode-uri": "^3.0.8" + "vscode-uri": "^3.1.0" }, "devDependencies": { "@types/command-line-args": "^5.2.3", "@types/diff": "^5.2.2", "@types/fs-extra": "^11.0.4", - "@types/jest": "^29.5.14", - "@types/lodash": "^4.17.14", - "@types/node": "^22.16.0", + "@types/jest": "^30.0.0", + "@types/lodash": "^4.17.20", + "@types/node": "^22.18.12", "@types/tmp": "^0.2.6", - "copy-webpack-plugin": "^11.0.0", - "esbuild-loader": "^3.2.0", + "copy-webpack-plugin": "^12.0.2", + "esbuild-loader": "^4.4.0", "fs-extra": "^11.2.0", - "jest": "^29.7.0", + "jest": "^30.2.0", "jest-junit": "^16.0.0", - "shx": "^0.3.4", - "ts-jest": "^29.2.5", - "ts-loader": "^9.5.1", + "shx": "^0.4.0", + "ts-jest": "^29.4.5", + "ts-loader": "^9.5.4", "typed-jest-expect": "^1.0.1", "typescript": "~5.5.4", - "webpack": "^5.97.1", + "webpack": "^5.102.1", "webpack-cli": "^5.1.4", "word-wrap": "1.2.5" } diff --git a/packages/pyright-internal/src/analyzer/binder.ts b/packages/pyright-internal/src/analyzer/binder.ts index 798ecf752a..3fe4d89231 100644 --- a/packages/pyright-internal/src/analyzer/binder.ts +++ b/packages/pyright-internal/src/analyzer/binder.ts @@ -17,7 +17,6 @@ */ import { Commands } from '../commands/commands'; -import { appendArray } from 
'../common/collectionUtils'; import { DiagnosticLevel } from '../common/configOptions'; import { assert, assertNever, fail } from '../common/debug'; import { CreateTypeStubFileAction, Diagnostic, DiagnosticAddendum } from '../common/diagnostic'; @@ -81,7 +80,7 @@ import { YieldNode, } from '../parser/parseNodes'; import { KeywordType, OperatorType } from '../parser/tokenizerTypes'; -import { AnalyzerFileInfo, ImportLookupResult } from './analyzerFileInfo'; +import { AnalyzerFileInfo } from './analyzerFileInfo'; import * as AnalyzerNodeInfo from './analyzerNodeInfo'; import { CodeFlowReferenceExpressionNode, @@ -119,6 +118,7 @@ import { VariableDeclaration, } from './declaration'; import { ImplicitImport, ImportResult, ImportType } from './importResult'; +import { getWildcardImportNames } from './importStatementUtils'; import * as ParseTreeUtils from './parseTreeUtils'; import { ParseTreeWalker } from './parseTreeWalker'; import { moduleIsInList } from './pythonPathUtils'; @@ -1881,7 +1881,7 @@ export class Binder extends ParseTreeWalker { const lookupInfo = this._fileInfo.importLookup(resolvedPath); if (lookupInfo) { - const wildcardNames = this._getWildcardImportNames(lookupInfo); + const wildcardNames = getWildcardImportNames(lookupInfo); if (isModuleInitFile) { // If the symbol is going to be immediately replaced with a same-named @@ -2809,27 +2809,6 @@ export class Binder extends ParseTreeWalker { } } - private _getWildcardImportNames(lookupInfo: ImportLookupResult): string[] { - const namesToImport: string[] = []; - - // If a dunder all symbol is defined, it takes precedence. 
- if (lookupInfo.dunderAllNames) { - if (!lookupInfo.usesUnsupportedDunderAllForm) { - return lookupInfo.dunderAllNames; - } - - appendArray(namesToImport, lookupInfo.dunderAllNames); - } - - lookupInfo.symbolTable.forEach((symbol, name) => { - if (!symbol.isExternallyHidden() && !name.startsWith('_')) { - namesToImport!.push(name); - } - }); - - return namesToImport; - } - private _walkStatementsAndReportUnreachable(statements: StatementNode[]) { let foundUnreachableStatement = false; diff --git a/packages/pyright-internal/src/analyzer/declarationUtils.ts b/packages/pyright-internal/src/analyzer/declarationUtils.ts index 3caea332e2..8893427b70 100644 --- a/packages/pyright-internal/src/analyzer/declarationUtils.ts +++ b/packages/pyright-internal/src/analyzer/declarationUtils.ts @@ -150,6 +150,10 @@ export function getNameFromDeclaration(declaration: Declaration) { } export function getNameNodeForDeclaration(declaration: Declaration): NameNode | undefined { + if (declaration.node === undefined) { + return undefined; + } + switch (declaration.type) { case DeclarationType.Alias: if (declaration.node.nodeType === ParseNodeType.ImportAs) { diff --git a/packages/pyright-internal/src/analyzer/decorators.ts b/packages/pyright-internal/src/analyzer/decorators.ts index 0ce2d816f6..43b2814b93 100644 --- a/packages/pyright-internal/src/analyzer/decorators.ts +++ b/packages/pyright-internal/src/analyzer/decorators.ts @@ -346,6 +346,7 @@ export function applyClassDecorator( applyDataClassDecorator(evaluator, decoratorNode, originalClassType, dataclassBehaviors, callNode); return true; } + return false; }; diff --git a/packages/pyright-internal/src/analyzer/importStatementUtils.ts b/packages/pyright-internal/src/analyzer/importStatementUtils.ts index 9dce8c6b5c..5068fd4994 100644 --- a/packages/pyright-internal/src/analyzer/importStatementUtils.ts +++ b/packages/pyright-internal/src/analyzer/importStatementUtils.ts @@ -33,6 +33,7 @@ import { import { ParseFileResults } from 
'../parser/parser'; import { TokenType } from '../parser/tokenizerTypes'; import * as AnalyzerNodeInfo from './analyzerNodeInfo'; +import { ImportLookupResult } from './analyzerFileInfo'; import { ModuleNameAndType } from './importResolver'; import { ImportResult, ImportType } from './importResult'; import { getTokenAfter, getTokenAt } from './parseTreeUtils'; @@ -980,3 +981,25 @@ export function haveSameParentModule(module1: string[], module2: string[]) { return i === module1.length - 1; } + +// Helper function to get the list of names that would be imported by a wildcard import +export function getWildcardImportNames(lookupInfo: ImportLookupResult): string[] { + const namesToImport: string[] = []; + + // If a dunder all symbol is defined, it takes precedence. + if (lookupInfo.dunderAllNames) { + if (!lookupInfo.usesUnsupportedDunderAllForm) { + return lookupInfo.dunderAllNames; + } + + appendArray(namesToImport, lookupInfo.dunderAllNames); + } + + lookupInfo.symbolTable.forEach((symbol, name) => { + if (!symbol.isExternallyHidden() && !name.startsWith('_')) { + namesToImport!.push(name); + } + }); + + return namesToImport; +} diff --git a/packages/pyright-internal/src/analyzer/patternMatching.ts b/packages/pyright-internal/src/analyzer/patternMatching.ts index 360ccb5b54..192f8268d2 100644 --- a/packages/pyright-internal/src/analyzer/patternMatching.ts +++ b/packages/pyright-internal/src/analyzer/patternMatching.ts @@ -1460,7 +1460,7 @@ function getSequencePatternInfo( { type: UnknownType.create(), isUnbounded: true }, ]; - const tupleIndeterminateIndex = typeArgs.findIndex( + let tupleIndeterminateIndex = typeArgs.findIndex( (t) => t.isUnbounded || isUnpackedTypeVarTuple(t.type) || isUnpackedTypeVar(t.type) ); @@ -1468,15 +1468,20 @@ function getSequencePatternInfo( // If the tuple contains an indeterminate entry, expand or remove that // entry to match the length of the pattern if possible. 
+ let expandedIndeterminate = false; if (tupleIndeterminateIndex >= 0) { tupleDeterminateEntryCount--; while (typeArgs.length < patternEntryCount) { typeArgs.splice(tupleIndeterminateIndex, 0, typeArgs[tupleIndeterminateIndex]); + tupleDeterminateEntryCount++; + tupleIndeterminateIndex++; + expandedIndeterminate = true; } if (typeArgs.length > patternEntryCount && patternStarEntryIndex === undefined) { typeArgs.splice(tupleIndeterminateIndex, 1); + tupleIndeterminateIndex = -1; } } @@ -1495,6 +1500,20 @@ function getSequencePatternInfo( (t) => t.isUnbounded || isUnpackedTypeVarTuple(t.type) || isUnpackedTypeVar(t.type) ), }); + + tupleDeterminateEntryCount -= entriesToCombine; + if (!typeArgs[patternStarEntryIndex].isUnbounded) { + tupleDeterminateEntryCount++; + } + + // If the collapsed range included the tupleIndeterminateIndex, adjust + // it to reflect the new collapsed entry. + if ( + tupleIndeterminateIndex >= patternStarEntryIndex && + tupleIndeterminateIndex < patternStarEntryIndex + entriesToCombine + ) { + tupleIndeterminateIndex = patternStarEntryIndex; + } } if (typeArgs.length === patternEntryCount) { @@ -1505,6 +1524,7 @@ function getSequencePatternInfo( // indeterminate-length entry that aligns to the star entry, we can // assume it will always match. 
if ( + !expandedIndeterminate && patternStarEntryIndex !== undefined && tupleIndeterminateIndex >= 0 && pattern.d.entries.length - 1 === tupleDeterminateEntryCount && diff --git a/packages/pyright-internal/src/analyzer/typeEvaluator.ts b/packages/pyright-internal/src/analyzer/typeEvaluator.ts index 3668b28cd3..2cac29bf5f 100644 --- a/packages/pyright-internal/src/analyzer/typeEvaluator.ts +++ b/packages/pyright-internal/src/analyzer/typeEvaluator.ts @@ -417,7 +417,7 @@ interface ScopedTypeVarResult { interface AliasMapEntry { alias: string; - module: 'builtins' | 'collections' | 'contextlib' | 'self'; + module: 'builtins' | 'collections' | 'contextlib' | 'internals'; implicitBaseClass?: string; isSpecialForm?: boolean; isIllegalInIsinstance?: boolean; @@ -1041,9 +1041,9 @@ export function createTypeEvaluator( prefetched.strClass = getBuiltInType(node, 'str'); prefetched.dictClass = getBuiltInType(node, 'dict'); prefetched.moduleTypeClass = getTypingType(node, 'ModuleType'); - prefetched.typedDictClass = getTypingType(node, 'TypedDict'); prefetched.typedDictPrivateClass = getTypeCheckerInternalsType(node, 'TypedDictFallback') ?? getTypingType(node, '_TypedDict'); + prefetched.typedDictClass = getTypingType(node, 'TypedDict'); prefetched.awaitableClass = getTypingType(node, 'Awaitable'); prefetched.mappingClass = getTypingType(node, 'Mapping'); @@ -12727,13 +12727,28 @@ export function createTypeEvaluator( if (matchResults.argumentErrors) { // Evaluate types of all args. This will ensure that referenced symbols are - // not reported as unaccessed. + // not reported as unaccessed. Also pass the expected parameter type as + // inference context to enable proper completions even when there are errors. 
+ matchResults.argParams.forEach((argParam) => { + if (argParam.argument.valueExpression && !isSpeculativeModeInUse(argParam.argument.valueExpression)) { + getTypeOfExpression( + argParam.argument.valueExpression, + /* flags */ undefined, + makeInferenceContext(argParam.paramType) + ); + } + }); + + // Also evaluate any arguments that weren't matched to parameters argList.forEach((arg) => { if (arg.valueExpression && !isSpeculativeModeInUse(arg.valueExpression)) { - getTypeOfExpression(arg.valueExpression); + // Check if this argument was already evaluated above + const wasEvaluated = matchResults.argParams.some((argParam) => argParam.argument === arg); + if (!wasEvaluated) { + getTypeOfExpression(arg.valueExpression); + } } }); - // Use a return type of Unknown but attach a "possible type" to it // so the completion provider can suggest better completions. const possibleType = FunctionType.getEffectiveReturnType(typeResult.type); @@ -13010,7 +13025,10 @@ export function createTypeEvaluator( if (argParam.argType) { argType = argParam.argType; } else { - const argTypeResult = getTypeOfArg(argParam.argument, /* inferenceContext */ undefined); + const argTypeResult = getTypeOfArg( + argParam.argument, + makeInferenceContext(argParam.paramType, isTypeIncomplete) + ); argType = argTypeResult.type; if (argTypeResult.isIncomplete) { isTypeIncomplete = true; @@ -17094,11 +17112,12 @@ export function createTypeEvaluator( } else if (aliasMapEntry.module === 'collections' || aliasMapEntry.module === 'contextlib') { // The typing.pyi file imports collections. 
baseClass = getTypeOfModule(node, baseClassName, [aliasMapEntry.module]); - } else if (aliasMapEntry.module === 'self') { - const symbolWithScope = lookUpSymbolRecursive(node, baseClassName, /* honorCodeFlow */ false); - if (symbolWithScope) { - baseClass = getEffectiveTypeOfSymbol(symbolWithScope.symbol); - // The _TypedDict class is marked as abstract, but the + } else if (aliasMapEntry.module === 'internals') { + // Handle TypedDict specially. + assert(baseClassName === 'TypedDictFallback'); + baseClass = prefetched?.typedDictPrivateClass; + if (baseClass) { + // The TypedDictFallback class is marked as abstract, but the // methods that are abstract are overridden and shouldn't // cause the TypedDict to be marked as abstract. if ( @@ -17156,7 +17175,7 @@ export function createTypeEvaluator( ['ClassVar', { alias: '', module: 'builtins', isSpecialForm: true }], ['Final', { alias: '', module: 'builtins', isSpecialForm: true }], ['Literal', { alias: '', module: 'builtins', isSpecialForm: true }], - ['TypedDict', { alias: '_TypedDict', module: 'self' }], + ['TypedDict', { alias: 'TypedDictFallback', module: 'internals' }], ['Union', { alias: '', module: 'builtins', isSpecialForm: true }], ['Optional', { alias: '', module: 'builtins', isSpecialForm: true }], ['Annotated', { alias: '', module: 'builtins', isSpecialForm: true, isIllegalInIsinstance: true }], diff --git a/packages/pyright-internal/src/common/serviceKeys.ts b/packages/pyright-internal/src/common/serviceKeys.ts index 0732e82709..3617bc2bf0 100644 --- a/packages/pyright-internal/src/common/serviceKeys.ts +++ b/packages/pyright-internal/src/common/serviceKeys.ts @@ -24,19 +24,21 @@ import { CommandService, WindowService } from './languageServerInterface'; import { GroupServiceKey, ServiceKey } from './serviceProvider'; export namespace ServiceKeys { - export const fs = new ServiceKey(); - export const console = new ServiceKey(); - export const sourceFileFactory = new ServiceKey(); - export const partialStubs 
= new ServiceKey(); - export const symbolDefinitionProvider = new GroupServiceKey(); - export const symbolUsageProviderFactory = new GroupServiceKey(); - export const stateMutationListeners = new GroupServiceKey(); - export const tempFile = new ServiceKey(); - export const cacheManager = new ServiceKey(); - export const debugInfoInspector = new ServiceKey(); - export const caseSensitivityDetector = new ServiceKey(); - export const docStringService = new ServiceKey(); - export const windowService = new ServiceKey(); - export const commandService = new ServiceKey(); - export const cancellationProvider = new ServiceKey(); + export const fs = new ServiceKey('fs'); + export const console = new ServiceKey('ConsoleInterface'); + export const sourceFileFactory = new ServiceKey('ISourceFileFactory'); + export const partialStubs = new ServiceKey('SupportPartialStubs'); + export const symbolDefinitionProvider = new GroupServiceKey('SymbolDefinitionProvider'); + export const symbolUsageProviderFactory = new GroupServiceKey( + 'SymbolUsageProviderFactory' + ); + export const stateMutationListeners = new GroupServiceKey('StatusMutationListener'); + export const tempFile = new ServiceKey('TempFile'); + export const cacheManager = new ServiceKey('CacheManager'); + export const debugInfoInspector = new ServiceKey('DebugInfoInspector'); + export const caseSensitivityDetector = new ServiceKey('CaseSensitivityDetector'); + export const docStringService = new ServiceKey('DocStringService'); + export const windowService = new ServiceKey('WindowService'); + export const commandService = new ServiceKey('CommandService'); + export const cancellationProvider = new ServiceKey('CancellationProvider'); } diff --git a/packages/pyright-internal/src/common/serviceProvider.ts b/packages/pyright-internal/src/common/serviceProvider.ts index eab4203113..11dde5a71b 100644 --- a/packages/pyright-internal/src/common/serviceProvider.ts +++ b/packages/pyright-internal/src/common/serviceProvider.ts @@ 
-12,6 +12,7 @@ import * as debug from './debug'; abstract class InternalKey { abstract readonly kind: 'singleton' | 'group'; + abstract readonly id: string; } /** @@ -20,6 +21,9 @@ abstract class InternalKey { // eslint-disable-next-line @typescript-eslint/no-unused-vars export class ServiceKey extends InternalKey { readonly kind = 'singleton'; + constructor(readonly id: string) { + super(); + } } /** @@ -28,12 +32,15 @@ export class ServiceKey extends InternalKey { // eslint-disable-next-line @typescript-eslint/no-unused-vars export class GroupServiceKey extends InternalKey { readonly kind = 'group'; + constructor(readonly id: string) { + super(); + } } export type AllServiceKeys = ServiceKey | GroupServiceKey; export class ServiceProvider { - private _container = new Map(); + private _container = new Map(); add(key: ServiceKey, value: T | undefined): void; add(key: GroupServiceKey, value: T): void; @@ -45,7 +52,7 @@ export class ServiceProvider { if (key.kind === 'singleton') { if (value !== undefined) { - this._container.set(key, value); + this._container.set(key.id, value); } else { this.remove(key); } @@ -64,7 +71,7 @@ export class ServiceProvider { } if (key.kind === 'singleton') { - this._container.delete(key); + this._container.delete(key.id); return; } @@ -74,7 +81,7 @@ export class ServiceProvider { tryGet(key: ServiceKey): T | undefined; tryGet(key: GroupServiceKey): readonly T[] | undefined; tryGet(key: AllServiceKeys): T | readonly T[] | undefined { - return this._container.get(key); + return this._container.get(key.id); } get(key: ServiceKey): T; @@ -91,7 +98,7 @@ export class ServiceProvider { clone() { const serviceProvider = new ServiceProvider(); this._container.forEach((value, key) => { - if (key.kind === 'group') { + if (Array.isArray(value)) { serviceProvider._container.set(key, [...(value ?? 
[])]); } else if (value.clone !== undefined) { serviceProvider._container.set(key, value.clone()); @@ -115,7 +122,7 @@ export class ServiceProvider { // Explicitly cast to remove `readonly` const services = this.tryGet(key) as T[] | undefined; if (services === undefined) { - this._container.set(key, [newValue]); + this._container.set(key.id, [newValue]); return; } diff --git a/packages/pyright-internal/src/languageServerBase.ts b/packages/pyright-internal/src/languageServerBase.ts index 755209e7aa..14c1a9bba2 100644 --- a/packages/pyright-internal/src/languageServerBase.ts +++ b/packages/pyright-internal/src/languageServerBase.ts @@ -478,6 +478,10 @@ export abstract class LanguageServerBase implements LanguageServerInterface, Dis return fs.getOriginalUri(uri).toString(); }; + protected get workspaceDiagnosticsReporter() { + return this._workspaceDiagnosticsReporter; + } + protected abstract executeCommand(params: ExecuteCommandParams, token: CancellationToken): Promise; protected abstract isLongRunningCommand(command: string): boolean; diff --git a/packages/pyright-internal/src/localization/package.nls.cs.json b/packages/pyright-internal/src/localization/package.nls.cs.json index f85f8256c8..fa0565dbc0 100644 --- a/packages/pyright-internal/src/localization/package.nls.cs.json +++ b/packages/pyright-internal/src/localization/package.nls.cs.json @@ -75,6 +75,7 @@ "classMethodClsParam": "Metody třídy by měly mít parametr „cls“", "classNotRuntimeSubscriptable": "Dolní index pro třídu {name} vygeneruje výjimku modulu runtime; výraz typu uzavřete do uvozovek.", "classPatternBuiltInArgPositional": "Vzor třídy přijímá pouze poziční dílčí vzor", + "classPatternNewType": "\"{type}\" cannot be used in a class pattern because it is defined using NewType", "classPatternPositionalArgCount": "Příliš mnoho pozičních vzorů pro třídu \"{type}\"; očekávalo se {expected}, ale přijalo se {received}", "classPatternTypeAlias": "Typ „{type}“ nelze použít ve vzorci třídy, protože se jedná 
o specializovaný alias typu", "classPropertyDeprecated": "Vlastnosti třídy jsou v Pythonu 3.11 zastaralé a v Pythonu 3.13 se nebudou podporovat.", @@ -434,7 +435,7 @@ "pyrightCommentNotOnOwnLine": "Komentáře Pyright používané k řízení nastavení na úrovni souborů se musí zobrazovat na vlastním řádku", "pyrightCommentUnknownDiagnosticRule": "{rule} je neznámé diagnostické pravidlo pro komentář pyright", "pyrightCommentUnknownDiagnosticSeverityValue": "{value} je neplatná hodnota pro komentář pyright; očekávalo se true, false, error, warning, information nebo none", - "pyrightCommentUnknownDirective": "Direktiva {directive} je neznámá direktiva pro komentář pyright; očekávalo se strict nebo basic", + "pyrightCommentUnknownDirective": "\"{directive}\" is an unknown directive for pyright comment; expected \"strict\", \"standard\", or \"basic\"", "readOnlyArgCount": "Za „ReadOnly“ se očekával jeden argument typu", "readOnlyNotInTypedDict": "ReadOnly není v tomto kontextu povolené", "recursiveDefinition": "Typ „{name}“ nelze určit, protože odkazuje sám na sebe", @@ -822,6 +823,7 @@ "typedDictClassNotAllowed": "Třída TypedDict není povolená pro kontroly instancí nebo tříd.", "typedDictClosedExtraNotAllowed": "Do {name} nejde přidat položku.", "typedDictClosedExtraTypeMismatch": "Nelze přidat položku {name} s typem {type}.", + "typedDictClosedFieldNotReadOnly": "Cannot add item \"{name}\" because it must be ReadOnly", "typedDictClosedFieldNotRequired": "Nelze přidat položku {name}, protože musí být NotRequired.", "typedDictExtraFieldNotAllowed": "{name} není k dispozici v {type}", "typedDictExtraFieldTypeMismatch": "Typ {name} není kompatibilní s typem „extra_items“ v typu {type}.", diff --git a/packages/pyright-internal/src/localization/package.nls.de.json b/packages/pyright-internal/src/localization/package.nls.de.json index a213a36831..7ae59038fe 100644 --- a/packages/pyright-internal/src/localization/package.nls.de.json +++ 
b/packages/pyright-internal/src/localization/package.nls.de.json @@ -75,6 +75,7 @@ "classMethodClsParam": "Klassenmethoden sollten einen \"cls\"-Parameter verwenden.", "classNotRuntimeSubscriptable": "Tiefgestellte Zeichen für die Klasse „{name}“ generieren eine Laufzeitausnahme; schließen Sie den Typausdruck in Anführungszeichen ein", "classPatternBuiltInArgPositional": "Das Klassenmuster akzeptiert nur positionsbezogenes Untermuster.", + "classPatternNewType": "\"{type}\" cannot be used in a class pattern because it is defined using NewType", "classPatternPositionalArgCount": "Zu viele Positionsmuster für Klasse \"{type}\". Erwartet: {expected}, empfangen: {received}.", "classPatternTypeAlias": "\"{type}\" kann nicht in einem Klassenmuster verwendet werden, da es sich um einen spezialisierten Typalias handelt.", "classPropertyDeprecated": "Klasseneigenschaften sind in Python 3.11 veraltet und werden in Python 3.13 nicht unterstützt.", @@ -434,7 +435,7 @@ "pyrightCommentNotOnOwnLine": "Pyright-Kommentare, die zum Steuern von Einstellungen auf Dateiebene verwendet werden, müssen in ihrer eigenen Zeile angezeigt werden.", "pyrightCommentUnknownDiagnosticRule": "\"{rule}\" ist eine unbekannte Diagnoseregel für pyright-Kommentar.", "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\" ist ein ungültiger Wert für den pyright-Kommentar; \"true\", \"false\", \"error\", \"warning\", \"information\" oder \"none\" erwartet.", - "pyrightCommentUnknownDirective": "\"{directive}\" ist eine unbekannte Direktive für pyright-Kommentar; \"strict\" oder \"basic\" erwartet", + "pyrightCommentUnknownDirective": "\"{directive}\" is an unknown directive for pyright comment; expected \"strict\", \"standard\", or \"basic\"", "readOnlyArgCount": "Nach \"ReadOnly\" wurde ein einzelnes Typargument erwartet.", "readOnlyNotInTypedDict": "\"ReadOnly\" ist in diesem Kontext nicht zulässig.", "recursiveDefinition": "Der Typ von \"{name}\" konnte nicht bestimmt werden, da er sich auf 
selbst bezieht.", @@ -822,6 +823,7 @@ "typedDictClassNotAllowed": "Die TypedDict-Klasse ist für Instanz- oder Klassenüberprüfungen nicht zulässig.", "typedDictClosedExtraNotAllowed": "Das Element „{name}“ kann nicht hinzugefügt werden.", "typedDictClosedExtraTypeMismatch": "Das Element „{name}“ mit dem Typ „{type}“ kann nicht hinzugefügt werden.", + "typedDictClosedFieldNotReadOnly": "Cannot add item \"{name}\" because it must be ReadOnly", "typedDictClosedFieldNotRequired": "Das Element „{name}“ kann nicht hinzugefügt werden, da es „NotRequired“ sein muss.", "typedDictExtraFieldNotAllowed": "„{name}“ ist in „{type}“ nicht vorhanden.", "typedDictExtraFieldTypeMismatch": "Der Typ von „{name}“ ist nicht mit dem Typ „extra_items“ in „{type}“ kompatibel.", diff --git a/packages/pyright-internal/src/localization/package.nls.es.json b/packages/pyright-internal/src/localization/package.nls.es.json index 97a066f784..63a86424c3 100644 --- a/packages/pyright-internal/src/localization/package.nls.es.json +++ b/packages/pyright-internal/src/localization/package.nls.es.json @@ -75,6 +75,7 @@ "classMethodClsParam": "Los métodos de clase deben tomar un parámetro \"cls\"", "classNotRuntimeSubscriptable": "El subíndice para la clase \"{name}\" generará una excepción en tiempo de ejecución; encierre la expresión de tipo entre comillas", "classPatternBuiltInArgPositional": "El patrón de clase solo acepta subpatrones posicionales", + "classPatternNewType": "\"{type}\" cannot be used in a class pattern because it is defined using NewType", "classPatternPositionalArgCount": "Demasiados patrones posicionales para la clase \"{type}\"; esperado {expected} pero recibido {received}", "classPatternTypeAlias": "\"{type}\" no se puede usar en un patrón de clase porque es un alias de tipo especializado", "classPropertyDeprecated": "Las propiedades de clase están en desuso en Python 3.11 y no se admitirán en Python 3.13.", @@ -434,7 +435,7 @@ "pyrightCommentNotOnOwnLine": "Los comentarios de 
Pyright utilizados para controlar los ajustes a nivel de archivo deben aparecer en su propia línea", "pyrightCommentUnknownDiagnosticRule": "\"{rule}\" es una regla de diagnóstico desconocida para el comentario pyright", "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\" es un valor no válido para el comentario pyright; se espera true, false, error, warning, information o none.", - "pyrightCommentUnknownDirective": "\"{directive}\" es una directiva desconocida para el comentario pyright; se esperaba \"strict\" o \"basic\".", + "pyrightCommentUnknownDirective": "\"{directive}\" is an unknown directive for pyright comment; expected \"strict\", \"standard\", or \"basic\"", "readOnlyArgCount": "Se esperaba un único argumento de tipo después de \"ReadOnly\"", "readOnlyNotInTypedDict": "\"ReadOnly\" no está permitido en este contexto", "recursiveDefinition": "No se pudo determinar el tipo de \"{name}\" porque hace referencia a sí mismo.", @@ -822,6 +823,7 @@ "typedDictClassNotAllowed": "No se permite la clase TypedDict para comprobaciones de instancia o clase", "typedDictClosedExtraNotAllowed": "No se puede agregar el elemento \"{name}\"", "typedDictClosedExtraTypeMismatch": "No se puede agregar el elemento \"{name}\" con el tipo \"{type}\"", + "typedDictClosedFieldNotReadOnly": "Cannot add item \"{name}\" because it must be ReadOnly", "typedDictClosedFieldNotRequired": "No se puede agregar el elemento \"{name}\" porque debe ser NotRequired.", "typedDictExtraFieldNotAllowed": "\"{name}\" no está presente en \"{type}\"", "typedDictExtraFieldTypeMismatch": "El tipo de \"{name}\" no es compatible con el tipo de \"extra_items\" en \"{type}\"", diff --git a/packages/pyright-internal/src/localization/package.nls.fr.json b/packages/pyright-internal/src/localization/package.nls.fr.json index 003ebf391b..2bca71f998 100644 --- a/packages/pyright-internal/src/localization/package.nls.fr.json +++ b/packages/pyright-internal/src/localization/package.nls.fr.json @@ -75,6 
+75,7 @@ "classMethodClsParam": "Les méthodes de classe doivent prendre un paramètre \"cls\"", "classNotRuntimeSubscriptable": "L'indice pour la classe « {name} » générera une exception d'exécution ; placez l'expression de type entre guillemets", "classPatternBuiltInArgPositional": "Le modèle de classe accepte uniquement le sous-modèle positionnel", + "classPatternNewType": "\"{type}\" cannot be used in a class pattern because it is defined using NewType", "classPatternPositionalArgCount": "Trop de modèles positionnels pour les \"{type}\" de classe ; {expected} attendue mais {received} reçues", "classPatternTypeAlias": "\"{type}\" ne peut pas être utilisé dans un modèle de classe car il s'agit d'un alias de type spécialisé", "classPropertyDeprecated": "Les propriétés de classe sont obsolètes dans Python 3.11 et ne seront pas prises en charge dans Python 3.13", @@ -434,7 +435,7 @@ "pyrightCommentNotOnOwnLine": "Les commentaires Pyright utilisés pour contrôler les paramètres au niveau du fichier doivent apparaître sur leur propre ligne", "pyrightCommentUnknownDiagnosticRule": "« {rule} » est une règle de diagnostic inconnue pour le commentaire pyright", "pyrightCommentUnknownDiagnosticSeverityValue": "« {value} » n’est pas valide pour le commentaire pyright ; true, false, error, warning, information ou none attendu", - "pyrightCommentUnknownDirective": "« {directive} » est une directive inconnue pour le commentaire pyright; « strict » ou « basic » attendu", + "pyrightCommentUnknownDirective": "\"{directive}\" is an unknown directive for pyright comment; expected \"strict\", \"standard\", or \"basic\"", "readOnlyArgCount": "Attendu un seul argument de type après \"ReadOnly\"", "readOnlyNotInTypedDict": "« ReadOnly » n’est pas autorisé dans ce contexte", "recursiveDefinition": "Le type de \"{name}\" n'a pas pu être déterminé car il fait référence à lui-même", @@ -822,6 +823,7 @@ "typedDictClassNotAllowed": "Classe TypedDict non autorisée pour les vérifications 
d’instance ou de classe", "typedDictClosedExtraNotAllowed": "Impossible d’ajouter l’élément « {name} »", "typedDictClosedExtraTypeMismatch": "Impossible d’ajouter l’élément « {name} » avec le type « {type} »", + "typedDictClosedFieldNotReadOnly": "Cannot add item \"{name}\" because it must be ReadOnly", "typedDictClosedFieldNotRequired": "Impossible d’ajouter l’élément « {name} », car il doit être NotRequired", "typedDictExtraFieldNotAllowed": "« {name} » n’est pas présent dans « {type} »", "typedDictExtraFieldTypeMismatch": "Le type de « {name} » est incompatible avec le type « extra_items » dans « {type} »", diff --git a/packages/pyright-internal/src/localization/package.nls.it.json b/packages/pyright-internal/src/localization/package.nls.it.json index af63042327..3d24146a28 100644 --- a/packages/pyright-internal/src/localization/package.nls.it.json +++ b/packages/pyright-internal/src/localization/package.nls.it.json @@ -75,6 +75,7 @@ "classMethodClsParam": "I metodi di classe devono accettare un parametro \"cls\"", "classNotRuntimeSubscriptable": "Il pedice per la classe \"{name}\" genererà un'eccezione di runtime; racchiudere l'espressione di tipo tra virgolette", "classPatternBuiltInArgPositional": "Il modello di classe accetta solo un sotto pattern posizionale", + "classPatternNewType": "\"{type}\" cannot be used in a class pattern because it is defined using NewType", "classPatternPositionalArgCount": "Troppi modelli posizionale per la classe \"{type}\"; previsto {expected} ma ottenuto {received}", "classPatternTypeAlias": "\"{type}\" non può essere usato in uno schema di classe, perché è un alias di tipo specializzato", "classPropertyDeprecated": "Le proprietà della classe sono deprecate in Python 3.11 e non saranno supportate in Python 3.13", @@ -434,7 +435,7 @@ "pyrightCommentNotOnOwnLine": "I commenti Pyright usati per controllare le impostazioni a livello di file devono essere visualizzati nella propria riga", "pyrightCommentUnknownDiagnosticRule": 
"\"{rule}\" è una regola di diagnostica sconosciuta per il commento pyright", "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\" non è un valore valido per il commento pyright; previsto true, false, error, warning, information o none", - "pyrightCommentUnknownDirective": "\"{directive}\" è una direttiva sconosciuta per il commento pyright; previsto \"strict\" o \"basic\"", + "pyrightCommentUnknownDirective": "\"{directive}\" is an unknown directive for pyright comment; expected \"strict\", \"standard\", or \"basic\"", "readOnlyArgCount": "Previsto un singolo argomento tipo dopo \"ReadOnly\"", "readOnlyNotInTypedDict": "\"ReadOnly\" non consentito in questo contesto", "recursiveDefinition": "Non è stato possibile determinare il tipo di \"{name}\" perché fa riferimento a se stesso", @@ -822,6 +823,7 @@ "typedDictClassNotAllowed": "Classe TypedDict non consentita per i controlli di istanze o classi", "typedDictClosedExtraNotAllowed": "Non è possibile aggiungere l'elemento \"{name}\"", "typedDictClosedExtraTypeMismatch": "Non è possibile aggiungere l'elemento \"{name}\" con tipo \"{type}\"", + "typedDictClosedFieldNotReadOnly": "Cannot add item \"{name}\" because it must be ReadOnly", "typedDictClosedFieldNotRequired": "Non è possibile aggiungere l'elemento \"{name}\" perché deve essere NotRequired", "typedDictExtraFieldNotAllowed": "\"{name}\" non è presente in \"{type}\"", "typedDictExtraFieldTypeMismatch": "Il tipo di \"{name}\" non è compatibile con il tipo \"\"extra_items\" in \"{type}\"", diff --git a/packages/pyright-internal/src/localization/package.nls.ja.json b/packages/pyright-internal/src/localization/package.nls.ja.json index fe93245a71..9fcc4be9b4 100644 --- a/packages/pyright-internal/src/localization/package.nls.ja.json +++ b/packages/pyright-internal/src/localization/package.nls.ja.json @@ -75,6 +75,7 @@ "classMethodClsParam": "クラス メソッドは \"cls\" パラメーターを受け取る必要があります", "classNotRuntimeSubscriptable": "クラス \"{name}\" 
の添字はランタイム例外を生成します。型式を引用符で囲んでください", "classPatternBuiltInArgPositional": "クラス パターンは位置指定サブパターンのみを受け入れます", + "classPatternNewType": "\"{type}\" cannot be used in a class pattern because it is defined using NewType", "classPatternPositionalArgCount": "クラス \"{type}\" の位置指定パターンが多すぎます。{expected} が必要ですが、{received} を受信しました", "classPatternTypeAlias": "\"{type}\" は特殊な型エイリアスであるため、クラス パターンでは使用できません", "classPropertyDeprecated": "クラス プロパティは Python 3.11 では非推奨であり、Python 3.13 ではサポートされなくなります", @@ -434,7 +435,7 @@ "pyrightCommentNotOnOwnLine": "ファイル レベルの設定を制御するために使用する Pyright コメントは、独自の行に表示する必要があります", "pyrightCommentUnknownDiagnosticRule": "\"{rule}\" は pyright コメントの不明な診断規則です", "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\" は、pyright コメントの無効な値です。true、false、error、warning、information または none が必要です", - "pyrightCommentUnknownDirective": "\"{directive}\" は、pyright コメントの不明なディレクティブです。\"strict\" または \"basic\" が必要です", + "pyrightCommentUnknownDirective": "\"{directive}\" is an unknown directive for pyright comment; expected \"strict\", \"standard\", or \"basic\"", "readOnlyArgCount": "\"ReadOnly\" の後に 1 つの型引数が必要です", "readOnlyNotInTypedDict": "\"ReadOnly\" はこのコンテキストでは許可されていません", "recursiveDefinition": "\"{name}\" の型は、それ自体を参照しているため、特定できませんでした", @@ -822,6 +823,7 @@ "typedDictClassNotAllowed": "TypedDict クラスはインスタンスまたはクラスのチェックには使用できません", "typedDictClosedExtraNotAllowed": "アイテム \"{name}\" を追加できません", "typedDictClosedExtraTypeMismatch": "型 \"{type}\" のアイテム \"{name}\" を追加できません", + "typedDictClosedFieldNotReadOnly": "Cannot add item \"{name}\" because it must be ReadOnly", "typedDictClosedFieldNotRequired": "アイテム \"{name}\" を追加できません。これは NotRequired である必要があるためです。", "typedDictExtraFieldNotAllowed": "\"{name}\" は \"{type}\" に存在しません", "typedDictExtraFieldTypeMismatch": "\"{name}\" の型は、\"{type}\" の \"extra_items\" 型と互換性がありません", diff --git a/packages/pyright-internal/src/localization/package.nls.ko.json b/packages/pyright-internal/src/localization/package.nls.ko.json index d8153eb85e..f83b8ed491 
100644 --- a/packages/pyright-internal/src/localization/package.nls.ko.json +++ b/packages/pyright-internal/src/localization/package.nls.ko.json @@ -75,6 +75,7 @@ "classMethodClsParam": "클래스 메서드는 ‘cls’ 매개 변수를 사용해야 합니다.", "classNotRuntimeSubscriptable": "클래스 \"{name}\"에 대한 첨자는 런타임 예외를 생성합니다. 형식 식을 따옴표로 묶습니다.", "classPatternBuiltInArgPositional": "클래스 패턴은 위치 하위 패턴만 허용합니다.", + "classPatternNewType": "\"{type}\" cannot be used in a class pattern because it is defined using NewType", "classPatternPositionalArgCount": "클래스 \"{type}\"에 대한 위치 패턴이 너무 많습니다. {expected}이(가) 필요하지만 {received}을(를) 받았습니다.", "classPatternTypeAlias": "‘{type}’은(는) 특수 형식 별칭이므로 클래스 패턴에서 사용할 수 없습니다.", "classPropertyDeprecated": "클래스 속성은 Python 3.11에서 더 이상 사용되지 않으며 Python 3.13에서 지원되지 않습니다.", @@ -434,7 +435,7 @@ "pyrightCommentNotOnOwnLine": "파일 수준 설정을 제어하는 데 사용되는Pyright 주석은 고유의 줄에 표시되어야 합니다.", "pyrightCommentUnknownDiagnosticRule": "\"{rule}\"은(는) pyright 주석에 대한 알 수 없는 진단 규칙입니다.", "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\"이(가) pyright 주석에 대해 잘못된 값입니다. true, false, error, warning, information 또는 none이 필요합니다.", - "pyrightCommentUnknownDirective": "\"{directive}\"은(는) pyright 주석에 대한 알 수 없는 지시문입니다. 
\"strict\" 또는 \"basic\"이 필요합니다.", + "pyrightCommentUnknownDirective": "\"{directive}\" is an unknown directive for pyright comment; expected \"strict\", \"standard\", or \"basic\"", "readOnlyArgCount": "‘ReadOnly‘ 뒤에는 단일 형식 인수가 필요합니다.", "readOnlyNotInTypedDict": "이 컨텍스트에서는 \"ReadOnly\"를 사용할 수 없습니다.", "recursiveDefinition": "‘{name}’ 형식이 스스로를 참조하므로 확인할 수 없습니다.", @@ -822,6 +823,7 @@ "typedDictClassNotAllowed": "인스턴스 또는 클래스 검사에 TypedDict 클래스를 사용할 수 없습니다.", "typedDictClosedExtraNotAllowed": "항목 \"{name}\"을(를) 추가할 수 없음", "typedDictClosedExtraTypeMismatch": "형식이 \"{type}\"인 항목 \"{name}\"을(를) 추가할 수 없음", + "typedDictClosedFieldNotReadOnly": "Cannot add item \"{name}\" because it must be ReadOnly", "typedDictClosedFieldNotRequired": "\"{name}\" 항목은 NotRequired여야 하므로 추가할 수 없습니다.", "typedDictExtraFieldNotAllowed": "\"{name}\"이(가) \"{type}\"에 없음", "typedDictExtraFieldTypeMismatch": "\"{name}\" 형식은 \"{type}\"의 \"extra_items\" 형식과 호환되지 않습니다.", diff --git a/packages/pyright-internal/src/localization/package.nls.pl.json b/packages/pyright-internal/src/localization/package.nls.pl.json index 1e998e6fed..4ccce30a0b 100644 --- a/packages/pyright-internal/src/localization/package.nls.pl.json +++ b/packages/pyright-internal/src/localization/package.nls.pl.json @@ -75,6 +75,7 @@ "classMethodClsParam": "Metody klasy powinny przyjmować parametr „cls”", "classNotRuntimeSubscriptable": "Indeks dolny dla klasy „{name}” wygeneruje wyjątek środowiska uruchomieniowego; umieścić wyrażenie typu w cudzysłowy", "classPatternBuiltInArgPositional": "Wzorzec klasy akceptuje tylko podwzorzec pozycyjny", + "classPatternNewType": "\"{type}\" cannot be used in a class pattern because it is defined using NewType", "classPatternPositionalArgCount": "Zbyt wiele wzorców pozycyjnych dla klasy „{type}”; oczekiwano {expected}, ale otrzymano {received}", "classPatternTypeAlias": "„{type}” nie może być używany we wzorcu klasy, ponieważ jest to alias typu specjalnego", "classPropertyDeprecated": "Właściwości klasy 
są przestarzałe w języku Python 3.11 i nie będą obsługiwane w języku Python 3.13", @@ -434,7 +435,7 @@ "pyrightCommentNotOnOwnLine": "Komentarze Pyright używane do kontrolowania ustawień na poziomie plików muszą pojawiać się w oddzielnych wierszach", "pyrightCommentUnknownDiagnosticRule": "Reguła „{rule}” jest nieznaną regułą diagnostyczną dla komentarza pyright", "pyrightCommentUnknownDiagnosticSeverityValue": "Wartość „{value}” jest nieprawidłowa dla komentarza pyright; oczekiwano wartości: true, false, error, warning, information lub none", - "pyrightCommentUnknownDirective": "Wartość „{directive}” jest nieznaną dyrektywą dla komentarza pyright; oczekiwano wartości „strict” lub „basic”", + "pyrightCommentUnknownDirective": "\"{directive}\" is an unknown directive for pyright comment; expected \"strict\", \"standard\", or \"basic\"", "readOnlyArgCount": "Oczekiwano jednego argumentu typu po wartości „ReadOnly”", "readOnlyNotInTypedDict": "Element „ReadOnly” jest niedozwolony w tym kontekście", "recursiveDefinition": "Nie można określić typu „{name}”, ponieważ odwołuje się on do samego siebie", @@ -822,6 +823,7 @@ "typedDictClassNotAllowed": "Klasa TypedDict nie jest dozwolona na potrzeby sprawdzania wystąpienia lub klasy", "typedDictClosedExtraNotAllowed": "Nie można dodać elementu \"{name}\"", "typedDictClosedExtraTypeMismatch": "Nie można dodać elementu \"{name}\" z typem „{type}”", + "typedDictClosedFieldNotReadOnly": "Cannot add item \"{name}\" because it must be ReadOnly", "typedDictClosedFieldNotRequired": "Nie można dodać elementu \"{name}\", ponieważ musi on mieć wartość NotRequired", "typedDictExtraFieldNotAllowed": "Element „{name}” nie jest obecny w typie „{type}”", "typedDictExtraFieldTypeMismatch": "Typ „{name}” jest niezgodny z typem „extra_items” w typie „{type}”", diff --git a/packages/pyright-internal/src/localization/package.nls.pt-br.json b/packages/pyright-internal/src/localization/package.nls.pt-br.json index 5924db4f39..b6e02865cc 100644 --- 
a/packages/pyright-internal/src/localization/package.nls.pt-br.json +++ b/packages/pyright-internal/src/localization/package.nls.pt-br.json @@ -75,6 +75,7 @@ "classMethodClsParam": "Os métodos de classe devem usar um parâmetro \"cls\"", "classNotRuntimeSubscriptable": "O subscrito para a classe \"{name}\" gerará uma exceção de runtime. Coloque a expressão de tipo entre aspas", "classPatternBuiltInArgPositional": "O padrão de classe aceita apenas sub-padrão posicional", + "classPatternNewType": "\"{type}\" cannot be used in a class pattern because it is defined using NewType", "classPatternPositionalArgCount": "Muitos padrões posicionais para a classe \"{type}\"; esperado {expected} mas recebido {received}", "classPatternTypeAlias": "\"{type}\" não pode ser usado em um padrão de classe porque é um alias de tipo especializado", "classPropertyDeprecated": "As propriedades de classe foram preteridas no Python 3.11 e não terão suporte no Python 3.13", @@ -434,7 +435,7 @@ "pyrightCommentNotOnOwnLine": "Comentários Pyright usados para controlar as configurações de nível de arquivo devem aparecer em sua própria linha", "pyrightCommentUnknownDiagnosticRule": "\"{rule}\" é uma regra de diagnóstico desconhecida para o comentário pyright", "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\" é um valor inválido para o comentário pyright. true, false, error, warning, information ou none esperados.", - "pyrightCommentUnknownDirective": "\"{directive}\" é uma diretiva desconhecida para o comentário pyright. 
Esperava-se \"strict\" ou \"basic\"", + "pyrightCommentUnknownDirective": "\"{directive}\" is an unknown directive for pyright comment; expected \"strict\", \"standard\", or \"basic\"", "readOnlyArgCount": "Argumento de tipo único esperado após \"ReadOnly\"", "readOnlyNotInTypedDict": "\"ReadOnly\" não é permitido neste contexto", "recursiveDefinition": "Não foi possível determinar o tipo de \"{name}\" porque ele refere-se a si mesmo", @@ -822,6 +823,7 @@ "typedDictClassNotAllowed": "A classe TypedDict não é permitida para verificações de instância ou classe", "typedDictClosedExtraNotAllowed": "Não é possível adicionar o item \"{name}\"", "typedDictClosedExtraTypeMismatch": "Não é possível adicionar o item \"{name}\" com o tipo \"{type}\"", + "typedDictClosedFieldNotReadOnly": "Cannot add item \"{name}\" because it must be ReadOnly", "typedDictClosedFieldNotRequired": "Não é possível adicionar o item \"{name}\" porque ele deve ser NotRequired", "typedDictExtraFieldNotAllowed": "\"{name}\" não está presente em \"{type}\"", "typedDictExtraFieldTypeMismatch": "Tipo de \"{name}\" é incompatível com tipo de \"extra_items\" em \"{type}\"", diff --git a/packages/pyright-internal/src/localization/package.nls.qps-ploc.json b/packages/pyright-internal/src/localization/package.nls.qps-ploc.json index 96cb8535f7..822dfbf368 100644 --- a/packages/pyright-internal/src/localization/package.nls.qps-ploc.json +++ b/packages/pyright-internal/src/localization/package.nls.qps-ploc.json @@ -75,6 +75,7 @@ "classMethodClsParam": "[aWMN3][นั้Çlæss mëthøðs shøµlð tækë æ \"cls\" pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", "classNotRuntimeSubscriptable": "[O9BL6][นั้§µþsçrïpt før çlæss \"{ñæmë}\" wïll gëñërætë rµñtïmë ëxçëptïøñ; ëñçløsë tÿpë ëxprëssïøñ ïñ qµøtësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", "classPatternBuiltInArgPositional": "[DOfs5][นั้Çlæss pættërñ æççëpts øñlÿ pøsïtïøñæl sµþ-pættërñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "classPatternNewType": "[9l6u3][นั้\"{tÿpë}\" çæññøt þë µsëð ïñ æ 
çlæss pættërñ þëçæµsë ït ïs ðëfïñëð µsïñg ÑëwTÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", "classPatternPositionalArgCount": "[B65y5][นั้Tøø mæñÿ pøsïtïøñæl pættërñs før çlæss \"{tÿpë}\"; ëxpëçtëð {ëxpëçtëð} þµt rëçëïvëð {rëçëïvëð}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", "classPatternTypeAlias": "[AxDtv][นั้\"{tÿpë}\" çæññøt þë µsëð ïñ æ çlæss pættërñ þëçæµsë ït ïs æ spëçïælïzëð tÿpë ælïæsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", "classPropertyDeprecated": "[Q6JgP][นั้Çlæss prøpërtïës ærë ðëprëçætëð ïñ Pÿthøñ 3.11 æñð wïll ñøt þë sµppørtëð ïñ Pÿthøñ 3.13Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", @@ -434,7 +435,7 @@ "pyrightCommentNotOnOwnLine": "[mM2bV][นั้Pyright çømmëñts µsëð tø çøñtrøl fïlë-lëvël sëttïñgs mµst æppëær øñ thëïr øwñ lïñëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", "pyrightCommentUnknownDiagnosticRule": "[DFAZp][นั้\"{rµlë}\" ïs æñ µñkñøwñ ðïægñøstïç rµlë før pyright çømmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", "pyrightCommentUnknownDiagnosticSeverityValue": "[Tgt0Y][นั้\"{vælµë}\" ïs ïñvælïð vælµë før pyright çømmëñt; ëxpëçtëð true, false, error, warning, information, ør noneẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", - "pyrightCommentUnknownDirective": "[HD6T4][นั้\"{ðïrëçtïvë}\" ïs æñ µñkñøwñ ðïrëçtïvë før pyright çømmëñt; ëxpëçtëð \"strict\" ør \"basic\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "pyrightCommentUnknownDirective": "[HD6T4][นั้\"{ðïrëçtïvë}\" ïs æñ µñkñøwñ ðïrëçtïvë før pyright çømmëñt; ëxpëçtëð \"strict\", \"standard\", ør \"basic\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", "readOnlyArgCount": "[B1Erm][นั้Ëxpëçtëð æ sïñglë tÿpë ærgµmëñt æftër \"ReadOnly\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", "readOnlyNotInTypedDict": "[xJrLN][นั้\"ReadOnly\" ïs ñøt ælløwëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", "recursiveDefinition": "[G3UUN][นั้Tÿpë øf \"{ñæmë}\" çøµlð ñøt þë ðëtërmïñëð þëçæµsë ït rëfërs tø ïtsëlfẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", @@ -822,6 +823,7 @@ "typedDictClassNotAllowed": "[Vgl7x][นั้TypedDict çlæss ñøt 
ælløwëð før ïñstæñçë ør çlæss çhëçksẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", "typedDictClosedExtraNotAllowed": "[zT7Rm][นั้Çæññøt æðð ïtëm \"{ñæmë}\"Ấğ倪İЂҰक्र्นั้ढूँ]", "typedDictClosedExtraTypeMismatch": "[blC1e][นั้Çæññøt æðð ïtëm \"{ñæmë}\" wïth tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "typedDictClosedFieldNotReadOnly": "[45ICT][นั้Çæññøt æðð ïtëm \"{ñæmë}\" þëçæµsë ït mµst þë ReadOnlyẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", "typedDictClosedFieldNotRequired": "[6rtDR][นั้Çæññøt æðð ïtëm \"{ñæmë}\" þëçæµsë ït mµst þë NotRequiredẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", "typedDictExtraFieldNotAllowed": "[kFDh9][นั้\"{ñæmë}\" ïs ñøt prësëñt ïñ \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまนั้ढूँ]", "typedDictExtraFieldTypeMismatch": "[DnAhM][นั้Tÿpë øf \"{ñæmë}\" ïs ïñçømpætïþlë wïth tÿpë øf \"extra_items\" ïñ \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", diff --git a/packages/pyright-internal/src/localization/package.nls.tr.json b/packages/pyright-internal/src/localization/package.nls.tr.json index 79fa8742de..2caffe6f0c 100644 --- a/packages/pyright-internal/src/localization/package.nls.tr.json +++ b/packages/pyright-internal/src/localization/package.nls.tr.json @@ -75,6 +75,7 @@ "classMethodClsParam": "Sınıf metotları bir \"cls\" parametresi almalıdır", "classNotRuntimeSubscriptable": "\"{name}\" sınıfına ait alt simge çalışma zamanı özel durumunu oluşturur; tür ifadelerini tırnak içine alın", "classPatternBuiltInArgPositional": "Sınıf deseni yalnızca konumsal alt desen kabul eder", + "classPatternNewType": "\"{type}\" cannot be used in a class pattern because it is defined using NewType", "classPatternPositionalArgCount": "\"{type}\" sınıfı için çok fazla konumsal desen var; {expected} bekleniyordu ancak {received} alındı", "classPatternTypeAlias": "\"{type}\" özel bir tür diğer adı olduğundan sınıf deseninde kullanılamaz", "classPropertyDeprecated": "Sınıf özellikleri Python 3.11'de kullanım dışıdır ve Python 3.13'te desteklenmez", @@ -434,7 +435,7 @@ "pyrightCommentNotOnOwnLine": "Pyright comments used 
to control file-level settings must appear on their own line", "pyrightCommentUnknownDiagnosticRule": "\"{rule}\", pyright açıklaması için bilinmeyen bir tanılama kuralı", "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\" pyright açıklaması için geçersiz değer; true, false, error, warning, information veya none bekleniyordu", - "pyrightCommentUnknownDirective": "\"{directive}\", pyright açıklaması için bilinmeyen bir yönergedir; \"strict\" veya \"basic\" bekleniyordu", + "pyrightCommentUnknownDirective": "\"{directive}\" is an unknown directive for pyright comment; expected \"strict\", \"standard\", or \"basic\"", "readOnlyArgCount": "\"ReadOnly\" sonrasında tek bir tür bağımsız değişken bekleniyordu", "readOnlyNotInTypedDict": "Bu bağlamda \"ReadOnly\" kullanımına izin verilmiyor", "recursiveDefinition": "Kendine başvurduğundan \"{name}\" türü belirlenemedi", @@ -822,6 +823,7 @@ "typedDictClassNotAllowed": "Örnek veya sınıf denetimleri için TypedDict sınıfına izin verilmiyor", "typedDictClosedExtraNotAllowed": "\"{name}\" öğesi eklenemiyor", "typedDictClosedExtraTypeMismatch": "\"{type}\" türündeki \"{name}\" öğesi eklenemiyor", + "typedDictClosedFieldNotReadOnly": "Cannot add item \"{name}\" because it must be ReadOnly", "typedDictClosedFieldNotRequired": "NotRequired olması gerektiğinden \"{name}\" öğesi eklenemiyor", "typedDictExtraFieldNotAllowed": "\"{name}\" öğesi \"{type}\" türünde mevcut değil", "typedDictExtraFieldTypeMismatch": "\"{name}\" türü, \"{type}\" altındaki \"extra_items\" türüyle uyumlu değil", diff --git a/packages/pyright-internal/src/localization/package.nls.zh-tw.json b/packages/pyright-internal/src/localization/package.nls.zh-tw.json index 6b533bf513..93cd53ad47 100644 --- a/packages/pyright-internal/src/localization/package.nls.zh-tw.json +++ b/packages/pyright-internal/src/localization/package.nls.zh-tw.json @@ -75,6 +75,7 @@ "classMethodClsParam": "類別方法應採用 \"cls\" 參數", "classNotRuntimeSubscriptable": "類別 \"{name}\" 
的下標會產生執行階段例外; 以引號括住類型運算式", "classPatternBuiltInArgPositional": "類別模式僅接受位置子模式", + "classPatternNewType": "\"{type}\" cannot be used in a class pattern because it is defined using NewType", "classPatternPositionalArgCount": "類別 \"{type}\" 的位置模式太多;預期 {expected} 但收到 {received}", "classPatternTypeAlias": "無法在類別模式中使用 \"{type}\",因為它是特殊的型別別名", "classPropertyDeprecated": "類別屬性在 Python 3.11 中已取代,在 Python 3.13 中將不受支援", @@ -434,7 +435,7 @@ "pyrightCommentNotOnOwnLine": "用來控制檔案層級設定的 Pyright 註解必須出現在自己的行上", "pyrightCommentUnknownDiagnosticRule": "\"{rule}\" 是 pyright 註解未知的診斷規則", "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\" 是 pyright 註解無效的值; 預期為 true、false、error、warning、information 或 none", - "pyrightCommentUnknownDirective": "\"{directive}\" 是 pyright 註解未知的指示詞; 預期為 \"strict\" 或 \"basic\"", + "pyrightCommentUnknownDirective": "\"{directive}\" is an unknown directive for pyright comment; expected \"strict\", \"standard\", or \"basic\"", "readOnlyArgCount": "\"ReadOnly\" 後面應有單一型別引數", "readOnlyNotInTypedDict": "此內容中不允許 \"ReadOnly\"", "recursiveDefinition": "無法判斷 \"{name}\" 型別,因為它參照了自己", @@ -822,6 +823,7 @@ "typedDictClassNotAllowed": "執行個體或類別檢查不允許 TypedDict 類別", "typedDictClosedExtraNotAllowed": "無法新增項目 \"{name}\"", "typedDictClosedExtraTypeMismatch": "無法新增型別為 \"{type}\" 的項目 \"{name}\"", + "typedDictClosedFieldNotReadOnly": "Cannot add item \"{name}\" because it must be ReadOnly", "typedDictClosedFieldNotRequired": "無法新增項目 \"{name}\",因為它必須是 NotRequired", "typedDictExtraFieldNotAllowed": "\"{name}\" 不存在於 \"{type}\"", "typedDictExtraFieldTypeMismatch": "\"{name}\" 的類型與 \"{type}\" 中 \"extra_items\" 的類型不相容", diff --git a/packages/pyright-internal/src/realLanguageServer.ts b/packages/pyright-internal/src/realLanguageServer.ts index cbdeb88690..6446af2acf 100644 --- a/packages/pyright-internal/src/realLanguageServer.ts +++ b/packages/pyright-internal/src/realLanguageServer.ts @@ -283,7 +283,7 @@ export abstract class RealLanguageServer extends LanguageServerBase { } catch 
(error) { const errorMessage = error instanceof Error ? error.message : error; this.connection.sendNotification(ShowMessageNotification.type, { - message: errorMessage, + message: String(errorMessage), type: MessageType.Error, }); } diff --git a/packages/pyright-internal/src/tests/completions.test.ts b/packages/pyright-internal/src/tests/completions.test.ts index de147cccc5..bed770f2a6 100644 --- a/packages/pyright-internal/src/tests/completions.test.ts +++ b/packages/pyright-internal/src/tests/completions.test.ts @@ -1876,6 +1876,119 @@ test('overloaded Literal[...] suggestions in call arguments', async () => { }); }); +test('nested TypedDict completion with Unpack - without other fields', async () => { + const code = ` +// @filename: test.py +//// from typing import Unpack, TypedDict +//// +//// class InnerDict(TypedDict): +//// a: int +//// b: str +//// +//// class OuterDict(TypedDict): +//// inner: InnerDict +//// field_1: str +//// +//// def test_inner_dict(**kwargs: Unpack[OuterDict]): +//// pass +//// +//// test_inner_dict(inner={[|/*marker*/|]}) + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: "'a'", + textEdit: { range: state.getPositionRange('marker'), newText: "'a'" }, + }, + { + kind: CompletionItemKind.Constant, + label: "'b'", + textEdit: { range: state.getPositionRange('marker'), newText: "'b'" }, + }, + ], + }, + }); +}); + +test('nested TypedDict completion with Unpack - with other fields', async () => { + const code = ` +// @filename: test.py +//// from typing import Unpack, TypedDict +//// +//// class InnerDict(TypedDict): +//// a: int +//// b: str +//// +//// class OuterDict(TypedDict): +//// inner: InnerDict +//// field_1: str +//// +//// def test_inner_dict(**kwargs: Unpack[OuterDict]): +//// pass +//// +//// test_inner_dict(field_1="test", inner={[|/*marker*/|]}) + `; + + const state = 
parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"a"', + textEdit: { range: state.getPositionRange('marker'), newText: '"a"' }, + }, + { + kind: CompletionItemKind.Constant, + label: '"b"', + textEdit: { range: state.getPositionRange('marker'), newText: '"b"' }, + }, + ], + }, + }); +}); + +test('simple nested TypedDict completion - no Unpack', async () => { + const code = ` +// @filename: test.py +//// from typing import TypedDict +//// +//// class InnerDict(TypedDict): +//// a: int +//// b: str +//// +//// def test_func(inner: InnerDict): +//// pass +//// +//// test_func(inner={[|/*marker*/|]}) + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: "'a'", + textEdit: { range: state.getPositionRange('marker'), newText: "'a'" }, + }, + { + kind: CompletionItemKind.Constant, + label: "'b'", + textEdit: { range: state.getPositionRange('marker'), newText: "'b'" }, + }, + ], + }, + }); +}); + test('dataclass field alias with invalid python identifier', async () => { const code = ` // @filename: test.py diff --git a/packages/pyright-internal/src/tests/samples/matchSequence1.py b/packages/pyright-internal/src/tests/samples/matchSequence1.py index 7f623fb33a..5f76d33ce9 100644 --- a/packages/pyright-internal/src/tests/samples/matchSequence1.py +++ b/packages/pyright-internal/src/tests/samples/matchSequence1.py @@ -627,6 +627,38 @@ def test_unbounded_tuple_6(subj: tuple[str, ...]): reveal_type(r, expected_text="tuple[str, ...]") +def test_unbound_tuple_7(subj: tuple[str, Unpack[tuple[object, ...]], int]): + match subj: + case (*args,): + reveal_type(args, expected_text="list[str | object | int]") + case a: + reveal_type(a, expected_text="Never") + + match subj: + case (*args, last): + reveal_type(args, 
expected_text="list[str | object]") + reveal_type(last, expected_text="int") + case a: + reveal_type(a, expected_text="Never") + + match subj: + case (first, *args, last): + reveal_type(first, expected_text="str") + reveal_type(args, expected_text="list[object]") + reveal_type(last, expected_text="int") + case a: + reveal_type(a, expected_text="Never") + + match subj: + case (first, second, *args, last): + reveal_type(first, expected_text="str") + reveal_type(second, expected_text="object") + reveal_type(args, expected_text="list[object]") + reveal_type(last, expected_text="int") + case a: + reveal_type(a, expected_text="tuple[str, *tuple[object, ...], int]") + + def test_variadic_tuple(subj: tuple[int, Unpack[Ts]]) -> tuple[Unpack[Ts]]: match subj: case _, *rest: diff --git a/packages/pyright-internal/typeshed-fallback/README.md b/packages/pyright-internal/typeshed-fallback/README.md index 1467aa20b4..d295b56bc0 100644 --- a/packages/pyright-internal/typeshed-fallback/README.md +++ b/packages/pyright-internal/typeshed-fallback/README.md @@ -7,10 +7,10 @@ ## About Typeshed contains external type annotations for the Python standard library -and Python builtins, as well as third party packages as contributed by +and Python builtins, as well as third-party packages that are contributed by people external to those projects. -This data can e.g. be used for static analysis, type checking, type inference, +This data can, e.g., be used for static analysis, type checking, type inference, and autocompletion. For information on how to use typeshed, read below. Information for @@ -29,8 +29,8 @@ If you're just using a type checker (e.g. [mypy](https://github.com/python/mypy/ [pyright](https://github.com/microsoft/pyright), or PyCharm's built-in type checker), as opposed to developing it, you don't need to interact with the typeshed repo at -all: a copy of standard library part of typeshed is bundled with type checkers. 
-And type stubs for third party packages and modules you are using can +all: a copy of the standard library part of typeshed is bundled with type checkers. +And type stubs for third-party packages and modules you are using can be installed from PyPI. For example, if you are using `html5lib` and `requests`, you can install the type stubs using @@ -70,7 +70,7 @@ package you're using, each with its own tradeoffs: type checking due to changes in the stubs. Another risk of this strategy is that stubs often lag behind - the package being stubbed. You might want to force the package being stubbed + the package that is being stubbed. You might want to force the package being stubbed to a certain minimum version because it fixes a critical bug, but if correspondingly updated stubs have not been released, your type checking results may not be fully accurate. @@ -119,6 +119,6 @@ a review of your type annotations or stubs outside of typeshed, head over to [our discussion forum](https://github.com/python/typing/discussions). For less formal discussion, try the typing chat room on [gitter.im](https://gitter.im/python/typing). Some typeshed maintainers -are almost always present; feel free to find us there and we're happy +are almost always present; feel free to find us there, and we're happy to chat. Substantive technical discussion will be directed to the issue tracker. 
diff --git a/packages/pyright-internal/typeshed-fallback/commit.txt b/packages/pyright-internal/typeshed-fallback/commit.txt index 3ed4df859a..962b4bf23c 100644 --- a/packages/pyright-internal/typeshed-fallback/commit.txt +++ b/packages/pyright-internal/typeshed-fallback/commit.txt @@ -1 +1 @@ -a205439338a4ad3debec1eeae7d300e3781c066d +a564787bf23386e57338b750bf4733f3c978b701 diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_compression.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_compression.pyi index aa67df2ab4..6015bcb13f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_compression.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_compression.pyi @@ -1,6 +1,6 @@ # _compression is replaced by compression._common._streams on Python 3.14+ (PEP-784) -from _typeshed import Incomplete, WriteableBuffer +from _typeshed import ReadableBuffer, WriteableBuffer from collections.abc import Callable from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase from typing import Any, Protocol, type_check_only @@ -13,13 +13,24 @@ class _Reader(Protocol): def seekable(self) -> bool: ... def seek(self, n: int, /) -> Any: ... +@type_check_only +class _Decompressor(Protocol): + def decompress(self, data: ReadableBuffer, /, max_length: int = ...) -> bytes: ... + @property + def unused_data(self) -> bytes: ... + @property + def eof(self) -> bool: ... + # `zlib._Decompress` does not have next property, but `DecompressReader` calls it: + # @property + # def needs_input(self) -> bool: ... + class BaseStream(BufferedIOBase): ... class DecompressReader(RawIOBase): def __init__( self, fp: _Reader, - decomp_factory: Callable[..., Incomplete], + decomp_factory: Callable[..., _Decompressor], trailing_error: type[Exception] | tuple[type[Exception], ...] = (), **decomp_args: Any, # These are passed to decomp_factory. ) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_ctypes.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_ctypes.pyi index c87cf5e326..be7792818d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_ctypes.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_ctypes.pyi @@ -1,11 +1,12 @@ import _typeshed +import builtins import sys from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer from abc import abstractmethod from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence from ctypes import CDLL, ArgumentError as ArgumentError, c_void_p from types import GenericAlias -from typing import Any, ClassVar, Final, Generic, TypeVar, final, overload, type_check_only +from typing import Any, ClassVar, Final, Generic, Literal, TypeVar, final, overload, type_check_only from typing_extensions import Self, TypeAlias _T = TypeVar("_T") @@ -195,24 +196,45 @@ class CFuncPtr(_PointerLike, _CData, metaclass=_PyCFuncPtrType): _GetT = TypeVar("_GetT") _SetT = TypeVar("_SetT") -# This class is not exposed. It calls itself _ctypes.CField. -@final -@type_check_only -class _CField(Generic[_CT, _GetT, _SetT]): - offset: int - size: int - if sys.version_info >= (3, 10): - @overload - def __get__(self, instance: None, owner: type[Any] | None = None, /) -> Self: ... - @overload - def __get__(self, instance: Any, owner: type[Any] | None = None, /) -> _GetT: ... - else: +if sys.version_info >= (3, 14): + @final + class CField(Generic[_CT, _GetT, _SetT]): + offset: int + size: int + name: str + type: builtins.type[_CT] + byte_offset: int + byte_size: int + is_bitfield: bool + bit_offset: int + bit_size: int + is_anonymous: bool @overload - def __get__(self, instance: None, owner: type[Any] | None, /) -> Self: ... + def __get__(self, instance: None, owner: builtins.type[Any] | None = None, /) -> Self: ... @overload - def __get__(self, instance: Any, owner: type[Any] | None, /) -> _GetT: ... 
+ def __get__(self, instance: Any, owner: builtins.type[Any] | None = None, /) -> _GetT: ... + def __set__(self, instance: Any, value: _SetT, /) -> None: ... - def __set__(self, instance: Any, value: _SetT, /) -> None: ... + _CField = CField + +else: + @final + @type_check_only + class _CField(Generic[_CT, _GetT, _SetT]): + offset: int + size: int + if sys.version_info >= (3, 10): + @overload + def __get__(self, instance: None, owner: type[Any] | None = None, /) -> Self: ... + @overload + def __get__(self, instance: Any, owner: type[Any] | None = None, /) -> _GetT: ... + else: + @overload + def __get__(self, instance: None, owner: type[Any] | None, /) -> Self: ... + @overload + def __get__(self, instance: Any, owner: type[Any] | None, /) -> _GetT: ... + + def __set__(self, instance: Any, value: _SetT, /) -> None: ... # This class is not exposed. It calls itself _ctypes.UnionType. @type_check_only @@ -266,6 +288,10 @@ class Structure(_CData, metaclass=_PyCStructType): if sys.version_info >= (3, 13): _align_: ClassVar[int] + if sys.version_info >= (3, 14): + # _layout_ can be defined by the user, but is not always present. + _layout_: ClassVar[Literal["ms", "gcc-sysv"]] + def __init__(self, *args: Any, **kw: Any) -> None: ... def __getattr__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/README.md b/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/README.md deleted file mode 100644 index f4808944fa..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/README.md +++ /dev/null @@ -1,34 +0,0 @@ -# Utility types for typeshed - -This package and its submodules contains various common types used by -typeshed. It can also be used by packages outside typeshed, but beware -the API stability guarantees below. 
- -## Usage - -The `_typeshed` package and its types do not exist at runtime, but can be -used freely in stubs (`.pyi`) files. To import the types from this package in -implementation (`.py`) files, use the following construct: - -```python -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from _typeshed import ... -``` - -Types can then be used in annotations by either quoting them or -using: - -```python -from __future__ import annotations -``` - -## API Stability - -You can use this package and its submodules outside of typeshed, but we -guarantee only limited API stability. Items marked as "stable" will not be -removed or changed in an incompatible way for at least one year. -Before making such a change, the "stable" moniker will be removed -and we will mark the type in question as deprecated. No guarantees -are made about unmarked types. diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_winapi.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_winapi.pyi index d9e2c377b1..42efce9bed 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_winapi.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_winapi.pyi @@ -192,6 +192,9 @@ if sys.platform == "win32": template_file: int, /, ) -> int: ... + def CreateFileMapping( + file_handle: int, security_attributes: int, protect: int, max_size_high: int, max_size_low: int, name: str, / + ) -> int: ... def CreateJunction(src_path: str, dst_path: str, /) -> None: ... def CreateNamedPipe( name: str, @@ -235,6 +238,9 @@ if sys.platform == "win32": def GetModuleFileName(module_handle: int, /) -> str: ... def GetStdHandle(std_handle: int, /) -> int: ... def GetVersion() -> int: ... + def MapViewOfFile( + file_map: int, desired_access: int, file_offset_high: int, file_offset_low: int, number_bytes: int, / + ) -> int: ... def OpenProcess(desired_access: int, inherit_handle: bool, process_id: int, /) -> int: ... 
def PeekNamedPipe(handle: int, size: int = 0, /) -> tuple[int, int] | tuple[bytes, int, int]: ... if sys.version_info >= (3, 10): @@ -251,6 +257,7 @@ if sys.platform == "win32": named_pipe: int, mode: int | None, max_collection_count: int | None, collect_data_timeout: int | None, / ) -> None: ... def TerminateProcess(handle: int, exit_code: int, /) -> None: ... + def VirtualQuerySize(address: int, /) -> int: ... def WaitForMultipleObjects(handle_seq: Sequence[int], wait_flag: bool, milliseconds: int = 0xFFFFFFFF, /) -> int: ... def WaitForSingleObject(handle: int, milliseconds: int, /) -> int: ... def WaitNamedPipe(name: str, timeout: int, /) -> None: ... @@ -281,6 +288,8 @@ if sys.platform == "win32": def ResetEvent(event: int) -> None: ... def SetEvent(event: int) -> None: ... + def OpenFileMapping(desired_access: int, inherit_handle: bool, name: str, /) -> int: ... + if sys.version_info >= (3, 12): def CopyFile2(existing_file_name: str, new_file_name: str, flags: int, progress_routine: int | None = None) -> int: ... def NeedCurrentDirectoryForExePath(exe_name: str, /) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_zstd.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_zstd.pyi index f5e98ef88b..e40c7d12b6 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_zstd.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_zstd.pyi @@ -46,7 +46,10 @@ class ZstdCompressor: FLUSH_BLOCK: Final = 1 FLUSH_FRAME: Final = 2 def __new__( - cls, level: int | None = None, options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None + cls, + level: int | None = None, + options: Mapping[int, int] | None = None, + zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, ) -> Self: ... 
def compress( self, /, data: ReadableBuffer, mode: _ZstdCompressorContinue | _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 0 @@ -58,7 +61,9 @@ class ZstdCompressor: @final class ZstdDecompressor: - def __new__(cls, zstd_dict: ZstdDict | None = None, options: Mapping[int, int] | None = None) -> Self: ... + def __new__( + cls, zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, options: Mapping[int, int] | None = None + ) -> Self: ... def decompress(self, /, data: ReadableBuffer, max_length: int = -1) -> bytes: ... @property def eof(self) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/argparse.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/argparse.pyi index bce20e0925..ae99eb0368 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/argparse.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/argparse.pyi @@ -91,15 +91,40 @@ class _ActionsContainer: version: str = ..., **kwargs: Any, ) -> Action: ... - def add_argument_group( - self, - title: str | None = None, - description: str | None = None, - *, - prefix_chars: str = ..., - argument_default: Any = ..., - conflict_handler: str = ..., - ) -> _ArgumentGroup: ... + if sys.version_info >= (3, 14): + @overload + def add_argument_group( + self, + title: str | None = None, + description: str | None = None, + *, + # argument_default's type must be valid for the arguments in the group + argument_default: Any = ..., + conflict_handler: str = ..., + ) -> _ArgumentGroup: ... + @overload + @deprecated("The `prefix_chars` parameter deprecated since Python 3.14.") + def add_argument_group( + self, + title: str | None = None, + description: str | None = None, + *, + prefix_chars: str, + argument_default: Any = ..., + conflict_handler: str = ..., + ) -> _ArgumentGroup: ... 
+ else: + def add_argument_group( + self, + title: str | None = None, + description: str | None = None, + *, + prefix_chars: str = ..., + # argument_default's type must be valid for the arguments in the group + argument_default: Any = ..., + conflict_handler: str = ..., + ) -> _ArgumentGroup: ... + def add_mutually_exclusive_group(self, *, required: bool = False) -> _MutuallyExclusiveGroup: ... def _add_action(self, action: _ActionT) -> _ActionT: ... def _remove_action(self, action: Action) -> None: ... @@ -249,7 +274,11 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): def _read_args_from_files(self, arg_strings: list[str]) -> list[str]: ... def _match_argument(self, action: Action, arg_strings_pattern: str) -> int: ... def _match_arguments_partial(self, actions: Sequence[Action], arg_strings_pattern: str) -> list[int]: ... - def _parse_optional(self, arg_string: str) -> tuple[Action | None, str, str | None] | None: ... + if sys.version_info >= (3, 12): + def _parse_optional(self, arg_string: str) -> list[tuple[Action | None, str, str | None, str | None]] | None: ... + else: + def _parse_optional(self, arg_string: str) -> tuple[Action | None, str, str | None] | None: ... + def _get_option_tuples(self, option_string: str) -> list[tuple[Action, str, str | None]]: ... def _get_nargs_pattern(self, action: Action) -> str: ... def _get_values(self, action: Action, arg_strings: list[str]) -> Any: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/coroutines.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/coroutines.pyi index 59212f4ec3..777961d804 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/coroutines.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/coroutines.pyi @@ -17,12 +17,31 @@ if sys.version_info < (3, 11): @deprecated("Deprecated since Python 3.8; removed in Python 3.11. Use `async def` instead.") def coroutine(func: _FunctionT) -> _FunctionT: ... 
-@overload -def iscoroutinefunction(func: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ... -@overload -def iscoroutinefunction(func: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, _T]]]: ... -@overload -def iscoroutinefunction(func: Callable[_P, object]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, Any]]]: ... -@overload -def iscoroutinefunction(func: object) -> TypeGuard[Callable[..., Coroutine[Any, Any, Any]]]: ... def iscoroutine(obj: object) -> TypeIs[Coroutine[Any, Any, Any]]: ... + +if sys.version_info >= (3, 11): + @overload + @deprecated("Deprecated since Python 3.14. Use `inspect.iscoroutinefunction()` instead.") + def iscoroutinefunction(func: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ... + @overload + @deprecated("Deprecated since Python 3.14. Use `inspect.iscoroutinefunction()` instead.") + def iscoroutinefunction(func: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, _T]]]: ... + @overload + @deprecated("Deprecated since Python 3.14. Use `inspect.iscoroutinefunction()` instead.") + def iscoroutinefunction(func: Callable[_P, object]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, Any]]]: ... + @overload + @deprecated("Deprecated since Python 3.14. Use `inspect.iscoroutinefunction()` instead.") + def iscoroutinefunction(func: object) -> TypeGuard[Callable[..., Coroutine[Any, Any, Any]]]: ... + +else: + # Sometimes needed in Python < 3.11 due to the fact that it supports @coroutine + # which was removed in 3.11 which the inspect version doesn't support. + + @overload + def iscoroutinefunction(func: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ... + @overload + def iscoroutinefunction(func: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, _T]]]: ... + @overload + def iscoroutinefunction(func: Callable[_P, object]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, Any]]]: ... 
+ @overload + def iscoroutinefunction(func: object) -> TypeGuard[Callable[..., Coroutine[Any, Any, Any]]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/protocols.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/protocols.pyi index 2c52ad4be4..3a8965f03e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/protocols.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/protocols.pyi @@ -14,7 +14,7 @@ class BaseProtocol: class Protocol(BaseProtocol): # Need annotation or mypy will complain about 'Cannot determine type of "__slots__" in base class' - __slots__: tuple[()] = () + __slots__: tuple[str, ...] = () def data_received(self, data: bytes) -> None: ... def eof_received(self) -> bool | None: ... @@ -35,7 +35,7 @@ class DatagramProtocol(BaseProtocol): def error_received(self, exc: Exception) -> None: ... class SubprocessProtocol(BaseProtocol): - __slots__: tuple[()] = () + __slots__: tuple[str, ...] = () def pipe_data_received(self, fd: int, data: bytes) -> None: ... def pipe_connection_lost(self, fd: int, exc: Exception | None) -> None: ... def process_exited(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/runners.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/runners.pyi index 919e6521f8..a100c9bcec 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/runners.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/runners.pyi @@ -1,6 +1,6 @@ import sys from _typeshed import Unused -from collections.abc import Callable, Coroutine +from collections.abc import Awaitable, Callable, Coroutine from contextvars import Context from typing import Any, TypeVar, final from typing_extensions import Self @@ -22,7 +22,10 @@ if sys.version_info >= (3, 11): def __exit__(self, exc_type: Unused, exc_val: Unused, exc_tb: Unused) -> None: ... def close(self) -> None: ... 
def get_loop(self) -> AbstractEventLoop: ... - def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = None) -> _T: ... + if sys.version_info >= (3, 14): + def run(self, coro: Awaitable[_T], *, context: Context | None = None) -> _T: ... + else: + def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = None) -> _T: ... if sys.version_info >= (3, 12): def run( diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/trsock.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/trsock.pyi index 492f1e42ad..d3e95559eb 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/trsock.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/trsock.pyi @@ -62,7 +62,7 @@ class TransportSocket: def listen(self, backlog: int = ..., /) -> None: ... @deprecated("Removed in Python 3.11") def makefile(self) -> BinaryIO: ... - @deprecated("Rmoved in Python 3.11") + @deprecated("Removed in Python 3.11") def sendfile(self, file: BinaryIO, offset: int = 0, count: int | None = None) -> int: ... @deprecated("Removed in Python 3.11") def close(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/builtins.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/builtins.pyi index 1bc4649333..416d793de6 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/builtins.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/builtins.pyi @@ -42,6 +42,7 @@ from typing import ( # noqa: Y022,UP035 Any, BinaryIO, ClassVar, + Final, Generic, Mapping, MutableMapping, @@ -189,8 +190,9 @@ class type: __bases__: tuple[type, ...] @property def __basicsize__(self) -> int: ... - @property - def __dict__(self) -> types.MappingProxyType[str, Any]: ... # type: ignore[override] + # type.__dict__ is read-only at runtime, but that can't be expressed currently. + # See https://github.com/python/typeshed/issues/11033 for a discussion. 
+ __dict__: Final[types.MappingProxyType[str, Any]] # type: ignore[assignment] @property def __dictoffset__(self) -> int: ... @property @@ -729,8 +731,13 @@ class bytes(Sequence[int]): def translate(self, table: ReadableBuffer | None, /, delete: ReadableBuffer = b"") -> bytes: ... def upper(self) -> bytes: ... def zfill(self, width: SupportsIndex, /) -> bytes: ... - @classmethod - def fromhex(cls, string: str, /) -> Self: ... + if sys.version_info >= (3, 14): + @classmethod + def fromhex(cls, string: str | ReadableBuffer, /) -> Self: ... + else: + @classmethod + def fromhex(cls, string: str, /) -> Self: ... + @staticmethod def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: ... def __len__(self) -> int: ... @@ -834,8 +841,13 @@ class bytearray(MutableSequence[int]): def translate(self, table: ReadableBuffer | None, /, delete: bytes = b"") -> bytearray: ... def upper(self) -> bytearray: ... def zfill(self, width: SupportsIndex, /) -> bytearray: ... - @classmethod - def fromhex(cls, string: str, /) -> Self: ... + if sys.version_info >= (3, 14): + @classmethod + def fromhex(cls, string: str | ReadableBuffer, /) -> Self: ... + else: + @classmethod + def fromhex(cls, string: str, /) -> Self: ... + @staticmethod def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: ... def __len__(self) -> int: ... @@ -942,11 +954,15 @@ class memoryview(Sequence[_I]): def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: ... def __buffer__(self, flags: int, /) -> memoryview: ... def __release_buffer__(self, buffer: memoryview, /) -> None: ... + if sys.version_info >= (3, 14): + def index(self, value: object, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: ... + def count(self, value: object, /) -> int: ... + else: + # These are inherited from the Sequence ABC, but don't actually exist on memoryview. 
+ # See https://github.com/python/cpython/issues/125420 + index: ClassVar[None] # type: ignore[assignment] + count: ClassVar[None] # type: ignore[assignment] - # These are inherited from the Sequence ABC, but don't actually exist on memoryview. - # See https://github.com/python/cpython/issues/125420 - index: ClassVar[None] # type: ignore[assignment] - count: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 14): def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @@ -1365,13 +1381,6 @@ class property: def __set__(self, instance: Any, value: Any, /) -> None: ... def __delete__(self, instance: Any, /) -> None: ... -@final -@type_check_only -class _NotImplementedType(Any): - __call__: None - -NotImplemented: _NotImplementedType - def abs(x: SupportsAbs[_T], /) -> _T: ... def all(iterable: Iterable[object], /) -> bool: ... def any(iterable: Iterable[object], /) -> bool: ... @@ -2030,14 +2039,14 @@ def __import__( def __build_class__(func: Callable[[], CellType | Any], name: str, /, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: ... if sys.version_info >= (3, 10): - from types import EllipsisType + from types import EllipsisType, NotImplementedType # Backwards compatibility hack for folks who relied on the ellipsis type # existing in typeshed in Python 3.9 and earlier. ellipsis = EllipsisType Ellipsis: EllipsisType - + NotImplemented: NotImplementedType else: # Actually the type of Ellipsis is , but since it's # not exposed anywhere under that name, we make it private here. @@ -2047,6 +2056,12 @@ else: Ellipsis: ellipsis + @final + @type_check_only + class _NotImplementedType(Any): ... + + NotImplemented: _NotImplementedType + @disjoint_base class BaseException: args: tuple[Any, ...] 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/calendar.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/calendar.pyi index d00f0d5d2b..0d3a0a7490 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/calendar.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/calendar.pyi @@ -56,10 +56,12 @@ if sys.version_info >= (3, 12): _LocaleType: TypeAlias = tuple[str | None, str | None] -class IllegalMonthError(ValueError): +class IllegalMonthError(ValueError, IndexError): + month: int def __init__(self, month: int) -> None: ... class IllegalWeekdayError(ValueError): + weekday: int def __init__(self, weekday: int) -> None: ... def isleap(year: int) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/codecs.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/codecs.pyi index fa4d4fd4ba..4dfe3fd9e8 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/codecs.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/codecs.pyi @@ -5,7 +5,7 @@ from _typeshed import ReadableBuffer from abc import abstractmethod from collections.abc import Callable, Generator, Iterable from typing import Any, BinaryIO, ClassVar, Final, Literal, Protocol, TextIO, overload, type_check_only -from typing_extensions import Self, TypeAlias, disjoint_base +from typing_extensions import Self, TypeAlias, deprecated, disjoint_base __all__ = [ "register", @@ -191,6 +191,7 @@ def getincrementaldecoder(encoding: _BufferedEncoding) -> _BufferedIncrementalDe def getincrementaldecoder(encoding: str) -> _IncrementalDecoder: ... def getreader(encoding: str) -> _StreamReader: ... def getwriter(encoding: str) -> _StreamWriter: ... +@deprecated("Deprecated since Python 3.14. Use `open()` instead.") def open( filename: str, mode: str = "r", encoding: str | None = None, errors: str = "strict", buffering: int = -1 ) -> StreamReaderWriter: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/compression/_common/_streams.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/compression/_common/_streams.pyi index b8463973ec..96aec24d1c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/compression/_common/_streams.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/compression/_common/_streams.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, WriteableBuffer +from _typeshed import ReadableBuffer, WriteableBuffer from collections.abc import Callable from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase from typing import Any, Protocol, type_check_only @@ -11,13 +11,24 @@ class _Reader(Protocol): def seekable(self) -> bool: ... def seek(self, n: int, /) -> Any: ... +@type_check_only +class _Decompressor(Protocol): + def decompress(self, data: ReadableBuffer, /, max_length: int = ...) -> bytes: ... + @property + def unused_data(self) -> bytes: ... + @property + def eof(self) -> bool: ... + # `zlib._Decompress` does not have next property, but `DecompressReader` calls it: + # @property + # def needs_input(self) -> bool: ... + class BaseStream(BufferedIOBase): ... class DecompressReader(RawIOBase): def __init__( self, fp: _Reader, - decomp_factory: Callable[..., Incomplete], # Consider backporting changes to _compression + decomp_factory: Callable[..., _Decompressor], # Consider backporting changes to _compression trailing_error: type[Exception] | tuple[type[Exception], ...] = (), **decomp_args: Any, # These are passed to decomp_factory. ) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/compression/zstd/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/compression/zstd/__init__.pyi index d5da4be036..acfbe4913b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/compression/zstd/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/compression/zstd/__init__.pyi @@ -44,9 +44,14 @@ def get_frame_info(frame_buffer: ReadableBuffer) -> FrameInfo: ... def train_dict(samples: Iterable[ReadableBuffer], dict_size: int) -> ZstdDict: ... def finalize_dict(zstd_dict: ZstdDict, /, samples: Iterable[ReadableBuffer], dict_size: int, level: int) -> ZstdDict: ... def compress( - data: ReadableBuffer, level: int | None = None, options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None + data: ReadableBuffer, + level: int | None = None, + options: Mapping[int, int] | None = None, + zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, +) -> bytes: ... +def decompress( + data: ReadableBuffer, zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, options: Mapping[int, int] | None = None ) -> bytes: ... -def decompress(data: ReadableBuffer, zstd_dict: ZstdDict | None = None, options: Mapping[int, int] | None = None) -> bytes: ... @final class CompressionParameter(enum.IntEnum): compression_level = _zstd.ZSTD_c_compressionLevel diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/compression/zstd/_zstdfile.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/compression/zstd/_zstdfile.pyi index e67b3d992f..d37e6b1741 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/compression/zstd/_zstdfile.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/compression/zstd/_zstdfile.pyi @@ -36,7 +36,7 @@ class ZstdFile(_streams.BaseStream): *, level: None = None, options: Mapping[int, int] | None = None, - zstd_dict: ZstdDict | None = None, + zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, ) -> None: ... 
@overload def __init__( @@ -47,7 +47,7 @@ class ZstdFile(_streams.BaseStream): *, level: int | None = None, options: Mapping[int, int] | None = None, - zstd_dict: ZstdDict | None = None, + zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, ) -> None: ... def write(self, data: ReadableBuffer, /) -> int: ... def flush(self, mode: _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 1) -> bytes: ... # type: ignore[override] @@ -71,7 +71,7 @@ def open( *, level: None = None, options: Mapping[int, int] | None = None, - zstd_dict: ZstdDict | None = None, + zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, encoding: str | None = None, errors: str | None = None, newline: str | None = None, @@ -84,7 +84,7 @@ def open( *, level: int | None = None, options: Mapping[int, int] | None = None, - zstd_dict: ZstdDict | None = None, + zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, encoding: str | None = None, errors: str | None = None, newline: str | None = None, @@ -97,7 +97,7 @@ def open( *, level: None = None, options: Mapping[int, int] | None = None, - zstd_dict: ZstdDict | None = None, + zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, encoding: str | None = None, errors: str | None = None, newline: str | None = None, @@ -110,7 +110,7 @@ def open( *, level: int | None = None, options: Mapping[int, int] | None = None, - zstd_dict: ZstdDict | None = None, + zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, encoding: str | None = None, errors: str | None = None, newline: str | None = None, diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/contextlib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/contextlib.pyi index 383a1b7f33..221102ee23 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/contextlib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/contextlib.pyi @@ -4,7 +4,7 @@ from _typeshed import FileDescriptorOrPath, Unused from abc import ABC, abstractmethod from collections.abc 
import AsyncGenerator, AsyncIterator, Awaitable, Callable, Generator, Iterator from types import TracebackType -from typing import IO, Any, Generic, Protocol, TypeVar, overload, runtime_checkable, type_check_only +from typing import Any, Generic, Protocol, TypeVar, overload, runtime_checkable, type_check_only from typing_extensions import ParamSpec, Self, TypeAlias __all__ = [ @@ -30,7 +30,6 @@ if sys.version_info >= (3, 11): _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) -_T_io = TypeVar("_T_io", bound=IO[str] | None) _ExitT_co = TypeVar("_ExitT_co", covariant=True, bound=bool | None, default=bool | None) _F = TypeVar("_F", bound=Callable[..., Any]) _G_co = TypeVar("_G_co", bound=Generator[Any, Any, Any] | AsyncGenerator[Any, Any], covariant=True) @@ -141,14 +140,24 @@ class suppress(AbstractContextManager[None, bool]): self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None ) -> bool: ... -class _RedirectStream(AbstractContextManager[_T_io, None]): - def __init__(self, new_target: _T_io) -> None: ... +# This is trying to describe what is needed for (most?) uses +# of `redirect_stdout` and `redirect_stderr`. +# https://github.com/python/typeshed/issues/14903 +@type_check_only +class _SupportsRedirect(Protocol): + def write(self, s: str, /) -> int: ... + def flush(self) -> None: ... + +_SupportsRedirectT = TypeVar("_SupportsRedirectT", bound=_SupportsRedirect | None) + +class _RedirectStream(AbstractContextManager[_SupportsRedirectT, None]): + def __init__(self, new_target: _SupportsRedirectT) -> None: ... def __exit__( self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None ) -> None: ... -class redirect_stdout(_RedirectStream[_T_io]): ... -class redirect_stderr(_RedirectStream[_T_io]): ... +class redirect_stdout(_RedirectStream[_SupportsRedirectT]): ... +class redirect_stderr(_RedirectStream[_SupportsRedirectT]): ... 
class _BaseExitStack(Generic[_ExitT_co]): def enter_context(self, cm: AbstractContextManager[_T, _ExitT_co]) -> _T: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/ctypes/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/ctypes/__init__.pyi index 19bd261c67..be2e7449ef 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/ctypes/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/ctypes/__init__.pyi @@ -55,6 +55,9 @@ if sys.version_info >= (3, 14): else: from _ctypes import POINTER as POINTER, pointer as pointer +if sys.version_info >= (3, 14): + CField = _CField + DEFAULT_MODE: Final[int] class ArgumentError(Exception): ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/enum.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/enum.pyi index 4ac860f5e6..c6cc5a961e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/enum.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/enum.pyi @@ -255,6 +255,7 @@ _auto_null: Any class Flag(Enum): _name_: str | None # type: ignore[assignment] _value_: int + _numeric_repr_: Callable[[int], str] @_magic_enum_attr def name(self) -> str | None: ... # type: ignore[override] @_magic_enum_attr @@ -309,6 +310,7 @@ if sys.version_info >= (3, 11): def global_enum(cls: _EnumerationT, update_str: bool = False) -> _EnumerationT: ... def global_enum_repr(self: Enum) -> str: ... def global_flag_repr(self: Flag) -> str: ... + def show_flag_values(value: int) -> list[int]: ... if sys.version_info >= (3, 12): # The body of the class is the same, but the base classes are different. 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/heapq.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/heapq.pyi index 220c41f303..ff8ba7ff1f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/heapq.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/heapq.pyi @@ -1,3 +1,4 @@ +import sys from _heapq import * from _typeshed import SupportsRichComparison from collections.abc import Callable, Generator, Iterable @@ -5,6 +6,10 @@ from typing import Any, Final, TypeVar __all__ = ["heappush", "heappop", "heapify", "heapreplace", "merge", "nlargest", "nsmallest", "heappushpop"] +if sys.version_info >= (3, 14): + # Added to __all__ in 3.14.1 + __all__ += ["heapify_max", "heappop_max", "heappush_max", "heappushpop_max", "heapreplace_max"] + _S = TypeVar("_S") __about__: Final[str] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/html/parser.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/html/parser.pyi index 7edd39e8c7..08dc7b9369 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/html/parser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/html/parser.pyi @@ -9,7 +9,8 @@ class HTMLParser(ParserBase): # Added in Python 3.9.23, 3.10.18, 3.11.13, 3.12.11, 3.13.6 RCDATA_CONTENT_ELEMENTS: Final[tuple[str, ...]] - def __init__(self, *, convert_charrefs: bool = True) -> None: ... + # `scripting` parameter added in Python 3.9.25, 3.10.20, 3.11.15, 3.12.13, 3.13.10, 3.14.1 + def __init__(self, *, convert_charrefs: bool = True, scripting: bool = False) -> None: ... def feed(self, data: str) -> None: ... def close(self) -> None: ... def get_starttag_text(self) -> str | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/http/client.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/http/client.pyi index d259e84e6f..1568567d58 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/http/client.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/http/client.pyi @@ -166,6 +166,7 @@ class HTTPResponse(io.BufferedIOBase, BinaryIO): # type: ignore[misc] # incomp def begin(self) -> None: ... class HTTPConnection: + blocksize: int auto_open: int # undocumented debuglevel: int default_port: int # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/imaplib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/imaplib.pyi index 536985a592..1f0e010600 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/imaplib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/imaplib.pyi @@ -26,6 +26,7 @@ class IMAP4: class error(Exception): ... class abort(error): ... class readonly(abort): ... + utf8_enabled: bool mustquote: Pattern[str] debug: int state: str @@ -60,7 +61,7 @@ class IMAP4: def socket(self) -> _socket: ... def recent(self) -> _CommandResults: ... def response(self, code: str) -> _CommandResults: ... - def append(self, mailbox: str, flags: str, date_time: str, message: ReadableBuffer) -> str: ... + def append(self, mailbox: str, flags: str, date_time: str, message: ReadableBuffer) -> tuple[str, _list[bytes]]: ... def authenticate(self, mechanism: str, authobject: Callable[[bytes], bytes | None]) -> tuple[str, str]: ... def capability(self) -> _CommandResults: ... def check(self) -> _CommandResults: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/readers.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/readers.pyi index 4a6c739215..0e7f7ce165 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/readers.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/readers.pyi @@ -52,9 +52,9 @@ if sys.version_info >= (3, 10): def is_file(self) -> Literal[False]: ... if sys.version_info >= (3, 12): - def joinpath(self, *descendants: str) -> abc.Traversable: ... + def joinpath(self, *descendants: StrPath) -> abc.Traversable: ... elif sys.version_info >= (3, 11): - def joinpath(self, child: str) -> abc.Traversable: ... # type: ignore[override] + def joinpath(self, child: StrPath) -> abc.Traversable: ... # type: ignore[override] else: def joinpath(self, child: str) -> abc.Traversable: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/resources/abc.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/resources/abc.pyi index 80d92a6086..9be594a7dc 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/resources/abc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/resources/abc.pyi @@ -1,4 +1,5 @@ import sys +from _typeshed import StrPath from abc import ABCMeta, abstractmethod from collections.abc import Iterator from io import BufferedReader @@ -24,7 +25,7 @@ if sys.version_info >= (3, 11): @abstractmethod def iterdir(self) -> Iterator[Traversable]: ... @abstractmethod - def joinpath(self, *descendants: str) -> Traversable: ... + def joinpath(self, *descendants: StrPath) -> Traversable: ... # The documentation and runtime protocol allows *args, **kwargs arguments, # but this would mean that all implementers would have to support them, @@ -38,7 +39,7 @@ if sys.version_info >= (3, 11): @property @abstractmethod def name(self) -> str: ... - def __truediv__(self, child: str, /) -> Traversable: ... 
+ def __truediv__(self, child: StrPath, /) -> Traversable: ... @abstractmethod def read_bytes(self) -> bytes: ... @abstractmethod diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/resources/simple.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/resources/simple.pyi index c4c758111c..946987c731 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/resources/simple.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/resources/simple.pyi @@ -1,5 +1,6 @@ import abc import sys +from _typeshed import StrPath from collections.abc import Iterator from io import TextIOWrapper from typing import IO, Any, BinaryIO, Literal, NoReturn, overload @@ -50,7 +51,7 @@ if sys.version_info >= (3, 11): def iterdir(self) -> Iterator[ResourceHandle | ResourceContainer]: ... def open(self, *args: Never, **kwargs: Never) -> NoReturn: ... # type: ignore[override] if sys.version_info < (3, 12): - def joinpath(self, *descendants: str) -> Traversable: ... + def joinpath(self, *descendants: StrPath) -> Traversable: ... class TraversableReader(TraversableResources, SimpleReader, metaclass=abc.ABCMeta): def files(self) -> ResourceContainer: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/util.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/util.pyi index 05c4d0d1ed..577d3a667e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/util.pyi @@ -12,7 +12,9 @@ from importlib._bootstrap_external import ( spec_from_file_location as spec_from_file_location, ) from importlib.abc import Loader -from typing_extensions import ParamSpec, deprecated +from types import TracebackType +from typing import Literal +from typing_extensions import ParamSpec, Self, deprecated _P = ParamSpec("_P") @@ -44,6 +46,18 @@ class LazyLoader(Loader): def source_hash(source_bytes: ReadableBuffer) -> bytes: ... +if sys.version_info >= (3, 12): + class _incompatible_extension_module_restrictions: + def __init__(self, *, disable_check: bool) -> None: ... + disable_check: bool + old: Literal[-1, 0, 1] # exists only while entered + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + @property + def override(self) -> Literal[-1, 1]: ... # undocumented + if sys.version_info >= (3, 14): __all__ = [ "LazyLoader", diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/ipaddress.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/ipaddress.pyi index e2f3defa2d..d09804cb93 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/ipaddress.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/ipaddress.pyi @@ -73,7 +73,7 @@ class _BaseNetwork(_IPAddressBase, Generic[_A]): @property def broadcast_address(self) -> _A: ... def compare_networks(self, other: Self) -> int: ... - def hosts(self) -> Iterator[_A] | list[_A]: ... + def hosts(self) -> Iterator[_A]: ... @property def is_global(self) -> bool: ... 
@property diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/locale.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/locale.pyi index fae9f849b6..80c39a532d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/locale.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/locale.pyi @@ -153,6 +153,10 @@ if sys.version_info < (3, 12): def format_string(f: _str, val: Any, grouping: bool = False, monetary: bool = False) -> _str: ... def currency(val: float | Decimal, symbol: bool = True, grouping: bool = False, international: bool = False) -> _str: ... def delocalize(string: _str) -> _str: ... + +if sys.version_info >= (3, 10): + def localize(string: _str, grouping: bool = False, monetary: bool = False) -> _str: ... + def atof(string: _str, func: Callable[[_str], float] = ...) -> float: ... def atoi(string: _str) -> int: ... def str(val: float) -> _str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/mimetypes.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/mimetypes.pyi index 9914a34a2d..2390b2ce39 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/mimetypes.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/mimetypes.pyi @@ -1,6 +1,6 @@ import sys from _typeshed import StrPath -from collections.abc import Sequence +from collections.abc import Iterable from typing import IO __all__ = [ @@ -25,15 +25,15 @@ if sys.version_info >= (3, 13): def guess_type(url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... def guess_all_extensions(type: str, strict: bool = True) -> list[str]: ... def guess_extension(type: str, strict: bool = True) -> str | None: ... -def init(files: Sequence[str] | None = None) -> None: ... -def read_mime_types(file: str) -> dict[str, str] | None: ... +def init(files: Iterable[StrPath] | None = None) -> None: ... +def read_mime_types(file: StrPath) -> dict[str, str] | None: ... def add_type(type: str, ext: str, strict: bool = True) -> None: ... 
if sys.version_info >= (3, 13): def guess_file_type(path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: ... inited: bool -knownfiles: list[str] +knownfiles: list[StrPath] suffix_map: dict[str, str] encodings_map: dict[str, str] types_map: dict[str, str] @@ -44,12 +44,12 @@ class MimeTypes: encodings_map: dict[str, str] types_map: tuple[dict[str, str], dict[str, str]] types_map_inv: tuple[dict[str, str], dict[str, str]] - def __init__(self, filenames: tuple[str, ...] = (), strict: bool = True) -> None: ... + def __init__(self, filenames: Iterable[StrPath] = (), strict: bool = True) -> None: ... def add_type(self, type: str, ext: str, strict: bool = True) -> None: ... def guess_extension(self, type: str, strict: bool = True) -> str | None: ... def guess_type(self, url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... def guess_all_extensions(self, type: str, strict: bool = True) -> list[str]: ... - def read(self, filename: str, strict: bool = True) -> None: ... + def read(self, filename: StrPath, strict: bool = True) -> None: ... def readfp(self, fp: IO[str], strict: bool = True) -> None: ... def read_windows_registry(self, strict: bool = True) -> None: ... if sys.version_info >= (3, 13): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/mmap.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/mmap.pyi index 8a5baba629..98183acba4 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/mmap.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/mmap.pyi @@ -54,28 +54,28 @@ class mmap: ) -> Self: ... def close(self) -> None: ... - def flush(self, offset: int = 0, size: int = ...) -> None: ... - def move(self, dest: int, src: int, count: int) -> None: ... + def flush(self, offset: int = 0, size: int = ..., /) -> None: ... + def move(self, dest: int, src: int, count: int, /) -> None: ... def read_byte(self) -> int: ... def readline(self) -> bytes: ... - def resize(self, newsize: int) -> None: ... 
+ def resize(self, newsize: int, /) -> None: ... if sys.platform != "win32": - def seek(self, pos: int, whence: Literal[0, 1, 2, 3, 4] = os.SEEK_SET) -> None: ... + def seek(self, pos: int, whence: Literal[0, 1, 2, 3, 4] = os.SEEK_SET, /) -> None: ... else: - def seek(self, pos: int, whence: Literal[0, 1, 2] = os.SEEK_SET) -> None: ... + def seek(self, pos: int, whence: Literal[0, 1, 2] = os.SEEK_SET, /) -> None: ... def size(self) -> int: ... def tell(self) -> int: ... - def write_byte(self, byte: int) -> None: ... + def write_byte(self, byte: int, /) -> None: ... def __len__(self) -> int: ... closed: bool if sys.platform != "win32": - def madvise(self, option: int, start: int = 0, length: int = ...) -> None: ... + def madvise(self, option: int, start: int = 0, length: int = ..., /) -> None: ... - def find(self, sub: ReadableBuffer, start: int = ..., stop: int = ...) -> int: ... - def rfind(self, sub: ReadableBuffer, start: int = ..., stop: int = ...) -> int: ... - def read(self, n: int | None = None) -> bytes: ... - def write(self, bytes: ReadableBuffer) -> int: ... + def find(self, view: ReadableBuffer, start: int = ..., end: int = ..., /) -> int: ... + def rfind(self, view: ReadableBuffer, start: int = ..., end: int = ..., /) -> int: ... + def read(self, n: int | None = None, /) -> bytes: ... + def write(self, bytes: ReadableBuffer, /) -> int: ... @overload def __getitem__(self, key: int, /) -> int: ... @overload @@ -92,7 +92,7 @@ class mmap: # so we claim that there is also an __iter__ to help type checkers. def __iter__(self) -> Iterator[int]: ... def __enter__(self) -> Self: ... - def __exit__(self, *args: Unused) -> None: ... + def __exit__(self, exc_type: Unused, exc_value: Unused, traceback: Unused, /) -> None: ... def __buffer__(self, flags: int, /) -> memoryview: ... def __release_buffer__(self, buffer: memoryview, /) -> None: ... 
if sys.version_info >= (3, 13): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/dummy/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/dummy/__init__.pyi index 3cbeeb0577..62fef2b080 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/dummy/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/dummy/__init__.pyi @@ -1,4 +1,5 @@ import array +import sys import threading import weakref from collections.abc import Callable, Iterable, Mapping, Sequence @@ -44,14 +45,25 @@ class DummyProcess(threading.Thread): _start_called: int @property def exitcode(self) -> Literal[0] | None: ... - def __init__( - self, - group: Any = None, - target: Callable[..., object] | None = None, - name: str | None = None, - args: Iterable[Any] = (), - kwargs: Mapping[str, Any] = {}, - ) -> None: ... + if sys.version_info >= (3, 14): + # Default changed in Python 3.14.1 + def __init__( + self, + group: Any = None, + target: Callable[..., object] | None = None, + name: str | None = None, + args: Iterable[Any] = (), + kwargs: Mapping[str, Any] | None = None, + ) -> None: ... + else: + def __init__( + self, + group: Any = None, + target: Callable[..., object] | None = None, + name: str | None = None, + args: Iterable[Any] = (), + kwargs: Mapping[str, Any] | None = {}, + ) -> None: ... Process = DummyProcess diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/managers.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/managers.pyi index 5efe69a973..87a245b0fa 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/managers.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/managers.pyi @@ -192,6 +192,8 @@ class BaseListProxy(BaseProxy, MutableSequence[_T]): def count(self, value: _T, /) -> int: ... def insert(self, index: SupportsIndex, object: _T, /) -> None: ... 
def remove(self, value: _T, /) -> None: ... + if sys.version_info >= (3, 14): + def copy(self) -> list[_T]: ... # Use BaseListProxy[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison] # to work around invariance @overload @@ -326,8 +328,9 @@ class SyncManager(BaseManager): def dict(self, iterable: Iterable[list[str]], /) -> DictProxy[str, str]: ... @overload def dict(self, iterable: Iterable[list[bytes]], /) -> DictProxy[bytes, bytes]: ... + # Overloads are copied from builtins.list.__init__ @overload - def list(self, sequence: Sequence[_T], /) -> ListProxy[_T]: ... + def list(self, iterable: Iterable[_T], /) -> ListProxy[_T]: ... @overload def list(self) -> ListProxy[Any]: ... if sys.version_info >= (3, 14): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/process.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/process.pyi index 4d129b27b0..c7d13b318a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/process.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/process.pyi @@ -1,3 +1,4 @@ +import sys from collections.abc import Callable, Iterable, Mapping from typing import Any @@ -20,6 +21,9 @@ class BaseProcess: ) -> None: ... def run(self) -> None: ... def start(self) -> None: ... + if sys.version_info >= (3, 14): + def interrupt(self) -> None: ... + def terminate(self) -> None: ... def kill(self) -> None: ... def close(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/synchronize.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/synchronize.pyi index a0d97baa06..541e0b05dd 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/synchronize.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/synchronize.pyi @@ -1,3 +1,4 @@ +import sys import threading from collections.abc import Callable from multiprocessing.context import BaseContext @@ -45,6 +46,8 @@ class SemLock: # These methods are copied from the wrapped _multiprocessing.SemLock object def acquire(self, block: bool = True, timeout: float | None = None) -> bool: ... def release(self) -> None: ... + if sys.version_info >= (3, 14): + def locked(self) -> bool: ... class Lock(SemLock): def __init__(self, *, ctx: BaseContext) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/optparse.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/optparse.pyi index c522917992..305b6a4f06 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/optparse.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/optparse.pyi @@ -204,7 +204,7 @@ class OptionContainer: callback_kwargs: dict[str, Any] | None = None, help: str | None = None, metavar: str | None = None, - **kwargs, # Allow arbitrary keyword arguments for user defined option_class + **kwargs: Any, # Allow arbitrary keyword arguments for user defined option_class ) -> Option: ... def add_options(self, option_list: Iterable[Option]) -> None: ... def destroy(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/os/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/os/__init__.pyi index 71c79dfac3..7801b91702 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/os/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/os/__init__.pyi @@ -41,10 +41,22 @@ from typing import ( runtime_checkable, type_check_only, ) -from typing_extensions import Self, TypeAlias, Unpack, deprecated +from typing_extensions import LiteralString, Self, TypeAlias, Unpack, deprecated from . import path as _path +# Re-export common definitions from os.path to reduce duplication +from .path import ( + altsep as altsep, + curdir as curdir, + defpath as defpath, + devnull as devnull, + extsep as extsep, + pardir as pardir, + pathsep as pathsep, + sep as sep, +) + __all__ = [ "F_OK", "O_APPEND", @@ -162,7 +174,8 @@ __all__ = [ "write", ] if sys.version_info >= (3, 14): - __all__ += ["readinto"] + # reload_environ was added to __all__ in Python 3.14.1 + __all__ += ["readinto", "reload_environ"] if sys.platform == "darwin" and sys.version_info >= (3, 12): __all__ += ["PRIO_DARWIN_BG", "PRIO_DARWIN_NONUI", "PRIO_DARWIN_PROCESS", "PRIO_DARWIN_THREAD"] if sys.platform == "darwin" and sys.version_info >= (3, 10): @@ -674,19 +687,8 @@ if sys.platform != "win32": ST_NOSUID: Final[int] ST_RDONLY: Final[int] -curdir: str -pardir: str -sep: str -if sys.platform == "win32": - altsep: str -else: - altsep: str | None -extsep: str -pathsep: str -defpath: str linesep: Literal["\n", "\r\n"] -devnull: str -name: str +name: LiteralString F_OK: Final = 0 R_OK: Final = 4 @@ -708,6 +710,18 @@ class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]): encodevalue: _EnvironCodeFunc[AnyStr], decodevalue: _EnvironCodeFunc[AnyStr], ) -> None: ... + @overload + def get(self, key: AnyStr, default: None = None) -> AnyStr | None: ... + @overload + def get(self, key: AnyStr, default: AnyStr) -> AnyStr: ... 
+ @overload + def get(self, key: AnyStr, default: _T) -> AnyStr | _T: ... + @overload + def pop(self, key: AnyStr) -> AnyStr: ... + @overload + def pop(self, key: AnyStr, default: AnyStr) -> AnyStr: ... + @overload + def pop(self, key: AnyStr, default: _T) -> AnyStr | _T: ... def setdefault(self, key: AnyStr, value: AnyStr) -> AnyStr: ... def copy(self) -> dict[AnyStr, AnyStr]: ... def __delitem__(self, key: AnyStr) -> None: ... @@ -729,6 +743,9 @@ environ: _Environ[str] if sys.platform != "win32": environb: _Environ[bytes] +if sys.version_info >= (3, 14): + def reload_environ() -> None: ... + if sys.version_info >= (3, 11) or sys.platform != "win32": EX_OK: Final[int] @@ -1390,19 +1407,48 @@ class _wrap_close: def write(self, s: str, /) -> int: ... def writelines(self, lines: Iterable[str], /) -> None: ... -def popen(cmd: str, mode: str = "r", buffering: int = -1) -> _wrap_close: ... -def spawnl(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... -def spawnle(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise sig +if sys.version_info >= (3, 14): + @deprecated("Soft deprecated. Use the subprocess module instead.") + def popen(cmd: str, mode: str = "r", buffering: int = -1) -> _wrap_close: ... + @deprecated("Soft deprecated. Use the subprocess module instead.") + def spawnl(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... + @deprecated("Soft deprecated. Use the subprocess module instead.") + def spawnle(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise sig + +else: + def popen(cmd: str, mode: str = "r", buffering: int = -1) -> _wrap_close: ... + def spawnl(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... + def spawnle(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... 
# Imprecise sig if sys.platform != "win32": - def spawnv(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... - def spawnve(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... + if sys.version_info >= (3, 14): + @deprecated("Soft deprecated. Use the subprocess module instead.") + def spawnv(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... + @deprecated("Soft deprecated. Use the subprocess module instead.") + def spawnve(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... + + else: + def spawnv(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... + def spawnve(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... else: - def spawnv(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, /) -> int: ... - def spawnve(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, env: _ExecEnv, /) -> int: ... + if sys.version_info >= (3, 14): + @deprecated("Soft deprecated. Use the subprocess module instead.") + def spawnv(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, /) -> int: ... + @deprecated("Soft deprecated. Use the subprocess module instead.") + def spawnve(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, env: _ExecEnv, /) -> int: ... + + else: + def spawnv(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, /) -> int: ... + def spawnve(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, env: _ExecEnv, /) -> int: ... + +if sys.version_info >= (3, 14): + @deprecated("Soft deprecated. Use the subprocess module instead.") + def system(command: StrOrBytesPath) -> int: ... + +else: + def system(command: StrOrBytesPath) -> int: ... -def system(command: StrOrBytesPath) -> int: ... @final class times_result(structseq[float], tuple[float, float, float, float, float]): if sys.version_info >= (3, 10): @@ -1435,10 +1481,22 @@ if sys.platform == "win32": def startfile(filepath: StrOrBytesPath, operation: str = ...) -> None: ... 
else: - def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... - def spawnlpe(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise signature - def spawnvp(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... - def spawnvpe(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... + if sys.version_info >= (3, 14): + @deprecated("Soft deprecated. Use the subprocess module instead.") + def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... + @deprecated("Soft deprecated. Use the subprocess module instead.") + def spawnlpe(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise signature + @deprecated("Soft deprecated. Use the subprocess module instead.") + def spawnvp(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... + @deprecated("Soft deprecated. Use the subprocess module instead.") + def spawnvpe(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... + + else: + def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... + def spawnlpe(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise signature + def spawnvp(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... + def spawnvpe(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... + def wait() -> tuple[int, int]: ... # Unix only # Added to MacOS in 3.13 if sys.platform != "darwin" or sys.version_info >= (3, 13): @@ -1472,34 +1530,66 @@ else: def WEXITSTATUS(status: int) -> int: ... def WSTOPSIG(status: int) -> int: ... def WTERMSIG(status: int) -> int: ... 
- def posix_spawn( - path: StrOrBytesPath, - argv: _ExecVArgs, - env: _ExecEnv, - /, - *, - file_actions: Sequence[tuple[Any, ...]] | None = ..., - setpgroup: int | None = ..., - resetids: bool = ..., - setsid: bool = ..., - setsigmask: Iterable[int] = ..., - setsigdef: Iterable[int] = ..., - scheduler: tuple[Any, sched_param] | None = ..., - ) -> int: ... - def posix_spawnp( - path: StrOrBytesPath, - argv: _ExecVArgs, - env: _ExecEnv, - /, - *, - file_actions: Sequence[tuple[Any, ...]] | None = ..., - setpgroup: int | None = ..., - resetids: bool = ..., - setsid: bool = ..., - setsigmask: Iterable[int] = ..., - setsigdef: Iterable[int] = ..., - scheduler: tuple[Any, sched_param] | None = ..., - ) -> int: ... + + if sys.version_info >= (3, 13): + def posix_spawn( + path: StrOrBytesPath, + argv: _ExecVArgs, + env: _ExecEnv | None, # None allowed starting in 3.13 + /, + *, + file_actions: Sequence[tuple[Any, ...]] | None = ..., + setpgroup: int | None = ..., + resetids: bool = ..., + setsid: bool = ..., + setsigmask: Iterable[int] = ..., + setsigdef: Iterable[int] = ..., + scheduler: tuple[Any, sched_param] | None = ..., + ) -> int: ... + def posix_spawnp( + path: StrOrBytesPath, + argv: _ExecVArgs, + env: _ExecEnv | None, # None allowed starting in 3.13 + /, + *, + file_actions: Sequence[tuple[Any, ...]] | None = ..., + setpgroup: int | None = ..., + resetids: bool = ..., + setsid: bool = ..., + setsigmask: Iterable[int] = ..., + setsigdef: Iterable[int] = ..., + scheduler: tuple[Any, sched_param] | None = ..., + ) -> int: ... + else: + def posix_spawn( + path: StrOrBytesPath, + argv: _ExecVArgs, + env: _ExecEnv, + /, + *, + file_actions: Sequence[tuple[Any, ...]] | None = ..., + setpgroup: int | None = ..., + resetids: bool = ..., + setsid: bool = ..., + setsigmask: Iterable[int] = ..., + setsigdef: Iterable[int] = ..., + scheduler: tuple[Any, sched_param] | None = ..., + ) -> int: ... 
+ def posix_spawnp( + path: StrOrBytesPath, + argv: _ExecVArgs, + env: _ExecEnv, + /, + *, + file_actions: Sequence[tuple[Any, ...]] | None = ..., + setpgroup: int | None = ..., + resetids: bool = ..., + setsid: bool = ..., + setsigmask: Iterable[int] = ..., + setsigdef: Iterable[int] = ..., + scheduler: tuple[Any, sched_param] | None = ..., + ) -> int: ... + POSIX_SPAWN_OPEN: Final = 0 POSIX_SPAWN_CLOSE: Final = 1 POSIX_SPAWN_DUP2: Final = 2 diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/parser.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/parser.pyi index 26140c7624..9b287fcc65 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/parser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/parser.pyi @@ -7,8 +7,8 @@ def expr(source: str) -> STType: ... def suite(source: str) -> STType: ... def sequence2st(sequence: Sequence[Any]) -> STType: ... def tuple2st(sequence: Sequence[Any]) -> STType: ... -def st2list(st: STType, line_info: bool = ..., col_info: bool = ...) -> list[Any]: ... -def st2tuple(st: STType, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: ... +def st2list(st: STType, line_info: bool = False, col_info: bool = False) -> list[Any]: ... +def st2tuple(st: STType, line_info: bool = False, col_info: bool = False) -> tuple[Any, ...]: ... def compilest(st: STType, filename: StrOrBytesPath = ...) -> CodeType: ... def isexpr(st: STType) -> bool: ... def issuite(st: STType) -> bool: ... @@ -21,5 +21,5 @@ class STType: def compile(self, filename: StrOrBytesPath = ...) -> CodeType: ... def isexpr(self) -> bool: ... def issuite(self) -> bool: ... - def tolist(self, line_info: bool = ..., col_info: bool = ...) -> list[Any]: ... - def totuple(self, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: ... + def tolist(self, line_info: bool = False, col_info: bool = False) -> list[Any]: ... + def totuple(self, line_info: bool = False, col_info: bool = False) -> tuple[Any, ...]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pathlib/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pathlib/__init__.pyi index fa5143f202..26dde2accd 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pathlib/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pathlib/__init__.pyi @@ -90,6 +90,7 @@ class PurePath(PathLike[str]): def __rtruediv__(self, key: StrPath) -> Self: ... def __bytes__(self) -> bytes: ... def as_posix(self) -> str: ... + @deprecated("Deprecated since Python 3.14; will be removed in Python 3.19. Use `Path.as_uri()` instead.") def as_uri(self) -> str: ... def is_absolute(self) -> bool: ... if sys.version_info >= (3, 13): @@ -103,6 +104,10 @@ class PurePath(PathLike[str]): if sys.version_info >= (3, 14): def is_relative_to(self, other: StrPath) -> bool: ... elif sys.version_info >= (3, 12): + @overload + def is_relative_to(self, other: StrPath, /) -> bool: ... + @overload + @deprecated("Passing additional arguments is deprecated since Python 3.12; removed in Python 3.14.") def is_relative_to(self, other: StrPath, /, *_deprecated: StrPath) -> bool: ... else: def is_relative_to(self, *other: StrPath) -> bool: ... @@ -115,6 +120,10 @@ class PurePath(PathLike[str]): if sys.version_info >= (3, 14): def relative_to(self, other: StrPath, *, walk_up: bool = False) -> Self: ... elif sys.version_info >= (3, 12): + @overload + def relative_to(self, other: StrPath, /, *, walk_up: bool = False) -> Self: ... + @overload + @deprecated("Passing additional arguments is deprecated since Python 3.12; removed in Python 3.14.") def relative_to(self, other: StrPath, /, *_deprecated: StrPath, walk_up: bool = False) -> Self: ... else: def relative_to(self, *other: StrPath) -> Self: ... @@ -345,6 +354,8 @@ class Path(PurePath): self, top_down: bool = True, on_error: Callable[[OSError], object] | None = None, follow_symlinks: bool = False ) -> Iterator[tuple[Self, list[str], list[str]]]: ... 
+ def as_uri(self) -> str: ... + class PosixPath(Path, PurePosixPath): __slots__ = () diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pdb.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pdb.pyi index 2f114b2057..dc1cf3b280 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pdb.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pdb.pyi @@ -8,7 +8,7 @@ from linecache import _ModuleGlobals from rlcompleter import Completer from types import CodeType, FrameType, TracebackType from typing import IO, Any, ClassVar, Final, Literal, TypeVar -from typing_extensions import ParamSpec, Self, TypeAlias +from typing_extensions import ParamSpec, Self, TypeAlias, deprecated __all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace", "post_mortem", "help"] if sys.version_info >= (3, 14): @@ -60,7 +60,17 @@ class Pdb(Bdb, Cmd): stack: list[tuple[FrameType, int]] curindex: int curframe: FrameType | None - curframe_locals: Mapping[str, Any] + if sys.version_info >= (3, 13): + @property + @deprecated("The frame locals reference is no longer cached. Use 'curframe.f_locals' instead.") + def curframe_locals(self) -> Mapping[str, Any]: ... + @curframe_locals.setter + @deprecated( + "Setting 'curframe_locals' no longer has any effect as of 3.14. Update the contents of 'curframe.f_locals' instead." + ) + def curframe_locals(self, value: Mapping[str, Any]) -> None: ... + else: + curframe_locals: Mapping[str, Any] if sys.version_info >= (3, 14): mode: _Mode | None colorize: bool @@ -201,8 +211,8 @@ class Pdb(Bdb, Cmd): def completenames(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... # type: ignore[override] if sys.version_info >= (3, 12): def set_convenience_variable(self, frame: FrameType, name: str, value: Any) -> None: ... - if sys.version_info >= (3, 13) and sys.version_info < (3, 14): - # Added in 3.13.8. 
+ if sys.version_info >= (3, 13): + # Added in 3.13.8 and 3.14.1 @property def rlcompleter(self) -> type[Completer]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pyexpat/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pyexpat/__init__.pyi index 21e6760520..bc522d5f3c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pyexpat/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pyexpat/__init__.pyi @@ -1,3 +1,4 @@ +import sys from _typeshed import ReadableBuffer, SupportsRead from collections.abc import Callable from pyexpat import errors as errors, model as model @@ -29,6 +30,11 @@ class XMLParserType: def UseForeignDTD(self, flag: bool = True, /) -> None: ... def GetReparseDeferralEnabled(self) -> bool: ... def SetReparseDeferralEnabled(self, enabled: bool, /) -> None: ... + if sys.version_info >= (3, 10): + # Added in Python 3.10.20, 3.11.15, 3.12.3, 3.13.10, 3.14.1 + def SetAllocTrackerActivationThreshold(self, threshold: int, /) -> None: ... + def SetAllocTrackerMaximumAmplification(self, max_factor: float, /) -> None: ... + @property def intern(self) -> dict[str, str]: ... buffer_size: int diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/select.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/select.pyi index 587bc75376..43a9e4274b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/select.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/select.pyi @@ -2,8 +2,8 @@ import sys from _typeshed import FileDescriptorLike from collections.abc import Iterable from types import TracebackType -from typing import Any, ClassVar, Final, final -from typing_extensions import Self +from typing import Any, ClassVar, Final, TypeVar, final +from typing_extensions import Never, Self if sys.platform != "win32": PIPE_BUF: Final[int] @@ -31,9 +31,13 @@ if sys.platform != "win32": def unregister(self, fd: FileDescriptorLike, /) -> None: ... 
def poll(self, timeout: float | None = None, /) -> list[tuple[int, int]]: ... +_R = TypeVar("_R", default=Never) +_W = TypeVar("_W", default=Never) +_X = TypeVar("_X", default=Never) + def select( - rlist: Iterable[Any], wlist: Iterable[Any], xlist: Iterable[Any], timeout: float | None = None, / -) -> tuple[list[Any], list[Any], list[Any]]: ... + rlist: Iterable[_R], wlist: Iterable[_W], xlist: Iterable[_X], timeout: float | None = None, / +) -> tuple[list[_R], list[_W], list[_X]]: ... error = OSError diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/socket.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/socket.pyi index b10b3560b9..92bf48c3a2 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/socket.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/socket.pyi @@ -969,6 +969,21 @@ if sys.platform != "linux": if sys.platform != "darwin" and sys.platform != "linux": __all__ += ["AF_BLUETOOTH"] +if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux": + from _socket import BTPROTO_HCI as BTPROTO_HCI, BTPROTO_L2CAP as BTPROTO_L2CAP, BTPROTO_SCO as BTPROTO_SCO + + __all__ += ["BTPROTO_HCI", "BTPROTO_L2CAP", "BTPROTO_SCO"] + +if sys.platform != "win32" and sys.platform != "darwin" and sys.platform != "linux": + from _socket import HCI_DATA_DIR as HCI_DATA_DIR, HCI_FILTER as HCI_FILTER, HCI_TIME_STAMP as HCI_TIME_STAMP + + __all__ += ["HCI_FILTER", "HCI_TIME_STAMP", "HCI_DATA_DIR"] + +if sys.version_info >= (3, 11) and sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin": + from _socket import LOCAL_CREDS as LOCAL_CREDS, LOCAL_CREDS_PERSISTENT as LOCAL_CREDS_PERSISTENT, SCM_CREDS2 as SCM_CREDS2 + + __all__ += ["SCM_CREDS2", "LOCAL_CREDS", "LOCAL_CREDS_PERSISTENT"] + if sys.platform == "win32" and sys.version_info >= (3, 12): __all__ += ["AF_HYPERV"] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/sqlite3/__init__.pyi 
b/packages/pyright-internal/typeshed-fallback/stdlib/sqlite3/__init__.pyi index 6b0f1ba949..04b978b1b5 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/sqlite3/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/sqlite3/__init__.pyi @@ -222,6 +222,7 @@ _AdaptedInputData: TypeAlias = _SqliteData | Any _Parameters: TypeAlias = SupportsLenAndGetItem[_AdaptedInputData] | Mapping[str, _AdaptedInputData] # Controls the legacy transaction handling mode of sqlite3. _IsolationLevel: TypeAlias = Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None +_RowFactoryOptions: TypeAlias = type[Row] | Callable[[Cursor, Row], object] | None @type_check_only class _AnyParamWindowAggregateClass(Protocol): @@ -300,7 +301,7 @@ class Connection: def autocommit(self) -> int: ... @autocommit.setter def autocommit(self, val: int) -> None: ... - row_factory: Any + row_factory: _RowFactoryOptions text_factory: Any if sys.version_info >= (3, 12): def __init__( @@ -416,7 +417,7 @@ class Cursor: def description(self) -> tuple[tuple[str, None, None, None, None, None, None], ...] | MaybeNone: ... @property def lastrowid(self) -> int | None: ... - row_factory: Callable[[Cursor, Row], object] | None + row_factory: _RowFactoryOptions @property def rowcount(self) -> int: ... def __init__(self, cursor: Connection, /) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/ssl.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/ssl.pyi index faa98cb399..aa94fc8425 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/ssl.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/ssl.pyi @@ -33,6 +33,9 @@ from typing_extensions import Never, Self, TypeAlias, deprecated if sys.version_info >= (3, 13): from _ssl import HAS_PSK as HAS_PSK +if sys.version_info >= (3, 14): + from _ssl import HAS_PHA as HAS_PHA + if sys.version_info < (3, 12): from _ssl import RAND_pseudo_bytes as RAND_pseudo_bytes diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/stat.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/stat.pyi index face28ab0c..6c26080e06 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/stat.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/stat.pyi @@ -1,7 +1,114 @@ import sys -from _stat import * +from _stat import ( + S_ENFMT as S_ENFMT, + S_IEXEC as S_IEXEC, + S_IFBLK as S_IFBLK, + S_IFCHR as S_IFCHR, + S_IFDIR as S_IFDIR, + S_IFDOOR as S_IFDOOR, + S_IFIFO as S_IFIFO, + S_IFLNK as S_IFLNK, + S_IFMT as S_IFMT, + S_IFPORT as S_IFPORT, + S_IFREG as S_IFREG, + S_IFSOCK as S_IFSOCK, + S_IFWHT as S_IFWHT, + S_IMODE as S_IMODE, + S_IREAD as S_IREAD, + S_IRGRP as S_IRGRP, + S_IROTH as S_IROTH, + S_IRUSR as S_IRUSR, + S_IRWXG as S_IRWXG, + S_IRWXO as S_IRWXO, + S_IRWXU as S_IRWXU, + S_ISBLK as S_ISBLK, + S_ISCHR as S_ISCHR, + S_ISDIR as S_ISDIR, + S_ISDOOR as S_ISDOOR, + S_ISFIFO as S_ISFIFO, + S_ISGID as S_ISGID, + S_ISLNK as S_ISLNK, + S_ISPORT as S_ISPORT, + S_ISREG as S_ISREG, + S_ISSOCK as S_ISSOCK, + S_ISUID as S_ISUID, + S_ISVTX as S_ISVTX, + S_ISWHT as S_ISWHT, + S_IWGRP as S_IWGRP, + S_IWOTH as S_IWOTH, + S_IWRITE as S_IWRITE, + S_IWUSR as S_IWUSR, + S_IXGRP as S_IXGRP, + S_IXOTH as S_IXOTH, + S_IXUSR as S_IXUSR, + SF_APPEND as SF_APPEND, + SF_ARCHIVED as SF_ARCHIVED, + SF_IMMUTABLE as SF_IMMUTABLE, + 
SF_NOUNLINK as SF_NOUNLINK, + SF_SNAPSHOT as SF_SNAPSHOT, + ST_ATIME as ST_ATIME, + ST_CTIME as ST_CTIME, + ST_DEV as ST_DEV, + ST_GID as ST_GID, + ST_INO as ST_INO, + ST_MODE as ST_MODE, + ST_MTIME as ST_MTIME, + ST_NLINK as ST_NLINK, + ST_SIZE as ST_SIZE, + ST_UID as ST_UID, + UF_APPEND as UF_APPEND, + UF_COMPRESSED as UF_COMPRESSED, + UF_HIDDEN as UF_HIDDEN, + UF_IMMUTABLE as UF_IMMUTABLE, + UF_NODUMP as UF_NODUMP, + UF_NOUNLINK as UF_NOUNLINK, + UF_OPAQUE as UF_OPAQUE, + filemode as filemode, +) from typing import Final +if sys.platform == "win32": + from _stat import ( + IO_REPARSE_TAG_APPEXECLINK as IO_REPARSE_TAG_APPEXECLINK, + IO_REPARSE_TAG_MOUNT_POINT as IO_REPARSE_TAG_MOUNT_POINT, + IO_REPARSE_TAG_SYMLINK as IO_REPARSE_TAG_SYMLINK, + ) + +if sys.version_info >= (3, 13): + from _stat import ( + SF_DATALESS as SF_DATALESS, + SF_FIRMLINK as SF_FIRMLINK, + SF_SETTABLE as SF_SETTABLE, + UF_DATAVAULT as UF_DATAVAULT, + UF_SETTABLE as UF_SETTABLE, + UF_TRACKED as UF_TRACKED, + ) + + if sys.platform == "darwin": + from _stat import SF_SUPPORTED as SF_SUPPORTED, SF_SYNTHETIC as SF_SYNTHETIC + +# _stat.c defines FILE_ATTRIBUTE_* constants conditionally, +# making them available only at runtime on Windows. +# stat.py unconditionally redefines the same FILE_ATTRIBUTE_* constants +# on all platforms. 
+FILE_ATTRIBUTE_ARCHIVE: Final = 32 +FILE_ATTRIBUTE_COMPRESSED: Final = 2048 +FILE_ATTRIBUTE_DEVICE: Final = 64 +FILE_ATTRIBUTE_DIRECTORY: Final = 16 +FILE_ATTRIBUTE_ENCRYPTED: Final = 16384 +FILE_ATTRIBUTE_HIDDEN: Final = 2 +FILE_ATTRIBUTE_INTEGRITY_STREAM: Final = 32768 +FILE_ATTRIBUTE_NORMAL: Final = 128 +FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Final = 8192 +FILE_ATTRIBUTE_NO_SCRUB_DATA: Final = 131072 +FILE_ATTRIBUTE_OFFLINE: Final = 4096 +FILE_ATTRIBUTE_READONLY: Final = 1 +FILE_ATTRIBUTE_REPARSE_POINT: Final = 1024 +FILE_ATTRIBUTE_SPARSE_FILE: Final = 512 +FILE_ATTRIBUTE_SYSTEM: Final = 4 +FILE_ATTRIBUTE_TEMPORARY: Final = 256 +FILE_ATTRIBUTE_VIRTUAL: Final = 65536 + if sys.version_info >= (3, 13): # https://github.com/python/cpython/issues/114081#issuecomment-2119017790 SF_RESTRICTED: Final = 0x00080000 diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/subprocess.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/subprocess.pyi index e1e25bcb50..f6d7b88193 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/subprocess.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/subprocess.pyi @@ -96,7 +96,7 @@ if sys.version_info >= (3, 11): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -130,7 +130,7 @@ if sys.version_info >= (3, 11): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -164,7 +164,7 @@ if sys.version_info >= (3, 11): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: 
StrOrBytesPath | None = None, @@ -198,7 +198,7 @@ if sys.version_info >= (3, 11): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -233,7 +233,7 @@ if sys.version_info >= (3, 11): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -267,7 +267,7 @@ if sys.version_info >= (3, 11): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -304,7 +304,7 @@ elif sys.version_info >= (3, 10): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -337,7 +337,7 @@ elif sys.version_info >= (3, 10): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -370,7 +370,7 @@ elif sys.version_info >= (3, 10): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -403,7 +403,7 @@ elif sys.version_info >= (3, 10): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], 
object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -437,7 +437,7 @@ elif sys.version_info >= (3, 10): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -470,7 +470,7 @@ elif sys.version_info >= (3, 10): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -497,7 +497,6 @@ elif sys.version_info >= (3, 10): ) -> CompletedProcess[Any]: ... else: - # 3.9 adds arguments "user", "group", "extra_groups" and "umask" @overload def run( args: _CMD, @@ -506,7 +505,7 @@ else: stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -538,7 +537,7 @@ else: stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -570,7 +569,7 @@ else: stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -602,7 +601,7 @@ else: stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -635,7 +634,7 @@ else: stdin: _FILE = 
None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -667,7 +666,7 @@ else: stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -702,7 +701,7 @@ if sys.version_info >= (3, 11): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -734,7 +733,7 @@ elif sys.version_info >= (3, 10): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -764,7 +763,7 @@ else: stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -795,7 +794,7 @@ if sys.version_info >= (3, 11): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -827,7 +826,7 @@ elif sys.version_info >= (3, 10): stdin: _FILE = None, stdout: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -857,7 +856,7 @@ else: stdin: _FILE = None, stdout: 
_FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -887,7 +886,7 @@ if sys.version_info >= (3, 11): executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -918,7 +917,7 @@ if sys.version_info >= (3, 11): executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -949,7 +948,7 @@ if sys.version_info >= (3, 11): executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -980,7 +979,7 @@ if sys.version_info >= (3, 11): executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1012,7 +1011,7 @@ if sys.version_info >= (3, 11): executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1043,7 +1042,7 @@ if sys.version_info >= (3, 11): executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], 
object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1077,7 +1076,7 @@ elif sys.version_info >= (3, 10): executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1107,7 +1106,7 @@ elif sys.version_info >= (3, 10): executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1137,7 +1136,7 @@ elif sys.version_info >= (3, 10): executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1167,7 +1166,7 @@ elif sys.version_info >= (3, 10): executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1198,7 +1197,7 @@ elif sys.version_info >= (3, 10): executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1228,7 +1227,7 @@ elif sys.version_info >= (3, 10): executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | 
None = None, @@ -1260,7 +1259,7 @@ else: executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1289,7 +1288,7 @@ else: executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1318,7 +1317,7 @@ else: executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1347,7 +1346,7 @@ else: executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1377,7 +1376,7 @@ else: executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1406,7 +1405,7 @@ else: executable: StrOrBytesPath | None = None, stdin: _FILE = None, stderr: _FILE = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1481,7 +1480,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: 
bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1512,7 +1511,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1543,7 +1542,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1575,7 +1574,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1606,7 +1605,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1637,7 +1636,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1670,7 +1669,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1700,7 +1699,7 @@ class 
Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1730,7 +1729,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1761,7 +1760,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1791,7 +1790,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1821,7 +1820,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1852,7 +1851,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1881,7 +1880,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, 
- preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1910,7 +1909,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1940,7 +1939,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1969,7 +1968,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, @@ -1998,7 +1997,7 @@ class Popen(Generic[AnyStr]): stdin: _FILE | None = None, stdout: _FILE | None = None, stderr: _FILE | None = None, - preexec_fn: Callable[[], Any] | None = None, + preexec_fn: Callable[[], object] | None = None, close_fds: bool = True, shell: bool = False, cwd: StrOrBytesPath | None = None, diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/sys/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/sys/__init__.pyi index 7807b0eab0..6abef85dfb 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/sys/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/sys/__init__.pyi @@ -5,7 +5,7 @@ from builtins import object as _object from collections.abc import AsyncGenerator, Callable, Sequence from io import TextIOWrapper from types import FrameType, ModuleType, TracebackType -from typing import Any, 
Final, Literal, NoReturn, Protocol, TextIO, TypeVar, final, type_check_only +from typing import Any, Final, Literal, NoReturn, Protocol, TextIO, TypeVar, final, overload, type_check_only from typing_extensions import LiteralString, TypeAlias, deprecated _T = TypeVar("_T") @@ -354,6 +354,13 @@ else: def _current_frames() -> dict[int, FrameType]: ... def _getframe(depth: int = 0, /) -> FrameType: ... +# documented -- see https://docs.python.org/3/library/sys.html#sys._current_exceptions +if sys.version_info >= (3, 12): + def _current_exceptions() -> dict[int, BaseException | None]: ... + +else: + def _current_exceptions() -> dict[int, OptExcInfo]: ... + if sys.version_info >= (3, 12): def _getframemodulename(depth: int = 0) -> str | None: ... @@ -366,14 +373,18 @@ if sys.version_info >= (3, 11): def exception() -> BaseException | None: ... def exit(status: _ExitCode = None, /) -> NoReturn: ... + +if sys.platform == "android": # noqa: Y008 + def getandroidapilevel() -> int: ... + def getallocatedblocks() -> int: ... -def getdefaultencoding() -> str: ... +def getdefaultencoding() -> Literal["utf-8"]: ... if sys.platform != "win32": def getdlopenflags() -> int: ... -def getfilesystemencoding() -> str: ... -def getfilesystemencodeerrors() -> str: ... +def getfilesystemencoding() -> LiteralString: ... +def getfilesystemencodeerrors() -> LiteralString: ... def getrefcount(object: Any, /) -> int: ... def getrecursionlimit() -> int: ... def getsizeof(obj: object, default: int = ...) -> int: ... @@ -411,7 +422,12 @@ if sys.platform == "win32": def getwindowsversion() -> _WinVersion: ... -def intern(string: str, /) -> str: ... +@overload +def intern(string: LiteralString, /) -> LiteralString: ... +@overload +def intern(string: str, /) -> str: ... # type: ignore[misc] + +__interactivehook__: Callable[[], object] if sys.version_info >= (3, 13): def _is_gil_enabled() -> bool: ... 
@@ -501,3 +517,4 @@ if sys.version_info >= (3, 12): if sys.version_info >= (3, 14): def is_remote_debug_enabled() -> bool: ... def remote_exec(pid: int, script: StrOrBytesPath) -> None: ... + def _is_immortal(op: object, /) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/sys/_monitoring.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/sys/_monitoring.pyi index 5d231c7a93..83f1e7dd0f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/sys/_monitoring.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/sys/_monitoring.pyi @@ -17,6 +17,10 @@ PROFILER_ID: Final = 2 OPTIMIZER_ID: Final = 5 def use_tool_id(tool_id: int, name: str, /) -> None: ... + +if sys.version_info >= (3, 14): + def clear_tool_id(tool_id: int, /) -> None: ... + def free_tool_id(tool_id: int, /) -> None: ... def get_tool(tool_id: int, /) -> str | None: ... @@ -43,10 +47,10 @@ class _events: STOP_ITERATION: Final[int] if sys.version_info >= (3, 14): BRANCH_LEFT: Final[int] - BRANCH_TAKEN: Final[int] + BRANCH_RIGHT: Final[int] @property - @deprecated("Deprecated since Python 3.14. Use `BRANCH_LEFT` or `BRANCH_TAKEN` instead.") + @deprecated("Deprecated since Python 3.14. Use `BRANCH_LEFT` or `BRANCH_RIGHT` instead.") def BRANCH(self) -> int: ... else: @@ -55,10 +59,10 @@ class _events: def get_events(tool_id: int, /) -> int: ... def set_events(tool_id: int, event_set: int, /) -> None: ... def get_local_events(tool_id: int, code: CodeType, /) -> int: ... -def set_local_events(tool_id: int, code: CodeType, event_set: int, /) -> int: ... +def set_local_events(tool_id: int, code: CodeType, event_set: int, /) -> None: ... def restart_events() -> None: ... DISABLE: Final[object] MISSING: Final[object] -def register_callback(tool_id: int, event: int, func: Callable[..., Any] | None, /) -> Callable[..., Any] | None: ... +def register_callback(tool_id: int, event: int, func: Callable[..., object] | None, /) -> Callable[..., Any] | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/sysconfig.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/sysconfig.pyi index 807a979050..8de7ddc425 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/sysconfig.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/sysconfig.pyi @@ -1,6 +1,6 @@ import sys from typing import IO, Any, Literal, overload -from typing_extensions import deprecated +from typing_extensions import LiteralString, deprecated __all__ = [ "get_config_h_filename", @@ -28,8 +28,10 @@ def get_config_vars(arg: str, /, *args: str) -> list[Any]: ... def get_scheme_names() -> tuple[str, ...]: ... if sys.version_info >= (3, 10): - def get_default_scheme() -> str: ... - def get_preferred_scheme(key: Literal["prefix", "home", "user"]) -> str: ... + def get_default_scheme() -> LiteralString: ... + def get_preferred_scheme(key: Literal["prefix", "home", "user"]) -> LiteralString: ... + # Documented -- see https://docs.python.org/3/library/sysconfig.html#sysconfig._get_preferred_schemes + def _get_preferred_schemes() -> dict[Literal["prefix", "home", "user"], LiteralString]: ... def get_path_names() -> tuple[str, ...]: ... def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> str: ... @@ -37,7 +39,14 @@ def get_paths(scheme: str = ..., vars: dict[str, Any] | None = None, expand: boo def get_python_version() -> str: ... def get_platform() -> str: ... -if sys.version_info >= (3, 11): +if sys.version_info >= (3, 12): + @overload + def is_python_build() -> bool: ... + @overload + @deprecated("The `check_home` parameter is deprecated since Python 3.12; removed in Python 3.15.") + def is_python_build(check_home: object = None) -> bool: ... + +elif sys.version_info >= (3, 11): def is_python_build(check_home: object = None) -> bool: ... 
else: diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tarfile.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tarfile.pyi index f6623ea992..ba2bd16db4 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tarfile.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tarfile.pyi @@ -127,7 +127,7 @@ class TarFile: encoding: str | None errors: str fileobject: type[ExFileObject] # undocumented - pax_headers: Mapping[str, str] | None + pax_headers: Mapping[str, str] debug: int | None errorlevel: int | None offset: int # undocumented @@ -214,7 +214,7 @@ class TarFile: errorlevel: int | None = ..., level: None = None, options: Mapping[int, int] | None = None, - zstd_dict: ZstdDict | None = None, + zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, ) -> Self: ... @overload @@ -355,7 +355,7 @@ class TarFile: debug: int | None = ..., errorlevel: int | None = ..., options: Mapping[int, int] | None = None, - zstd_dict: ZstdDict | None = None, + zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, ) -> Self: ... @overload @classmethod @@ -376,7 +376,7 @@ class TarFile: debug: int | None = ..., errorlevel: int | None = ..., options: Mapping[int, int] | None = None, - zstd_dict: ZstdDict | None = None, + zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, ) -> Self: ... 
@overload @@ -611,7 +611,7 @@ class TarFile: fileobj: IO[bytes] | None = None, level: None = None, options: Mapping[int, int] | None = None, - zstd_dict: ZstdDict | None = None, + zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -631,7 +631,7 @@ class TarFile: fileobj: IO[bytes] | None = None, level: int | None = None, options: Mapping[int, int] | None = None, - zstd_dict: ZstdDict | None = None, + zstd_dict: ZstdDict | tuple[ZstdDict, int] | None = None, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/threading.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/threading.pyi index 28fa5267a9..abc7fe7e81 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/threading.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/threading.pyi @@ -1,6 +1,6 @@ import _thread import sys -from _thread import _excepthook, _ExceptHookArgs, get_native_id as get_native_id +from _thread import _ExceptHookArgs, get_native_id as get_native_id from _typeshed import ProfileFunction, TraceFunction from collections.abc import Callable, Iterable, Mapping from contextvars import ContextVar @@ -54,12 +54,12 @@ def currentThread() -> Thread: ... def get_ident() -> int: ... def enumerate() -> list[Thread]: ... def main_thread() -> Thread: ... -def settrace(func: TraceFunction) -> None: ... +def settrace(func: TraceFunction | None) -> None: ... def setprofile(func: ProfileFunction | None) -> None: ... if sys.version_info >= (3, 12): def setprofile_all_threads(func: ProfileFunction | None) -> None: ... - def settrace_all_threads(func: TraceFunction) -> None: ... + def settrace_all_threads(func: TraceFunction | None) -> None: ... if sys.version_info >= (3, 10): def gettrace() -> TraceFunction | None: ... @@ -144,6 +144,9 @@ class Condition: ) -> None: ... 
def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... def release(self) -> None: ... + if sys.version_info >= (3, 14): + def locked(self) -> bool: ... + def wait(self, timeout: float | None = None) -> bool: ... def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ... def notify(self, n: int = 1) -> None: ... @@ -169,7 +172,9 @@ class Event: def clear(self) -> None: ... def wait(self, timeout: float | None = None) -> bool: ... -excepthook = _excepthook +excepthook: Callable[[_ExceptHookArgs], object] +if sys.version_info >= (3, 10): + __excepthook__: Callable[[_ExceptHookArgs], object] ExceptHookArgs = _ExceptHookArgs class Timer(Thread): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/constants.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/constants.pyi index fbfe8b49b9..eb1ef446cf 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/constants.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/constants.pyi @@ -1,12 +1,12 @@ from typing import Final # These are not actually bools. 
See #4669 -NO: Final[bool] -YES: Final[bool] -TRUE: Final[bool] -FALSE: Final[bool] -ON: Final[bool] -OFF: Final[bool] +YES: Final = True +NO: Final = False +TRUE: Final = True +FALSE: Final = False +ON: Final = True +OFF: Final = False N: Final = "n" S: Final = "s" W: Final = "w" diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/ttk.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/ttk.pyi index 1d72acd995..7143c7cce5 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/ttk.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/ttk.pyi @@ -2,10 +2,10 @@ import _tkinter import sys import tkinter from _typeshed import MaybeNone -from collections.abc import Callable, Iterable +from collections.abc import Callable, Iterable, Sequence from tkinter.font import _FontDescription -from typing import Any, Literal, TypedDict, overload, type_check_only -from typing_extensions import Never, TypeAlias, Unpack +from typing import Any, Literal, TypedDict, TypeVar, overload, type_check_only +from typing_extensions import Never, ParamSpec, TypeAlias, Unpack __all__ = [ "Button", @@ -54,6 +54,9 @@ _Statespec: TypeAlias = tuple[Unpack[tuple[str, ...]], Any] _ImageStatespec: TypeAlias = tuple[Unpack[tuple[str, ...]], tkinter._Image | str] _VsapiStatespec: TypeAlias = tuple[Unpack[tuple[str, ...]], int] +_P = ParamSpec("_P") +_T = TypeVar("_T") + class _Layout(TypedDict, total=False): side: Literal["left", "right", "top", "bottom"] sticky: str # consists of letters 'n', 's', 'w', 'e', may contain repeats, may be empty @@ -201,10 +204,15 @@ class Style: def theme_use(self, themename: None = None) -> str: ... class Widget(tkinter.Widget): - def __init__(self, master: tkinter.Misc | None, widgetname, kw=None) -> None: ... + def __init__(self, master: tkinter.Misc | None, widgetname: str | None, kw: dict[str, Any] | None = None) -> None: ... def identify(self, x: int, y: int) -> str: ... 
- def instate(self, statespec, callback=None, *args, **kw): ... - def state(self, statespec=None): ... + @overload + def instate(self, statespec: Sequence[str], callback: None = None) -> bool: ... + @overload + def instate( + self, statespec: Sequence[str], callback: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs + ) -> Literal[False] | _T: ... + def state(self, statespec: Sequence[str] | None = None) -> tuple[str, ...]: ... class Button(Widget): def __init__( diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/types.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/types.pyi index 649e463ff7..568f6ea68d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/types.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/types.pyi @@ -400,7 +400,7 @@ class GeneratorType(Generator[_YieldT_co, _SendT_contra, _ReturnT_co]): @property def gi_code(self) -> CodeType: ... @property - def gi_frame(self) -> FrameType: ... + def gi_frame(self) -> FrameType | None: ... @property def gi_running(self) -> bool: ... @property @@ -429,7 +429,7 @@ class AsyncGeneratorType(AsyncGenerator[_YieldT_co, _SendT_contra]): @property def ag_code(self) -> CodeType: ... @property - def ag_frame(self) -> FrameType: ... + def ag_frame(self) -> FrameType | None: ... @property def ag_running(self) -> bool: ... __name__: str @@ -462,13 +462,8 @@ class CoroutineType(Coroutine[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co]): def cr_await(self) -> Any | None: ... @property def cr_code(self) -> CodeType: ... - if sys.version_info >= (3, 12): - @property - def cr_frame(self) -> FrameType | None: ... - else: - @property - def cr_frame(self) -> FrameType: ... - + @property + def cr_frame(self) -> FrameType | None: ... @property def cr_running(self) -> bool: ... @property @@ -717,9 +712,9 @@ if sys.version_info >= (3, 10): @final class EllipsisType: ... - from builtins import _NotImplementedType + @final + class NotImplementedType(Any): ... 
- NotImplementedType = _NotImplementedType @final class UnionType: @property diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/typing.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/typing.pyi index 746395fc89..196a7b87f0 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/typing.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/typing.pyi @@ -222,10 +222,6 @@ class TypeVar: @property def evaluate_default(self) -> EvaluateFunc | None: ... -# Used for an undocumented mypy feature. Does not exist at runtime. -# Obsolete, use _typeshed._type_checker_internals.promote instead. -_promote = object() - # N.B. Keep this definition in sync with typing_extensions._SpecialForm @final class _SpecialForm(_Final): @@ -391,6 +387,7 @@ if sys.version_info >= (3, 10): def __or__(self, other: Any) -> _SpecialForm: ... def __ror__(self, other: Any) -> _SpecialForm: ... __supertype__: type | NewType + __name__: str else: def NewType(name: str, tp: Any) -> Any: ... @@ -412,7 +409,13 @@ _TC = TypeVar("_TC", bound=type[object]) def overload(func: _F) -> _F: ... def no_type_check(arg: _F) -> _F: ... -def no_type_check_decorator(decorator: Callable[_P, _T]) -> Callable[_P, _T]: ... + +if sys.version_info >= (3, 13): + @deprecated("Deprecated since Python 3.13; removed in Python 3.15.") + def no_type_check_decorator(decorator: Callable[_P, _T]) -> Callable[_P, _T]: ... + +else: + def no_type_check_decorator(decorator: Callable[_P, _T]) -> Callable[_P, _T]: ... # This itself is only available during type checking def type_check_only(func_or_cls: _FT) -> _FT: ... @@ -759,6 +762,11 @@ class ValuesView(MappingView, Collection[_VT_co]): def __contains__(self, value: object) -> bool: ... def __iter__(self) -> Iterator[_VT_co]: ... 
+# note for Mapping.get and MutableMapping.pop and MutableMapping.setdefault +# In _collections_abc.py the parameters are positional-or-keyword, +# but dict and types.MappingProxyType (the vast majority of Mapping types) +# don't allow keyword arguments. + class Mapping(Collection[_KT], Generic[_KT, _VT_co]): # TODO: We wish the key type could also be covariant, but that doesn't work, # see discussion in https://github.com/python/typing/pull/273. @@ -768,9 +776,9 @@ class Mapping(Collection[_KT], Generic[_KT, _VT_co]): @overload def get(self, key: _KT, /) -> _VT_co | None: ... @overload - def get(self, key: _KT, /, default: _VT_co) -> _VT_co: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter + def get(self, key: _KT, default: _VT_co, /) -> _VT_co: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter @overload - def get(self, key: _KT, /, default: _T) -> _VT_co | _T: ... + def get(self, key: _KT, default: _T, /) -> _VT_co | _T: ... def items(self) -> ItemsView[_KT, _VT_co]: ... def keys(self) -> KeysView[_KT]: ... def values(self) -> ValuesView[_VT_co]: ... @@ -786,9 +794,9 @@ class MutableMapping(Mapping[_KT, _VT]): @overload def pop(self, key: _KT, /) -> _VT: ... @overload - def pop(self, key: _KT, /, default: _VT) -> _VT: ... + def pop(self, key: _KT, default: _VT, /) -> _VT: ... @overload - def pop(self, key: _KT, /, default: _T) -> _VT | _T: ... + def pop(self, key: _KT, default: _T, /) -> _VT | _T: ... def popitem(self) -> tuple[_KT, _VT]: ... # This overload should be allowed only if the value type is compatible with None. # @@ -1005,9 +1013,7 @@ class NamedTuple(tuple[Any, ...]): @overload def __init__(self, typename: str, fields: Iterable[tuple[str, Any]], /) -> None: ... 
@overload - @typing_extensions.deprecated( - "Creating a typing.NamedTuple using keyword arguments is deprecated and support will be removed in Python 3.15" - ) + @deprecated("Creating a typing.NamedTuple using keyword arguments is deprecated and support will be removed in Python 3.15") def __init__(self, typename: str, fields: None = None, /, **kwargs: Any) -> None: ... @classmethod def _make(cls, iterable: Iterable[Any]) -> typing_extensions.Self: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/typing_extensions.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/typing_extensions.pyi index b95220e033..fd0bad62e5 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/typing_extensions.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/typing_extensions.pyi @@ -377,6 +377,7 @@ else: def __init__(self, name: str, tp: AnnotationForm) -> None: ... def __call__(self, obj: _T, /) -> _T: ... __supertype__: type | NewType + __name__: str if sys.version_info >= (3, 10): def __or__(self, other: Any) -> _SpecialForm: ... def __ror__(self, other: Any) -> _SpecialForm: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/mock.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/mock.pyi index f3e58bcd1c..ef51d72129 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/mock.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/mock.pyi @@ -199,6 +199,7 @@ class NonCallableMock(Base, Any): call_count: int call_args: _Call | MaybeNone call_args_list: _CallList + method_calls: _CallList mock_calls: _CallList def _format_mock_call_signature(self, args: Any, kwargs: Any) -> str: ... def _call_matcher(self, _call: tuple[_Call, ...]) -> _Call: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/util.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/util.pyi index 31c830e826..763c1478f5 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/util.pyi @@ -1,9 +1,26 @@ from collections.abc import MutableSequence, Sequence -from typing import Any, Final, TypeVar +from typing import Any, Final, Literal, Protocol, TypeVar, type_check_only from typing_extensions import TypeAlias +@type_check_only +class _SupportsDunderLT(Protocol): + def __lt__(self, other: Any, /) -> bool: ... + +@type_check_only +class _SupportsDunderGT(Protocol): + def __gt__(self, other: Any, /) -> bool: ... + +@type_check_only +class _SupportsDunderLE(Protocol): + def __le__(self, other: Any, /) -> bool: ... + +@type_check_only +class _SupportsDunderGE(Protocol): + def __ge__(self, other: Any, /) -> bool: ... + _T = TypeVar("_T") _Mismatch: TypeAlias = tuple[_T, _T, int] +_SupportsComparison: TypeAlias = _SupportsDunderLE | _SupportsDunderGE | _SupportsDunderGT | _SupportsDunderLT _MAX_LENGTH: Final = 80 _PLACEHOLDER_LEN: Final = 12 @@ -18,6 +35,6 @@ def safe_repr(obj: object, short: bool = False) -> str: ... def strclass(cls: type) -> str: ... def sorted_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: ... def unorderable_list_difference(expected: MutableSequence[_T], actual: MutableSequence[_T]) -> tuple[list[_T], list[_T]]: ... -def three_way_cmp(x: Any, y: Any) -> int: ... +def three_way_cmp(x: _SupportsComparison, y: _SupportsComparison) -> Literal[-1, 0, 1]: ... def _count_diff_all_purpose(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: ... def _count_diff_hashable(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/urllib/request.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/urllib/request.pyi index 876b9d3f16..a00e7406ee 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/urllib/request.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/urllib/request.pyi @@ -49,7 +49,14 @@ if sys.version_info < (3, 14): __all__ += ["URLopener", "FancyURLopener"] _T = TypeVar("_T") + +# The actual type is `addinfourl | HTTPResponse`, but users would need to use `typing.cast` or `isinstance` to narrow the type, +# so we use `Any` instead. +# See +# - https://github.com/python/typeshed/pull/15042 +# - https://github.com/python/typing/issues/566 _UrlopenRet: TypeAlias = Any + _DataType: TypeAlias = ReadableBuffer | SupportsRead[bytes] | Iterable[bytes] | None if sys.version_info >= (3, 13): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/uuid.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/uuid.pyi index 303fb10eaf..055f4def31 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/uuid.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/uuid.pyi @@ -1,7 +1,8 @@ import builtins import sys +from _typeshed import Unused from enum import Enum -from typing import Final +from typing import Final, NoReturn from typing_extensions import LiteralString, TypeAlias _FieldsType: TypeAlias = tuple[int, int, int, int, int, int] @@ -13,6 +14,9 @@ class SafeUUID(Enum): class UUID: __slots__ = ("int", "is_safe", "__weakref__") + is_safe: Final[SafeUUID] + int: Final[builtins.int] + def __init__( self, hex: str | None = None, @@ -25,8 +29,6 @@ class UUID: is_safe: SafeUUID = SafeUUID.unknown, ) -> None: ... @property - def is_safe(self) -> SafeUUID: ... - @property def bytes(self) -> builtins.bytes: ... @property def bytes_le(self) -> builtins.bytes: ... @@ -41,8 +43,6 @@ class UUID: @property def hex(self) -> str: ... @property - def int(self) -> builtins.int: ... 
- @property def node(self) -> builtins.int: ... @property def time(self) -> builtins.int: ... @@ -65,6 +65,7 @@ class UUID: def __gt__(self, other: UUID) -> bool: ... def __ge__(self, other: UUID) -> bool: ... def __hash__(self) -> builtins.int: ... + def __setattr__(self, name: Unused, value: Unused) -> NoReturn: ... def getnode() -> int: ... def uuid1(node: int | None = None, clock_seq: int | None = None) -> UUID: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/winreg.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/winreg.pyi index 53457112ee..a654bbcdfb 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/winreg.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/winreg.pyi @@ -18,13 +18,13 @@ if sys.platform == "win32": def ExpandEnvironmentStrings(string: str, /) -> str: ... def FlushKey(key: _KeyType, /) -> None: ... def LoadKey(key: _KeyType, sub_key: str, file_name: str, /) -> None: ... - def OpenKey(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: ... - def OpenKeyEx(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: ... + def OpenKey(key: _KeyType, sub_key: str | None, reserved: int = 0, access: int = 131097) -> HKEYType: ... + def OpenKeyEx(key: _KeyType, sub_key: str | None, reserved: int = 0, access: int = 131097) -> HKEYType: ... def QueryInfoKey(key: _KeyType, /) -> tuple[int, int, int]: ... def QueryValue(key: _KeyType, sub_key: str | None, /) -> str: ... def QueryValueEx(key: _KeyType, name: str, /) -> tuple[Any, int]: ... def SaveKey(key: _KeyType, file_name: str, /) -> None: ... - def SetValue(key: _KeyType, sub_key: str, type: int, value: str, /) -> None: ... + def SetValue(key: _KeyType, sub_key: str | None, type: int, value: str, /) -> None: ... 
@overload # type=REG_DWORD|REG_QWORD def SetValueEx( key: _KeyType, value_name: str | None, reserved: Unused, type: Literal[4, 5], value: int | None, / diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementTree.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementTree.pyi index d42db1bc0c..18fcaed376 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementTree.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementTree.pyi @@ -157,6 +157,7 @@ _Root = TypeVar("_Root", Element, Element | None, default=Element | None) class ElementTree(Generic[_Root]): def __init__(self, element: Element[Any] | None = None, file: _FileRead | None = None) -> None: ... def getroot(self) -> _Root: ... + def _setroot(self, element: Element[Any]) -> None: ... def parse(self, source: _FileRead, parser: XMLParser | None = None) -> Element: ... def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/zipfile/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/zipfile/__init__.pyi index e573d04dba..df53cdedb4 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/zipfile/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/zipfile/__init__.pyi @@ -321,6 +321,8 @@ class ZipInfo: def from_file(cls, filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True) -> Self: ... def is_dir(self) -> bool: ... def FileHeader(self, zip64: bool | None = None) -> bytes: ... + if sys.version_info >= (3, 14): + def _for_archive(self, archive: ZipFile) -> Self: ... 
if sys.version_info >= (3, 12): from zipfile._path import CompleteDirs as CompleteDirs, Path as Path diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/zlib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/zlib.pyi index 4e410fdd18..d5998cab90 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/zlib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/zlib.pyi @@ -26,8 +26,8 @@ Z_RLE: Final = 3 Z_SYNC_FLUSH: Final = 2 Z_TREES: Final = 6 -if sys.version_info >= (3, 14) and sys.platform == "win32": - # Available when zlib was built with zlib-ng, usually only on Windows +if sys.version_info >= (3, 14): + # Available when zlib was built with zlib-ng ZLIBNG_VERSION: Final[str] class error(Exception): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/METADATA.toml index d8cc388509..b8a8459de2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/METADATA.toml @@ -1,4 +1,3 @@ -version = "1.6.5" +version = "1.6.6" upstream_repository = "https://github.com/lepture/authlib" requires = ["cryptography"] -partial_stub = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/common/urls.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/common/urls.pyi index bc5a64570e..e4180eeb86 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/common/urls.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/common/urls.pyi @@ -13,7 +13,7 @@ def url_decode(query: str) -> _ExplodedQueryString: ... def add_params_to_qs(query: str, params: _ExplodedQueryString) -> str: ... def add_params_to_uri(uri: str, params: _ExplodedQueryString, fragment: bool = False): ... def quote(s: str, safe: bytes = b"/") -> str: ... -def unquote(s: str) -> str: ... 
+def unquote(s: str | bytes) -> str: ... def quote_url(s: str) -> str: ... def extract_params(raw: dict[str, str] | _ExplodedQueryString) -> _ExplodedQueryString: ... def is_valid_url(url: str, fragments_allowed: bool = True) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/async_app.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/async_app.pyi index c684655f94..352fd5815b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/async_app.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/async_app.pyi @@ -1,3 +1,4 @@ +from _typeshed import Incomplete from logging import Logger from authlib.integrations.base_client.sync_app import OAuth1Base, OAuth2Base @@ -8,11 +9,11 @@ __all__ = ["AsyncOAuth1Mixin", "AsyncOAuth2Mixin"] class AsyncOAuth1Mixin(OAuth1Base): async def request(self, method, url, token=None, **kwargs): ... - async def create_authorization_url(self, redirect_uri=None, **kwargs): ... + async def create_authorization_url(self, redirect_uri=None, **kwargs) -> dict[Incomplete, Incomplete]: ... async def fetch_access_token(self, request_token=None, **kwargs): ... class AsyncOAuth2Mixin(OAuth2Base): - async def load_server_metadata(self): ... + async def load_server_metadata(self) -> dict[Incomplete, Incomplete]: ... async def request(self, method, url, token=None, **kwargs): ... - async def create_authorization_url(self, redirect_uri=None, **kwargs): ... + async def create_authorization_url(self, redirect_uri=None, **kwargs) -> dict[Incomplete, Incomplete]: ... async def fetch_access_token(self, redirect_uri=None, **kwargs): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/async_openid.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/async_openid.pyi index d4c21c282a..702bed62fb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/async_openid.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/async_openid.pyi @@ -1,6 +1,8 @@ +from authlib.oidc.core.claims import UserInfo + __all__ = ["AsyncOpenIDMixin"] class AsyncOpenIDMixin: async def fetch_jwk_set(self, force: bool = False): ... - async def userinfo(self, **kwargs): ... - async def parse_id_token(self, token, nonce, claims_options=None, claims_cls=None, leeway: int = 120): ... + async def userinfo(self, **kwargs) -> UserInfo: ... + async def parse_id_token(self, token, nonce, claims_options=None, claims_cls=None, leeway: int = 120) -> UserInfo: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/framework_integration.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/framework_integration.pyi index c625251427..2c3c871ad6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/framework_integration.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/framework_integration.pyi @@ -8,6 +8,6 @@ class FrameworkIntegration: def get_state_data(self, session, state): ... def set_state_data(self, session, state, data): ... def clear_state_data(self, session, state): ... - def update_token(self, token, refresh_token=None, access_token=None) -> None: ... + def update_token(self, token, refresh_token=None, access_token=None): ... @staticmethod - def load_config(oauth, name, params) -> None: ... + def load_config(oauth, name, params): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/registry.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/registry.pyi index 24c1f415c9..657eefe452 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/registry.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/registry.pyi @@ -14,6 +14,6 @@ class BaseOAuth: def __init__(self, cache=None, fetch_token=None, update_token=None) -> None: ... def create_client(self, name): ... def register(self, name, overwrite: bool = False, **kwargs): ... - def generate_client_kwargs(self, name, overwrite, **kwargs): ... + def generate_client_kwargs(self, name, overwrite, **kwargs) -> dict[Incomplete, Incomplete]: ... def load_config(self, name, params): ... def __getattr__(self, key): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/sync_app.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/sync_app.pyi index a2ee03bd7b..8263f99756 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/sync_app.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/sync_app.pyi @@ -50,7 +50,7 @@ class OAuth1Base: class OAuth1Mixin(_RequestMixin, OAuth1Base): def request(self, method, url, token=None, **kwargs): ... - def create_authorization_url(self, redirect_uri=None, **kwargs): ... + def create_authorization_url(self, redirect_uri=None, **kwargs) -> dict[Incomplete, Incomplete]: ... def fetch_access_token(self, request_token=None, **kwargs): ... class OAuth2Base: @@ -91,6 +91,6 @@ class OAuth2Base: class OAuth2Mixin(_RequestMixin, OAuth2Base): def request(self, method, url, token=None, **kwargs): ... - def load_server_metadata(self): ... 
- def create_authorization_url(self, redirect_uri=None, **kwargs): ... + def load_server_metadata(self) -> dict[Incomplete, Incomplete]: ... + def create_authorization_url(self, redirect_uri=None, **kwargs) -> dict[Incomplete, Incomplete]: ... def fetch_access_token(self, redirect_uri=None, **kwargs): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/sync_openid.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/sync_openid.pyi index 9613955342..a78b26b4f6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/sync_openid.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/base_client/sync_openid.pyi @@ -1,5 +1,7 @@ +from authlib.oidc.core.claims import UserInfo + class OpenIDMixin: def fetch_jwk_set(self, force: bool = False): ... - def userinfo(self, **kwargs): ... - def parse_id_token(self, token, nonce, claims_options=None, claims_cls=None, leeway: int = 120): ... + def userinfo(self, **kwargs) -> UserInfo: ... + def parse_id_token(self, token, nonce, claims_options=None, claims_cls=None, leeway: int = 120) -> UserInfo | None: ... def create_load_key(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_client/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_client/__init__.pyi new file mode 100644 index 0000000000..c5c48e4777 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_client/__init__.pyi @@ -0,0 +1,10 @@ +from ..base_client import BaseOAuth, OAuthError as OAuthError +from .apps import DjangoOAuth1App as DjangoOAuth1App, DjangoOAuth2App as DjangoOAuth2App +from .integration import DjangoIntegration as DjangoIntegration, token_update as token_update + +class OAuth(BaseOAuth): + oauth1_client_cls = DjangoOAuth1App + oauth2_client_cls = DjangoOAuth2App + framework_integration_cls = DjangoIntegration + +__all__ = ["OAuth", "DjangoOAuth1App", "DjangoOAuth2App", "DjangoIntegration", "token_update", "OAuthError"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_client/apps.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_client/apps.pyi new file mode 100644 index 0000000000..895db81ec1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_client/apps.pyi @@ -0,0 +1,14 @@ +from ..base_client import BaseApp, OAuth1Mixin, OAuth2Mixin, OpenIDMixin +from ..requests_client import OAuth1Session, OAuth2Session + +class DjangoAppMixin: + def save_authorize_data(self, request, **kwargs) -> None: ... + def authorize_redirect(self, request, redirect_uri=None, **kwargs): ... + +class DjangoOAuth1App(DjangoAppMixin, OAuth1Mixin, BaseApp): + client_cls = OAuth1Session + def authorize_access_token(self, request, **kwargs): ... + +class DjangoOAuth2App(DjangoAppMixin, OAuth2Mixin, OpenIDMixin, BaseApp): + client_cls = OAuth2Session + def authorize_access_token(self, request, **kwargs): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_client/integration.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_client/integration.pyi new file mode 100644 index 0000000000..91469e1953 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_client/integration.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete + +from ..base_client import FrameworkIntegration + +# actual type is django.dispatch.Signal +token_update: Incomplete + +class DjangoIntegration(FrameworkIntegration): + def update_token(self, token, refresh_token=None, access_token=None) -> None: ... + @staticmethod + def load_config(oauth, name, params): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth1/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth1/__init__.pyi new file mode 100644 index 0000000000..3c684184a7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth1/__init__.pyi @@ -0,0 +1,4 @@ +from .authorization_server import BaseServer as BaseServer, CacheAuthorizationServer as CacheAuthorizationServer +from .resource_protector import ResourceProtector as ResourceProtector + +__all__ = ["BaseServer", "CacheAuthorizationServer", "ResourceProtector"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth1/authorization_server.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth1/authorization_server.pyi new file mode 100644 index 0000000000..2d4b0bacac --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth1/authorization_server.pyi @@ -0,0 +1,26 @@ +import logging +from _typeshed import Incomplete + +from authlib.oauth1 import 
AuthorizationServer as _AuthorizationServer, OAuth1Request, TemporaryCredential + +log: logging.Logger + +class BaseServer(_AuthorizationServer): + token_generator: Incomplete + client_model: Incomplete + token_model: Incomplete + SUPPORTED_SIGNATURE_METHODS: Incomplete + def __init__(self, client_model, token_model, token_generator=None) -> None: ... + def get_client_by_id(self, client_id): ... + def exists_nonce(self, nonce, request) -> bool: ... + def create_token_credential(self, request): ... + def check_authorization_request(self, request) -> OAuth1Request: ... + def create_oauth1_request(self, request) -> OAuth1Request: ... + def handle_response(self, status_code, payload, headers): ... + +class CacheAuthorizationServer(BaseServer): + def __init__(self, client_model, token_model, token_generator=None) -> None: ... + def create_temporary_credential(self, request) -> TemporaryCredential: ... + def get_temporary_credential(self, request) -> TemporaryCredential | None: ... + def delete_temporary_credential(self, request) -> None: ... + def create_authorization_verifier(self, request) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth1/nonce.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth1/nonce.pyi new file mode 100644 index 0000000000..b4d9319144 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth1/nonce.pyi @@ -0,0 +1 @@ +def exists_nonce_in_cache(nonce, request, timeout) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth1/resource_protector.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth1/resource_protector.pyi new file mode 100644 index 0000000000..d3e3f568d2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth1/resource_protector.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +from authlib.oauth1 import ResourceProtector as _ResourceProtector + +class ResourceProtector(_ResourceProtector): + client_model: Incomplete + token_model: Incomplete + SUPPORTED_SIGNATURE_METHODS: Incomplete + def __init__(self, client_model, token_model) -> None: ... + def get_client_by_id(self, client_id): ... + def get_token_credential(self, request): ... + def exists_nonce(self, nonce, request) -> bool: ... + def acquire_credential(self, request): ... + def __call__(self, realm=None): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/__init__.pyi new file mode 100644 index 0000000000..d627f322cf --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/__init__.pyi @@ -0,0 +1,8 @@ +from .authorization_server import AuthorizationServer as AuthorizationServer +from .endpoints import RevocationEndpoint as RevocationEndpoint +from .resource_protector import BearerTokenValidator as BearerTokenValidator, ResourceProtector as ResourceProtector +from .signals import ( + client_authenticated as client_authenticated, + token_authenticated as token_authenticated, + token_revoked as token_revoked, +) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/authorization_server.pyi 
b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/authorization_server.pyi new file mode 100644 index 0000000000..cecb3d9ae9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/authorization_server.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete + +from authlib.oauth2 import AuthorizationServer as _AuthorizationServer +from authlib.oauth2.rfc6750 import BearerTokenGenerator + +from .requests import DjangoJsonRequest, DjangoOAuth2Request + +class AuthorizationServer(_AuthorizationServer): + client_model: Incomplete + token_model: Incomplete + def __init__(self, client_model, token_model) -> None: ... + config: Incomplete + scopes_supported: Incomplete + def load_config(self, config) -> None: ... + def query_client(self, client_id): ... + def save_token(self, token, request): ... + def create_oauth2_request(self, request) -> DjangoOAuth2Request: ... + def create_json_request(self, request) -> DjangoJsonRequest: ... + def handle_response(self, status_code, payload, headers): ... + def send_signal(self, name, *args, **kwargs) -> None: ... + def create_bearer_token_generator(self) -> BearerTokenGenerator: ... + +def create_token_generator(token_generator_conf, length: int = 42): ... +def create_token_expires_in_generator(expires_in_conf=None): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/endpoints.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/endpoints.pyi new file mode 100644 index 0000000000..9eb7a277a4 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/endpoints.pyi @@ -0,0 +1,5 @@ +from authlib.oauth2.rfc7009 import RevocationEndpoint as _RevocationEndpoint + +class RevocationEndpoint(_RevocationEndpoint): + def query_token(self, token, token_type_hint): ... 
+ def revoke_token(self, token, request) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/requests.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/requests.pyi new file mode 100644 index 0000000000..05006392b3 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/requests.pyi @@ -0,0 +1,20 @@ +from authlib.oauth2.rfc6749 import JsonPayload, JsonRequest, OAuth2Payload, OAuth2Request + +class DjangoOAuth2Payload(OAuth2Payload): + def __init__(self, request) -> None: ... + +class DjangoOAuth2Request(OAuth2Request): + payload: DjangoOAuth2Payload + def __init__(self, request) -> None: ... + @property + def args(self): ... + @property + def form(self): ... + +class DjangoJsonPayload(JsonPayload): + def __init__(self, request) -> None: ... + def data(self): ... + +class DjangoJsonRequest(JsonRequest): + payload: DjangoJsonPayload + def __init__(self, request) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/resource_protector.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/resource_protector.pyi new file mode 100644 index 0000000000..1538f368ca --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/resource_protector.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +from authlib.oauth2 import ResourceProtector as _ResourceProtector +from authlib.oauth2.rfc6750 import BearerTokenValidator as _BearerTokenValidator + +class ResourceProtector(_ResourceProtector): + def acquire_token(self, request, scopes=None, **kwargs): ... + def __call__(self, scopes=None, optional=False, **kwargs): ... 
+ +class BearerTokenValidator(_BearerTokenValidator): + token_model: Incomplete + def __init__(self, token_model, realm=None, **extra_attributes): ... + def authenticate_token(self, token_string): ... + +def return_error_response(error): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/signals.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/signals.pyi new file mode 100644 index 0000000000..c5e48ddef1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/django_oauth2/signals.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete + +# actual type is django.dispatch.Signal +client_authenticated: Incomplete +token_revoked: Incomplete +token_authenticated: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_client/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_client/__init__.pyi new file mode 100644 index 0000000000..575d8ae26f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_client/__init__.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete + +from ..base_client import BaseOAuth, OAuthError as OAuthError +from .apps import FlaskOAuth1App as FlaskOAuth1App, FlaskOAuth2App as FlaskOAuth2App +from .integration import FlaskIntegration as FlaskIntegration, token_update as token_update + +class OAuth(BaseOAuth): +    oauth1_client_cls = FlaskOAuth1App +    oauth2_client_cls = FlaskOAuth2App +    framework_integration_cls = FlaskIntegration +    app: Incomplete +    def __init__(self, app=None, cache=None, fetch_token=None, update_token=None): ... +    cache: Incomplete +    fetch_token: Incomplete +    update_token: Incomplete +    def init_app(self, app, cache=None, fetch_token=None, update_token=None): ... +    def create_client(self, name): ... 
+ def register(self, name, overwrite=False, **kwargs): ... + +__all__ = ["OAuth", "FlaskIntegration", "FlaskOAuth1App", "FlaskOAuth2App", "token_update", "OAuthError"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_client/apps.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_client/apps.pyi new file mode 100644 index 0000000000..b0225ab249 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_client/apps.pyi @@ -0,0 +1,18 @@ +from ..base_client import BaseApp, OAuth1Mixin, OAuth2Mixin, OpenIDMixin +from ..requests_client import OAuth1Session, OAuth2Session + +class FlaskAppMixin: + @property + def token(self): ... + @token.setter + def token(self, token): ... + def save_authorize_data(self, **kwargs) -> None: ... + def authorize_redirect(self, redirect_uri=None, **kwargs): ... + +class FlaskOAuth1App(FlaskAppMixin, OAuth1Mixin, BaseApp): + client_cls = OAuth1Session + def authorize_access_token(self, **kwargs): ... + +class FlaskOAuth2App(FlaskAppMixin, OAuth2Mixin, OpenIDMixin, BaseApp): + client_cls = OAuth2Session + def authorize_access_token(self, **kwargs): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_client/integration.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_client/integration.pyi new file mode 100644 index 0000000000..7bcacdd47d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_client/integration.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete + +from ..base_client import FrameworkIntegration + +token_update: Incomplete + +class FlaskIntegration(FrameworkIntegration): + def update_token(self, token, refresh_token=None, access_token=None) -> None: ... + @staticmethod + def load_config(oauth, name, params) -> dict[Incomplete, Incomplete]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth1/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth1/__init__.pyi new file mode 100644 index 0000000000..fbe0b2b0ad --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth1/__init__.pyi @@ -0,0 +1,7 @@ +from .authorization_server import AuthorizationServer as AuthorizationServer +from .cache import ( + create_exists_nonce_func as create_exists_nonce_func, + register_nonce_hooks as register_nonce_hooks, + register_temporary_credential_hooks as register_temporary_credential_hooks, +) +from .resource_protector import ResourceProtector as ResourceProtector, current_credential as current_credential diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth1/authorization_server.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth1/authorization_server.pyi new file mode 100644 index 0000000000..1a4f39d999 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth1/authorization_server.pyi @@ -0,0 +1,29 @@ +import logging +from _typeshed import Incomplete +from collections.abc import Callable + +from authlib.oauth1 import AuthorizationServer as _AuthorizationServer, OAuth1Request + +log: logging.Logger + +class AuthorizationServer(_AuthorizationServer): + app: Incomplete + query_client: Incomplete + token_generator: Incomplete + def __init__(self, app=None, query_client=None, token_generator=None): ... + SUPPORTED_SIGNATURE_METHODS: Incomplete + def init_app(self, app, query_client=None, token_generator=None): ... + def register_hook(self, name, func) -> None: ... + def create_token_generator(self, app) -> Callable[[], dict[str, str]]: ... + def get_client_by_id(self, client_id): ... 
+ def exists_nonce(self, nonce, request): ... + def create_temporary_credential(self, request): ... + def get_temporary_credential(self, request): ... + def delete_temporary_credential(self, request): ... + def create_authorization_verifier(self, request): ... + def create_token_credential(self, request): ... + def check_authorization_request(self) -> OAuth1Request: ... + def create_authorization_response(self, request=None, grant_user=None): ... + def create_token_response(self, request=None): ... + def create_oauth1_request(self, request) -> OAuth1Request: ... + def handle_response(self, status_code, payload, headers): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth1/cache.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth1/cache.pyi new file mode 100644 index 0000000000..a88e4f9bcd --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth1/cache.pyi @@ -0,0 +1,8 @@ +from _typeshed import Incomplete +from collections.abc import Callable + +def register_temporary_credential_hooks(authorization_server, cache, key_prefix: str = "temporary_credential:") -> None: ... +def create_exists_nonce_func( + cache, key_prefix="nonce:", expires=86400 +) -> Callable[[Incomplete, Incomplete, Incomplete, Incomplete], Incomplete]: ... +def register_nonce_hooks(authorization_server, cache, key_prefix: str = "nonce:", expires=86400) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth1/resource_protector.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth1/resource_protector.pyi new file mode 100644 index 0000000000..dec0175c30 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth1/resource_protector.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete + +from authlib.oauth1 import ResourceProtector as _ResourceProtector + +class ResourceProtector(_ResourceProtector): + app: Incomplete + query_client: Incomplete + query_token: Incomplete + def __init__(self, app=None, query_client=None, query_token=None, exists_nonce=None) -> None: ... + SUPPORTED_SIGNATURE_METHODS: Incomplete + def init_app(self, app, query_client=None, query_token=None, exists_nonce=None): ... + def get_client_by_id(self, client_id): ... + def get_token_credential(self, request): ... + def exists_nonce(self, nonce, request): ... + def acquire_credential(self): ... + def __call__(self, scope=None): ... 
+ +current_credential: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/__init__.pyi new file mode 100644 index 0000000000..ba510bf3a3 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/__init__.pyi @@ -0,0 +1,7 @@ +from .authorization_server import AuthorizationServer as AuthorizationServer +from .resource_protector import ResourceProtector as ResourceProtector, current_token as current_token +from .signals import ( + client_authenticated as client_authenticated, + token_authenticated as token_authenticated, + token_revoked as token_revoked, +) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/authorization_server.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/authorization_server.pyi new file mode 100644 index 0000000000..3856935af6 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/authorization_server.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +from authlib.oauth2 import AuthorizationServer as _AuthorizationServer +from authlib.oauth2.rfc6750 import BearerTokenGenerator + +from .requests import FlaskJsonRequest, FlaskOAuth2Request + +class AuthorizationServer(_AuthorizationServer): + def __init__(self, app=None, query_client=None, save_token=None) -> None: ... + def init_app(self, app, query_client=None, save_token=None) -> None: ... + scopes_supported: Incomplete + def load_config(self, config) -> None: ... + def query_client(self, client_id): ... + def save_token(self, token, request): ... + def get_error_uri(self, request, error): ... + def create_oauth2_request(self, request) -> FlaskOAuth2Request: ... 
+ def create_json_request(self, request) -> FlaskJsonRequest: ... + def handle_response(self, status_code, payload, headers): ... + def send_signal(self, name, *args, **kwargs) -> None: ... + def create_bearer_token_generator(self, config) -> BearerTokenGenerator: ... + +def create_token_expires_in_generator(expires_in_conf=None): ... +def create_token_generator(token_generator_conf, length: int = 42): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/errors.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/errors.pyi new file mode 100644 index 0000000000..258839271b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/errors.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete +from typing import NoReturn + +# Inherits from werkzeug.exceptions.HTTPException +class _HTTPException: +    code: Incomplete +    body: Incomplete +    headers: Incomplete +    def __init__(self, code, body, headers, response=None) -> None: ... +    # Params depend on `werkzeug` package version +    def get_body(self, environ=None, scope=None): ... +    def get_headers(self, environ=None, scope=None): ... + +def raise_http_exception(status, body, headers) -> NoReturn: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/requests.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/requests.pyi new file mode 100644 index 0000000000..f2e657d31a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/requests.pyi @@ -0,0 +1,27 @@ +from functools import cached_property + +from authlib.oauth2.rfc6749 import JsonPayload, JsonRequest, OAuth2Payload, OAuth2Request + +class FlaskOAuth2Payload(OAuth2Payload): +    def __init__(self, request) -> None: ... +    @property +    def data(self): ... 
+ @cached_property + def datalist(self): ... + +class FlaskOAuth2Request(OAuth2Request): + payload: FlaskOAuth2Payload + def __init__(self, request) -> None: ... + @property + def args(self): ... + @property + def form(self): ... + +class FlaskJsonPayload(JsonPayload): + def __init__(self, request) -> None: ... + @property + def data(self): ... + +class FlaskJsonRequest(JsonRequest): + payload: FlaskJsonPayload + def __init__(self, request) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/resource_protector.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/resource_protector.pyi new file mode 100644 index 0000000000..a4c4928a04 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/resource_protector.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete +from collections.abc import Generator +from contextlib import contextmanager +from typing import NoReturn + +from authlib.oauth2 import ResourceProtector as _ResourceProtector + +class ResourceProtector(_ResourceProtector): + def raise_error_response(self, error) -> NoReturn: ... + def acquire_token(self, scopes=None, **kwargs): ... + @contextmanager + def acquire(self, scopes=None) -> Generator[Incomplete]: ... + def __call__(self, scopes=None, optional=False, **kwargs): ... 
+ +current_token: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/signals.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/signals.pyi new file mode 100644 index 0000000000..f64d46c1d0 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/flask_oauth2/signals.pyi @@ -0,0 +1,5 @@ +from _typeshed import Incomplete + +client_authenticated: Incomplete +token_revoked: Incomplete +token_authenticated: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/__init__.pyi new file mode 100644 index 0000000000..58b88a90fb --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/__init__.pyi @@ -0,0 +1,37 @@ +from authlib.oauth1 import ( + SIGNATURE_HMAC_SHA1 as SIGNATURE_HMAC_SHA1, + SIGNATURE_PLAINTEXT as SIGNATURE_PLAINTEXT, + SIGNATURE_RSA_SHA1 as SIGNATURE_RSA_SHA1, + SIGNATURE_TYPE_BODY as SIGNATURE_TYPE_BODY, + SIGNATURE_TYPE_HEADER as SIGNATURE_TYPE_HEADER, + SIGNATURE_TYPE_QUERY as SIGNATURE_TYPE_QUERY, +) + +from ..base_client import OAuthError as OAuthError +from .assertion_client import AssertionClient as AssertionClient, AsyncAssertionClient as AsyncAssertionClient +from .oauth1_client import AsyncOAuth1Client as AsyncOAuth1Client, OAuth1Auth as OAuth1Auth, OAuth1Client as OAuth1Client +from .oauth2_client import ( + AsyncOAuth2Client as AsyncOAuth2Client, + OAuth2Auth as OAuth2Auth, + OAuth2Client as OAuth2Client, + OAuth2ClientAuth as OAuth2ClientAuth, +) + +__all__ = [ + "OAuthError", + "OAuth1Auth", + "AsyncOAuth1Client", + "OAuth1Client", + "SIGNATURE_HMAC_SHA1", + "SIGNATURE_RSA_SHA1", + "SIGNATURE_PLAINTEXT", + "SIGNATURE_TYPE_HEADER", + "SIGNATURE_TYPE_QUERY", + 
"SIGNATURE_TYPE_BODY", + "OAuth2Auth", + "OAuth2ClientAuth", + "OAuth2Client", + "AsyncOAuth2Client", + "AssertionClient", + "AsyncAssertionClient", +] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/assertion_client.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/assertion_client.pyi new file mode 100644 index 0000000000..f2061d98ae --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/assertion_client.pyi @@ -0,0 +1,50 @@ +from _typeshed import Incomplete + +from authlib.oauth2.rfc7521 import AssertionClient as _AssertionClient + +from ..base_client import OAuthError +from .oauth2_client import OAuth2Auth + +__all__ = ["AsyncAssertionClient"] + +# Inherits from httpx.AsyncClient +class AsyncAssertionClient(_AssertionClient): + token_auth_class = OAuth2Auth + oauth_error_class = OAuthError # type: ignore[assignment] + JWT_BEARER_GRANT_TYPE: Incomplete + ASSERTION_METHODS: Incomplete + DEFAULT_GRANT_TYPE: Incomplete + def __init__( + self, + token_endpoint, + issuer, + subject, + audience=None, + grant_type=None, + claims=None, + token_placement="header", + scope=None, + **kwargs, + ) -> None: ... + async def request(self, method, url, withhold_token=False, auth=..., **kwargs): ... + +# Inherits from httpx.Client +class AssertionClient(_AssertionClient): + token_auth_class = OAuth2Auth + oauth_error_class = OAuthError # type: ignore[assignment] + JWT_BEARER_GRANT_TYPE: Incomplete + ASSERTION_METHODS: Incomplete + DEFAULT_GRANT_TYPE: Incomplete + def __init__( + self, + token_endpoint, + issuer, + subject, + audience=None, + grant_type=None, + claims=None, + token_placement="header", + scope=None, + **kwargs, + ) -> None: ... + def request(self, method, url, withhold_token=False, auth=..., **kwargs): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/oauth1_client.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/oauth1_client.pyi new file mode 100644 index 0000000000..1160db3b44 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/oauth1_client.pyi @@ -0,0 +1,56 @@ +from _typeshed import Incomplete +from collections.abc import Generator +from typing import NoReturn +from typing_extensions import TypeAlias + +from authlib.oauth1 import ClientAuth +from authlib.oauth1.client import OAuth1Client as _OAuth1Client + +_Response: TypeAlias = Incomplete # actual type is httpx.Response +_Request: TypeAlias = Incomplete # actual type is httpx.Request + +# Inherits from httpx.Auth +class OAuth1Auth(ClientAuth): + requires_request_body: bool + def auth_flow(self, request: _Request) -> Generator[_Request, _Response, None]: ... + +# Inherits from httpx.AsyncClient +class AsyncOAuth1Client(_OAuth1Client): + auth_class = OAuth1Auth + def __init__( + self, + client_id, + client_secret=None, + token=None, + token_secret=None, + redirect_uri=None, + rsa_key=None, + verifier=None, + signature_method=..., + signature_type=..., + force_include_body=False, + **kwargs, + ) -> None: ... + async def fetch_access_token(self, url, verifier=None, **kwargs): ... + @staticmethod + def handle_error(error_type: str | None, error_description: str | None) -> NoReturn: ... + +# Inherits from httpx.Client +class OAuth1Client(_OAuth1Client): + auth_class = OAuth1Auth + def __init__( + self, + client_id, + client_secret=None, + token=None, + token_secret=None, + redirect_uri=None, + rsa_key=None, + verifier=None, + signature_method=..., + signature_type=..., + force_include_body=False, + **kwargs, + ) -> None: ... + @staticmethod + def handle_error(error_type: str | None, error_description: str | None) -> NoReturn: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/oauth2_client.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/oauth2_client.pyi new file mode 100644 index 0000000000..79d45bd3c2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/oauth2_client.pyi @@ -0,0 +1,72 @@ +from _typeshed import Incomplete +from collections.abc import Generator +from typing import NoReturn +from typing_extensions import TypeAlias + +from authlib.oauth2.auth import ClientAuth, TokenAuth +from authlib.oauth2.client import OAuth2Client as _OAuth2Client + +from ..base_client import OAuthError + +__all__ = ["OAuth2Auth", "OAuth2ClientAuth", "AsyncOAuth2Client", "OAuth2Client"] + +_Response: TypeAlias = Incomplete # actual type is httpx.Response +_Request: TypeAlias = Incomplete # actual type is httpx.Request + +# Inherits from httpx.Auth +class OAuth2Auth(TokenAuth): + requires_request_body: bool + def auth_flow(self, request: _Request) -> Generator[_Request, _Response, None]: ... + +# Inherits from httpx.Auth +class OAuth2ClientAuth(ClientAuth): + requires_request_body: bool + def auth_flow(self, request: _Request) -> Generator[_Request, _Response, None]: ... + +# Inherits from httpx.AsyncClient +class AsyncOAuth2Client(_OAuth2Client): + SESSION_REQUEST_PARAMS: list[str] + client_auth_class = OAuth2ClientAuth + token_auth_class = OAuth2Auth + oauth_error_class = OAuthError # type: ignore[assignment] + def __init__( + self, + client_id=None, + client_secret=None, + token_endpoint_auth_method=None, + revocation_endpoint_auth_method=None, + scope=None, + redirect_uri=None, + token=None, + token_placement="header", + update_token=None, + leeway=60, + **kwargs, + ) -> None: ... + async def request(self, method, url, withhold_token: bool = False, auth=..., **kwargs): ... 
+ async def stream(self, method, url, withhold_token: bool = False, auth=..., **kwargs) -> Generator[Incomplete]: ... + async def ensure_active_token(self, token): ... # type: ignore[override] + +# Inherits from httpx.Client +class OAuth2Client(_OAuth2Client): + SESSION_REQUEST_PARAMS: list[str] + client_auth_class = OAuth2ClientAuth + token_auth_class = OAuth2Auth + oauth_error_class = OAuthError # type: ignore[assignment] + def __init__( + self, + client_id=None, + client_secret=None, + token_endpoint_auth_method=None, + revocation_endpoint_auth_method=None, + scope=None, + redirect_uri=None, + token=None, + token_placement="header", + update_token=None, + **kwargs, + ) -> None: ... + @staticmethod + def handle_error(error_type: str | None, error_description: str | None) -> NoReturn: ... + def request(self, method, url, withhold_token: bool = False, auth=..., **kwargs): ... + def stream(self, method, url, withhold_token: bool = False, auth=..., **kwargs): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/utils.pyi new file mode 100644 index 0000000000..d4e4547e44 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/httpx_client/utils.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete +from typing import Final + +HTTPX_CLIENT_KWARGS: Final[list[str]] + +def extract_client_kwargs(kwargs) -> dict[str, Incomplete]: ... +def build_request(url, headers, body, initial_request): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/__init__.pyi new file mode 100644 index 0000000000..fc7b705e0c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/__init__.pyi @@ -0,0 +1,28 @@ +from authlib.oauth1 import ( + SIGNATURE_HMAC_SHA1 as SIGNATURE_HMAC_SHA1, + SIGNATURE_PLAINTEXT as SIGNATURE_PLAINTEXT, + SIGNATURE_RSA_SHA1 as SIGNATURE_RSA_SHA1, + SIGNATURE_TYPE_BODY as SIGNATURE_TYPE_BODY, + SIGNATURE_TYPE_HEADER as SIGNATURE_TYPE_HEADER, + SIGNATURE_TYPE_QUERY as SIGNATURE_TYPE_QUERY, +) + +from ..base_client import OAuthError as OAuthError +from .assertion_session import AssertionSession as AssertionSession +from .oauth1_session import OAuth1Auth as OAuth1Auth, OAuth1Session as OAuth1Session +from .oauth2_session import OAuth2Auth as OAuth2Auth, OAuth2Session as OAuth2Session + +__all__ = [ + "OAuthError", + "OAuth1Session", + "OAuth1Auth", + "SIGNATURE_HMAC_SHA1", + "SIGNATURE_RSA_SHA1", + "SIGNATURE_PLAINTEXT", + "SIGNATURE_TYPE_HEADER", + "SIGNATURE_TYPE_QUERY", + "SIGNATURE_TYPE_BODY", + "OAuth2Session", + "OAuth2Auth", + "AssertionSession", +] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/assertion_session.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/assertion_session.pyi new file mode 100644 index 0000000000..87f9ae0043 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/assertion_session.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete + +from authlib.oauth2.rfc7521 import AssertionClient + +from .oauth2_session import OAuth2Auth + +class AssertionAuth(OAuth2Auth): + def ensure_active_token(self): ... 
+ +# Inherits from requests.Session +class AssertionSession(AssertionClient): + token_auth_class = AssertionAuth + JWT_BEARER_GRANT_TYPE: Incomplete + ASSERTION_METHODS: Incomplete + DEFAULT_GRANT_TYPE: Incomplete + default_timeout: Incomplete + def __init__( + self, + token_endpoint, + issuer, + subject, + audience=None, + grant_type=None, + claims=None, + token_placement="header", + scope=None, + default_timeout=None, + leeway=60, + **kwargs, + ) -> None: ... + def request(self, method, url, withhold_token=False, auth=None, **kwargs): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/oauth1_session.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/oauth1_session.pyi new file mode 100644 index 0000000000..deead135e9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/oauth1_session.pyi @@ -0,0 +1,29 @@ +from typing import NoReturn + +from authlib.oauth1 import ClientAuth +from authlib.oauth1.client import OAuth1Client + +# Inherits from requests.auth.AuthBase +class OAuth1Auth(ClientAuth): + def __call__(self, req): ... + +# Inherits from requests.Session +class OAuth1Session(OAuth1Client): + auth_class = OAuth1Auth + def __init__( + self, + client_id, + client_secret=None, + token=None, + token_secret=None, + redirect_uri=None, + rsa_key=None, + verifier=None, + signature_method=..., + signature_type=..., + force_include_body=False, + **kwargs, + ) -> None: ... + def rebuild_auth(self, prepared_request, response) -> None: ... + @staticmethod + def handle_error(error_type: str | None, error_description: str | None) -> NoReturn: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/oauth2_session.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/oauth2_session.pyi new file mode 100644 index 0000000000..63cae01d2b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/oauth2_session.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +from authlib.oauth2.auth import ClientAuth, TokenAuth +from authlib.oauth2.client import OAuth2Client + +from ..base_client import OAuthError + +__all__ = ["OAuth2Session", "OAuth2Auth"] + +# Inherits from requests.auth.AuthBase +class OAuth2Auth(TokenAuth): + def ensure_active_token(self) -> None: ... + def __call__(self, req): ... + +# Inherits from requests.auth.AuthBase +class OAuth2ClientAuth(ClientAuth): + def __call__(self, req): ... + +# Inherits from requests.Session +class OAuth2Session(OAuth2Client): + client_auth_class = OAuth2ClientAuth + token_auth_class = OAuth2Auth + oauth_error_class = OAuthError # type: ignore[assignment] + SESSION_REQUEST_PARAMS: tuple[str, ...] + default_timeout: Incomplete + def __init__( + self, + client_id=None, + client_secret=None, + token_endpoint_auth_method=None, + revocation_endpoint_auth_method=None, + scope=None, + state=None, + redirect_uri=None, + token=None, + token_placement="header", + update_token=None, + leeway=60, + default_timeout=None, + **kwargs, + ) -> None: ... + def fetch_access_token(self, url=None, **kwargs): ... + def request(self, method, url, withhold_token=False, auth=None, **kwargs): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/utils.pyi new file mode 100644 index 0000000000..f93f9a06f0 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/requests_client/utils.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete +from typing import Final + +REQUESTS_SESSION_KWARGS: Final = ["proxies", "hooks", "stream", "verify", "cert", "max_redirects", "trust_env"] + +def update_session_configure(session, kwargs: dict[str, Incomplete]) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/sqla_oauth2/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/sqla_oauth2/__init__.pyi new file mode 100644 index 0000000000..364a0a9fb2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/sqla_oauth2/__init__.pyi @@ -0,0 +1,20 @@ +from .client_mixin import OAuth2ClientMixin as OAuth2ClientMixin +from .functions import ( + create_bearer_token_validator as create_bearer_token_validator, + create_query_client_func as create_query_client_func, + create_query_token_func as create_query_token_func, + create_revocation_endpoint as create_revocation_endpoint, + create_save_token_func as create_save_token_func, +) +from .tokens_mixins import OAuth2AuthorizationCodeMixin as OAuth2AuthorizationCodeMixin, OAuth2TokenMixin as OAuth2TokenMixin + +__all__ = [ + "OAuth2ClientMixin", + "OAuth2AuthorizationCodeMixin", + "OAuth2TokenMixin", + "create_query_client_func", + "create_save_token_func", + "create_query_token_func", + "create_revocation_endpoint", + "create_bearer_token_validator", +] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/sqla_oauth2/client_mixin.pyi 
b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/sqla_oauth2/client_mixin.pyi new file mode 100644 index 0000000000..94606b7d19 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/sqla_oauth2/client_mixin.pyi @@ -0,0 +1,55 @@ +from _typeshed import Incomplete + +from authlib.oauth2.rfc6749 import ClientMixin + +class OAuth2ClientMixin(ClientMixin): + client_id: Incomplete + client_secret: Incomplete + client_id_issued_at: Incomplete + client_secret_expires_at: Incomplete + _client_metadata: Incomplete + @property + def client_info(self) -> dict[str, Incomplete]: ... + @property + def client_metadata(self): ... + def set_client_metadata(self, value) -> None: ... + @property + def redirect_uris(self): ... + @property + def token_endpoint_auth_method(self): ... + @property + def grant_types(self): ... + @property + def response_types(self): ... + @property + def client_name(self): ... + @property + def client_uri(self): ... + @property + def logo_uri(self): ... + @property + def scope(self): ... + @property + def contacts(self): ... + @property + def tos_uri(self): ... + @property + def policy_uri(self): ... + @property + def jwks_uri(self): ... + @property + def jwks(self): ... + @property + def software_id(self): ... + @property + def software_version(self): ... + @property + def id_token_signed_response_alg(self): ... + def get_client_id(self): ... + def get_default_redirect_uri(self): ... + def get_allowed_scope(self, scope) -> str: ... + def check_redirect_uri(self, redirect_uri) -> bool: ... + def check_client_secret(self, client_secret) -> bool: ... + def check_endpoint_auth_method(self, method, endpoint) -> bool: ... + def check_response_type(self, response_type) -> bool: ... + def check_grant_type(self, grant_type) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/sqla_oauth2/functions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/sqla_oauth2/functions.pyi new file mode 100644 index 0000000000..0590b36eda --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/sqla_oauth2/functions.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete +from collections.abc import Callable +from typing import type_check_only + +from authlib.oauth2.rfc6750 import BearerTokenValidator +from authlib.oauth2.rfc7009 import RevocationEndpoint + +@type_check_only +class _RevocationEndpoint(RevocationEndpoint): + def query_token(self, token, token_type_hint): ... + def revoke_token(self, token, request) -> None: ... + +@type_check_only +class _BearerTokenValidator(BearerTokenValidator): + def authenticate_token(self, token_string): ... + +def create_query_client_func(session, client_model) -> Callable[[Incomplete], Incomplete]: ... +def create_save_token_func(session, token_model) -> Callable[[Incomplete, Incomplete], None]: ... +def create_query_token_func(session, token_model) -> Callable[[Incomplete, Incomplete], Incomplete]: ... +def create_revocation_endpoint(session, token_model) -> type[_RevocationEndpoint]: ... +def create_bearer_token_validator(session, token_model) -> type[_BearerTokenValidator]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/sqla_oauth2/tokens_mixins.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/sqla_oauth2/tokens_mixins.pyi new file mode 100644 index 0000000000..25166d5f0f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/sqla_oauth2/tokens_mixins.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete + +from authlib.oauth2.rfc6749 import AuthorizationCodeMixin, TokenMixin + +class OAuth2AuthorizationCodeMixin(AuthorizationCodeMixin): + code: Incomplete + client_id: Incomplete + redirect_uri: Incomplete + response_type: Incomplete + scope: Incomplete + nonce: Incomplete + auth_time: Incomplete + acr: Incomplete + amr: Incomplete + code_challenge: Incomplete + code_challenge_method: Incomplete + def is_expired(self) -> bool: ... + def get_redirect_uri(self): ... + def get_scope(self): ... + def get_auth_time(self): ... + def get_acr(self): ... + def get_amr(self): ... + def get_nonce(self): ... + +class OAuth2TokenMixin(TokenMixin): + client_id: Incomplete + token_type: Incomplete + access_token: Incomplete + refresh_token: Incomplete + scope: Incomplete + issued_at: Incomplete + access_token_revoked_at: Incomplete + refresh_token_revoked_at: Incomplete + expires_in: Incomplete + def check_client(self, client) -> bool: ... + def get_scope(self): ... + def get_expires_in(self): ... + def is_revoked(self): ... + def is_expired(self) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/starlette_client/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/starlette_client/__init__.pyi new file mode 100644 index 0000000000..ea654f6f55 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/starlette_client/__init__.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +from ..base_client import BaseOAuth, OAuthError as OAuthError +from .apps import StarletteOAuth1App as StarletteOAuth1App, StarletteOAuth2App as StarletteOAuth2App +from .integration import StarletteIntegration as StarletteIntegration + +class OAuth(BaseOAuth): + oauth1_client_cls = StarletteOAuth1App + oauth2_client_cls = StarletteOAuth2App + framework_integration_cls = StarletteIntegration + config: Incomplete + def __init__(self, config=None, cache=None, fetch_token=None, update_token=None) -> None: ... + +__all__ = ["OAuth", "OAuthError", "StarletteIntegration", "StarletteOAuth1App", "StarletteOAuth2App"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/starlette_client/apps.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/starlette_client/apps.pyi new file mode 100644 index 0000000000..c5118f6382 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/starlette_client/apps.pyi @@ -0,0 +1,16 @@ +from ..base_client import BaseApp +from ..base_client.async_app import AsyncOAuth1Mixin, AsyncOAuth2Mixin +from ..base_client.async_openid import AsyncOpenIDMixin +from ..httpx_client import AsyncOAuth1Client, AsyncOAuth2Client + +class StarletteAppMixin: + async def save_authorize_data(self, request, **kwargs) -> None: ... + async def authorize_redirect(self, request, redirect_uri=None, **kwargs): ... 
+ +class StarletteOAuth1App(StarletteAppMixin, AsyncOAuth1Mixin, BaseApp): + client_cls = AsyncOAuth1Client + async def authorize_access_token(self, request, **kwargs): ... + +class StarletteOAuth2App(StarletteAppMixin, AsyncOAuth2Mixin, AsyncOpenIDMixin, BaseApp): + client_cls = AsyncOAuth2Client + async def authorize_access_token(self, request, **kwargs): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/starlette_client/integration.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/starlette_client/integration.pyi new file mode 100644 index 0000000000..6848c50141 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/integrations/starlette_client/integration.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from typing import Any + +from ..base_client import FrameworkIntegration + +class StarletteIntegration(FrameworkIntegration): + # annotated by source code + async def get_state_data(self, session: dict[str, Any] | None, state: str) -> dict[str, Any]: ... + async def set_state_data(self, session: dict[str, Any] | None, state: str, data: Any) -> None: ... + async def clear_state_data(self, session: dict[str, Any] | None, state: str) -> None: ... + def update_token(self, token, refresh_token=None, access_token=None) -> None: ... + @staticmethod + def load_config(oauth, name, params) -> dict[Incomplete, Incomplete]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/drafts/_jwe_algorithms.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/drafts/_jwe_algorithms.pyi index 06983be25d..92784727b9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/drafts/_jwe_algorithms.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/drafts/_jwe_algorithms.pyi @@ -1,6 +1,6 @@ from _typeshed import Incomplete from collections.abc import Iterable -from typing import ClassVar +from typing import ClassVar, Final from authlib.jose.rfc7516 import JWEAlgorithmWithTagAwareKeyAgreement @@ -13,17 +13,19 @@ class ECDH1PUAlgorithm(JWEAlgorithmWithTagAwareKeyAgreement): aeskw: Incomplete def __init__(self, key_size=None) -> None: ... def prepare_key(self, raw_data): ... - def generate_preset(self, enc_alg, key): ... + def generate_preset(self, enc_alg, key) -> dict[str, Incomplete]: ... def compute_shared_key(self, shared_key_e, shared_key_s): ... - def compute_fixed_info(self, headers, bit_size, tag): ... - def compute_derived_key(self, shared_key, fixed_info, bit_size): ... - def deliver_at_sender(self, sender_static_key, sender_ephemeral_key, recipient_pubkey, headers, bit_size, tag): ... - def deliver_at_recipient(self, recipient_key, sender_static_pubkey, sender_ephemeral_pubkey, headers, bit_size, tag): ... - def generate_keys_and_prepare_headers(self, enc_alg, key, sender_key, preset=None): ... + def compute_fixed_info(self, headers, bit_size, tag) -> bytes: ... + def compute_derived_key(self, shared_key, fixed_info, bit_size) -> bytes: ... + def deliver_at_sender(self, sender_static_key, sender_ephemeral_key, recipient_pubkey, headers, bit_size, tag) -> bytes: ... + def deliver_at_recipient( + self, recipient_key, sender_static_pubkey, sender_ephemeral_pubkey, headers, bit_size, tag + ) -> bytes: ... 
+ def generate_keys_and_prepare_headers(self, enc_alg, key, sender_key, preset=None) -> dict[str, Incomplete]: ... def agree_upon_key_and_wrap_cek(self, enc_alg, headers, key, sender_key, epk, cek, tag): ... - def wrap(self, enc_alg, headers, key, sender_key, preset=None): ... - def unwrap(self, enc_alg, ek, headers, key, sender_key, tag=None): ... + def wrap(self, enc_alg, headers, key, sender_key, preset=None) -> dict[str, Incomplete]: ... + def unwrap(self, enc_alg, ek, headers, key, sender_key, tag=None) -> bytes: ... -JWE_DRAFT_ALG_ALGORITHMS: Incomplete +JWE_DRAFT_ALG_ALGORITHMS: Final[list[ECDH1PUAlgorithm]] def register_jwe_alg_draft(cls) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/drafts/_jwe_enc_cryptography.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/drafts/_jwe_enc_cryptography.pyi index 0acb4ebb77..ab85a51a52 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/drafts/_jwe_enc_cryptography.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/drafts/_jwe_enc_cryptography.pyi @@ -9,5 +9,5 @@ class C20PEncAlgorithm(JWEEncAlgorithm): key_size: Incomplete CEK_SIZE: Incomplete def __init__(self, key_size) -> None: ... - def encrypt(self, msg, aad, iv, key): ... - def decrypt(self, ciphertext, aad, iv, tag, key): ... + def encrypt(self, msg, aad, iv, key) -> tuple[bytes, bytes]: ... + def decrypt(self, ciphertext, aad, iv, tag, key) -> bytes: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/jwk.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/jwk.pyi index 79246560f4..4a90a1bd2e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/jwk.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/jwk.pyi @@ -1,2 +1,7 @@ +from _typeshed import Incomplete +from typing_extensions import deprecated + +@deprecated("Please use `JsonWebKey` directly.") def loads(obj, kid=None): ... -def dumps(key, kty=None, **params): ... +@deprecated("Please use `JsonWebKey` directly.") +def dumps(key, kty=None, **params) -> dict[Incomplete, Incomplete]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7515/jws.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7515/jws.pyi index 04cc886a1c..7868d7fceb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7515/jws.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7515/jws.pyi @@ -1,5 +1,7 @@ from _typeshed import Incomplete +from .models import JWSObject + class JsonWebSignature: REGISTERED_HEADER_PARAMETER_NAMES: frozenset[str] MAX_CONTENT_LENGTH: int @@ -7,9 +9,9 @@ class JsonWebSignature: def __init__(self, algorithms=None, private_headers=None) -> None: ... @classmethod def register_algorithm(cls, algorithm) -> None: ... - def serialize_compact(self, protected, payload, key): ... - def deserialize_compact(self, s, key, decode=None): ... + def serialize_compact(self, protected, payload, key) -> bytes: ... + def deserialize_compact(self, s, key, decode=None) -> JWSObject: ... def serialize_json(self, header_obj, payload, key): ... - def deserialize_json(self, obj, key, decode=None): ... + def deserialize_json(self, obj, key, decode=None) -> JWSObject: ... def serialize(self, header, payload, key): ... 
- def deserialize(self, s, key, decode=None): ... + def deserialize(self, s, key, decode=None) -> JWSObject: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7515/models.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7515/models.pyi index 07f9a2091a..f1a68a55d4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7515/models.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7515/models.pyi @@ -1,11 +1,12 @@ from _typeshed import Incomplete +from typing_extensions import Self class JWSAlgorithm: name: Incomplete description: Incomplete algorithm_type: str algorithm_location: str - def prepare_key(self, raw_data) -> None: ... + def prepare_key(self, raw_data): ... def sign(self, msg, key): ... def verify(self, msg, sig, key) -> bool: ... @@ -14,7 +15,7 @@ class JWSHeader(dict[str, object]): header: Incomplete def __init__(self, protected, header) -> None: ... @classmethod - def from_dict(cls, obj): ... + def from_dict(cls, obj) -> Self: ... 
class JWSObject(dict[str, object]): header: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7516/jwe.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7516/jwe.pyi index 34e57122b8..e7b3d9a068 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7516/jwe.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7516/jwe.pyi @@ -1,21 +1,22 @@ from _typeshed import Incomplete +from collections import OrderedDict class JsonWebEncryption: - REGISTERED_HEADER_PARAMETER_NAMES: Incomplete - ALG_REGISTRY: Incomplete - ENC_REGISTRY: Incomplete - ZIP_REGISTRY: Incomplete + REGISTERED_HEADER_PARAMETER_NAMES: frozenset[str] + ALG_REGISTRY: dict[Incomplete, Incomplete] + ENC_REGISTRY: dict[Incomplete, Incomplete] + ZIP_REGISTRY: dict[Incomplete, Incomplete] def __init__(self, algorithms=None, private_headers=None) -> None: ... @classmethod def register_algorithm(cls, algorithm) -> None: ... - def serialize_compact(self, protected, payload, key, sender_key=None): ... - def serialize_json(self, header_obj, payload, keys, sender_key=None): ... + def serialize_compact(self, protected, payload, key, sender_key=None) -> bytes: ... + def serialize_json(self, header_obj, payload, keys, sender_key=None) -> OrderedDict[Incomplete, Incomplete]: ... def serialize(self, header, payload, key, sender_key=None): ... def deserialize_compact(self, s, key, decode=None, sender_key=None): ... - def deserialize_json(self, obj, key, decode=None, sender_key=None): ... + def deserialize_json(self, obj, key, decode=None, sender_key=None) -> dict[str, Incomplete]: ... def deserialize(self, obj, key, decode=None, sender_key=None): ... @staticmethod - def parse_json(obj): ... + def parse_json(obj) -> dict[Incomplete, Incomplete]: ... def get_header_alg(self, header): ... def get_header_enc(self, header): ... def get_header_zip(self, header): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7516/models.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7516/models.pyi index 533be13093..b93d5b6c22 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7516/models.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7516/models.pyi @@ -2,6 +2,7 @@ from _typeshed import Incomplete from abc import ABCMeta from collections.abc import Iterable from typing import ClassVar +from typing_extensions import Self class JWEAlgorithmBase(metaclass=ABCMeta): EXTRA_HEADERS: ClassVar[Iterable[str] | None] @@ -9,18 +10,18 @@ class JWEAlgorithmBase(metaclass=ABCMeta): description: str | None algorithm_type: str algorithm_location: str - def prepare_key(self, raw_data) -> None: ... - def generate_preset(self, enc_alg, key) -> None: ... + def prepare_key(self, raw_data): ... + def generate_preset(self, enc_alg, key): ... class JWEAlgorithm(JWEAlgorithmBase, metaclass=ABCMeta): - def wrap(self, enc_alg, headers, key, preset=None) -> None: ... - def unwrap(self, enc_alg, ek, headers, key) -> None: ... + def wrap(self, enc_alg, headers, key, preset=None): ... + def unwrap(self, enc_alg, ek, headers, key): ... class JWEAlgorithmWithTagAwareKeyAgreement(JWEAlgorithmBase, metaclass=ABCMeta): - def generate_keys_and_prepare_headers(self, enc_alg, key, sender_key, preset=None) -> None: ... - def agree_upon_key_and_wrap_cek(self, enc_alg, headers, key, sender_key, epk, cek, tag) -> None: ... - def wrap(self, enc_alg, headers, key, sender_key, preset=None) -> None: ... - def unwrap(self, enc_alg, ek, headers, key, sender_key, tag=None) -> None: ... + def generate_keys_and_prepare_headers(self, enc_alg, key, sender_key, preset=None): ... + def agree_upon_key_and_wrap_cek(self, enc_alg, headers, key, sender_key, epk, cek, tag): ... + def wrap(self, enc_alg, headers, key, sender_key, preset=None): ... 
+ def unwrap(self, enc_alg, ek, headers, key, sender_key, tag=None): ... class JWEEncAlgorithm: name: str | None @@ -32,8 +33,8 @@ class JWEEncAlgorithm: def generate_cek(self): ... def generate_iv(self): ... def check_iv(self, iv) -> None: ... - def encrypt(self, msg, aad, iv, key) -> None: ... - def decrypt(self, ciphertext, aad, iv, tag, key) -> None: ... + def encrypt(self, msg, aad, iv, key): ... + def decrypt(self, ciphertext, aad, iv, tag, key): ... class JWEZipAlgorithm: name: Incomplete @@ -49,7 +50,7 @@ class JWESharedHeader(dict[str, object]): def __init__(self, protected, unprotected) -> None: ... def update_protected(self, addition) -> None: ... @classmethod - def from_dict(cls, obj): ... + def from_dict(cls, obj) -> Self: ... class JWEHeader(dict[str, object]): protected: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/_cryptography_key.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/_cryptography_key.pyi index 9c2d57f6ca..86a5476b89 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/_cryptography_key.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/_cryptography_key.pyi @@ -1 +1,35 @@ -def load_pem_key(raw, ssh_type=None, key_type=None, password=None): ... +from _typeshed import ReadableBuffer +from collections.abc import Iterable +from typing import Literal, SupportsBytes, SupportsIndex, overload + +from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes, PublicKeyTypes +from cryptography.hazmat.primitives.serialization.ssh import SSHPublicKeyTypes + +@overload # if ssh_type is None +def load_pem_key( + raw: str | bytes | float | Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer, + ssh_type: None = None, + key_type: str | None = None, + password: bytes | None = None, +) -> PublicKeyTypes | PrivateKeyTypes: ... 
+@overload # if key_type == "public" +def load_pem_key( + raw: str | bytes | float | Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer, + ssh_type: ReadableBuffer | tuple[ReadableBuffer, ...] | None = None, + key_type: Literal["public"] = ..., + password: bytes | None = None, +) -> PublicKeyTypes: ... +@overload # if key_type is not empty, but not "public" +def load_pem_key( + raw: str | bytes | float | Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer, + ssh_type: ReadableBuffer | tuple[ReadableBuffer, ...] | None = None, + key_type: str = ..., + password: bytes | None = None, +) -> PrivateKeyTypes: ... +@overload # if ssh_type is not empty +def load_pem_key( + raw: str | bytes | float | Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer, + ssh_type: ReadableBuffer | tuple[ReadableBuffer, ...] = ..., + key_type: str | None = None, + password: bytes | None = None, +) -> SSHPublicKeyTypes | PublicKeyTypes | PrivateKeyTypes: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/asymmetric_key.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/asymmetric_key.pyi index d5caad3414..32b792bf6e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/asymmetric_key.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/asymmetric_key.pyi @@ -1,5 +1,6 @@ from _typeshed import Incomplete from typing import ClassVar +from typing_extensions import Self from authlib.jose.rfc7517 import Key @@ -13,7 +14,7 @@ class AsymmetricKey(Key): public_key: Incomplete def __init__(self, private_key=None, public_key=None, options=None) -> None: ... @property - def public_only(self): ... + def public_only(self) -> bool: ... def get_op_key(self, operation): ... def get_public_key(self): ... def get_private_key(self): ... 
@@ -23,16 +24,16 @@ class AsymmetricKey(Key): def dumps_public_key(self): ... def load_private_key(self): ... def load_public_key(self): ... - def as_dict(self, is_private: bool = False, **params): ... + def as_dict(self, is_private: bool = False, **params) -> dict[Incomplete, Incomplete]: ... def as_key(self, is_private: bool = False): ... def as_bytes(self, encoding=None, is_private: bool = False, password=None): ... def as_pem(self, is_private: bool = False, password=None): ... def as_der(self, is_private: bool = False, password=None): ... @classmethod - def import_dict_key(cls, raw, options=None): ... + def import_dict_key(cls, raw, options=None) -> Self: ... @classmethod - def import_key(cls, raw, options=None): ... + def import_key(cls, raw, options=None) -> Self: ... @classmethod - def validate_raw_key(cls, key): ... + def validate_raw_key(cls, key) -> bool: ... @classmethod def generate_key(cls, crv_or_size, options=None, is_private: bool = False) -> AsymmetricKey: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/base_key.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/base_key.pyi index 29450b4b45..414254efe0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/base_key.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/base_key.pyi @@ -10,20 +10,20 @@ class Key: options: Incomplete def __init__(self, options=None) -> None: ... @property - def tokens(self): ... + def tokens(self) -> dict[Incomplete, Incomplete]: ... @property def kid(self): ... def keys(self): ... def __getitem__(self, item): ... @property - def public_only(self) -> None: ... - def load_raw_key(self) -> None: ... - def load_dict_key(self) -> None: ... + def public_only(self): ... + def load_raw_key(self): ... + def load_dict_key(self): ... def check_key_op(self, operation) -> None: ... 
- def as_dict(self, is_private: bool = False, **params) -> None: ... - def as_json(self, is_private: bool = False, **params): ... - def thumbprint(self): ... + def as_dict(self, is_private: bool = False, **params): ... + def as_json(self, is_private: bool = False, **params) -> str: ... + def thumbprint(self) -> str: ... @classmethod def check_required_fields(cls, data) -> None: ... @classmethod - def validate_raw_key(cls, key) -> None: ... + def validate_raw_key(cls, key): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/jwk.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/jwk.pyi index 365e438e3e..efbcc909b0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/jwk.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/jwk.pyi @@ -4,7 +4,7 @@ from collections.abc import Collection, Mapping from authlib.jose.rfc7517 import Key, KeySet class JsonWebKey: - JWK_KEY_CLS: Incomplete + JWK_KEY_CLS: dict[Incomplete, Incomplete] @classmethod def generate_key(cls, kty, crv_or_size, options=None, is_private: bool = False): ... @classmethod diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/key_set.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/key_set.pyi index ebc8443de7..b1538b7240 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/key_set.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7517/key_set.pyi @@ -1,3 +1,4 @@ +from _typeshed import Incomplete from collections.abc import Collection from authlib.jose.rfc7517 import Key @@ -5,6 +6,6 @@ from authlib.jose.rfc7517 import Key class KeySet: keys: Collection[Key] def __init__(self, keys) -> None: ... - def as_dict(self, is_private: bool = False, **params): ... 
- def as_json(self, is_private: bool = False, **params): ... + def as_dict(self, is_private: bool = False, **params) -> dict[str, list[Incomplete]]: ... + def as_json(self, is_private: bool = False, **params) -> str: ... def find_by_kid(self, kid, **params): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/jwe_algs.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/jwe_algs.pyi index 3a40d1d148..ea1c2d897d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/jwe_algs.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/jwe_algs.pyi @@ -1,15 +1,18 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, ReadableBuffer from collections.abc import Iterable -from typing import ClassVar, Final +from typing import ClassVar, Final, SupportsBytes, SupportsIndex from authlib.jose.rfc7516 import JWEAlgorithm +from .oct_key import OctKey +from .rsa_key import RSAKey + class DirectAlgorithm(JWEAlgorithm): name: str description: str - def prepare_key(self, raw_data): ... - def generate_preset(self, enc_alg, key): ... - def wrap(self, enc_alg, headers, key, preset=None): ... + def prepare_key(self, raw_data) -> OctKey: ... + def generate_preset(self, enc_alg, key) -> dict[Incomplete, Incomplete]: ... + def wrap(self, enc_alg, headers, key, preset=None) -> dict[str, Incomplete]: ... def unwrap(self, enc_alg, ek, headers, key): ... class RSAAlgorithm(JWEAlgorithm): @@ -18,9 +21,9 @@ class RSAAlgorithm(JWEAlgorithm): description: str padding: Incomplete def __init__(self, name, description, pad_fn) -> None: ... - def prepare_key(self, raw_data): ... - def generate_preset(self, enc_alg, key): ... - def wrap(self, enc_alg, headers, key, preset=None): ... + def prepare_key(self, raw_data) -> RSAKey: ... + def generate_preset(self, enc_alg, key) -> dict[str, Incomplete]: ... 
+ def wrap(self, enc_alg, headers, key, preset=None) -> dict[str, Incomplete]: ... def unwrap(self, enc_alg, ek, headers, key): ... class AESAlgorithm(JWEAlgorithm): @@ -28,11 +31,11 @@ class AESAlgorithm(JWEAlgorithm): description: str key_size: Incomplete def __init__(self, key_size) -> None: ... - def prepare_key(self, raw_data): ... - def generate_preset(self, enc_alg, key): ... - def wrap_cek(self, cek, key): ... - def wrap(self, enc_alg, headers, key, preset=None): ... - def unwrap(self, enc_alg, ek, headers, key): ... + def prepare_key(self, raw_data) -> OctKey: ... + def generate_preset(self, enc_alg, key) -> dict[str, Incomplete]: ... + def wrap_cek(self, cek, key) -> dict[str, Incomplete]: ... + def wrap(self, enc_alg, headers, key, preset=None) -> dict[str, Incomplete]: ... + def unwrap(self, enc_alg, ek, headers, key) -> bytes: ... class AESGCMAlgorithm(JWEAlgorithm): EXTRA_HEADERS: ClassVar[Iterable[str]] @@ -40,10 +43,10 @@ class AESGCMAlgorithm(JWEAlgorithm): description: str key_size: Incomplete def __init__(self, key_size) -> None: ... - def prepare_key(self, raw_data): ... - def generate_preset(self, enc_alg, key): ... - def wrap(self, enc_alg, headers, key, preset=None): ... - def unwrap(self, enc_alg, ek, headers, key): ... + def prepare_key(self, raw_data) -> OctKey: ... + def generate_preset(self, enc_alg, key) -> dict[str, Incomplete]: ... + def wrap(self, enc_alg, headers, key, preset=None) -> dict[str, Incomplete]: ... + def unwrap(self, enc_alg, ek, headers, key) -> bytes: ... class ECDHESAlgorithm(JWEAlgorithm): EXTRA_HEADERS: ClassVar[Iterable[str]] @@ -54,13 +57,15 @@ class ECDHESAlgorithm(JWEAlgorithm): aeskw: Incomplete def __init__(self, key_size=None) -> None: ... def prepare_key(self, raw_data): ... - def generate_preset(self, enc_alg, key): ... + def generate_preset(self, enc_alg, key) -> dict[str, Incomplete]: ... def compute_fixed_info(self, headers, bit_size): ... 
- def compute_derived_key(self, shared_key, fixed_info, bit_size): ... - def deliver(self, key, pubkey, headers, bit_size): ... - def wrap(self, enc_alg, headers, key, preset=None): ... - def unwrap(self, enc_alg, ek, headers, key): ... + def compute_derived_key(self, shared_key, fixed_info, bit_size) -> bytes: ... + def deliver(self, key, pubkey, headers, bit_size) -> bytes: ... + def wrap(self, enc_alg, headers, key, preset=None) -> dict[str, Incomplete]: ... + def unwrap(self, enc_alg, ek, headers, key) -> bytes: ... -def u32be_len_input(s, base64: bool = False): ... +def u32be_len_input( + s: str | bytes | float | Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer, base64: bool = False +) -> bytes: ... JWE_ALG_ALGORITHMS: Final[list[JWEAlgorithm]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/jwe_encs.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/jwe_encs.pyi index 6d1dc2d73c..1f71240e48 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/jwe_encs.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/jwe_encs.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete +from typing import Final from authlib.jose.rfc7516 import JWEEncAlgorithm @@ -11,8 +12,8 @@ class CBCHS2EncAlgorithm(JWEEncAlgorithm): CEK_SIZE: Incomplete hash_alg: Incomplete def __init__(self, key_size, hash_type) -> None: ... - def encrypt(self, msg, aad, iv, key): ... - def decrypt(self, ciphertext, aad, iv, tag, key): ... + def encrypt(self, msg, aad, iv, key) -> tuple[bytes, bytes]: ... + def decrypt(self, ciphertext, aad, iv, tag, key) -> bytes: ... class GCMEncAlgorithm(JWEEncAlgorithm): IV_SIZE: int @@ -21,7 +22,7 @@ class GCMEncAlgorithm(JWEEncAlgorithm): key_size: Incomplete CEK_SIZE: Incomplete def __init__(self, key_size) -> None: ... - def encrypt(self, msg, aad, iv, key): ... 
- def decrypt(self, ciphertext, aad, iv, tag, key): ... + def encrypt(self, msg, aad, iv, key) -> tuple[bytes, bytes]: ... + def decrypt(self, ciphertext, aad, iv, tag, key) -> bytes: ... -JWE_ENC_ALGORITHMS: Incomplete +JWE_ENC_ALGORITHMS: Final[list[CBCHS2EncAlgorithm | GCMEncAlgorithm]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/jws_algs.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/jws_algs.pyi index 47ca3a64e6..6f0246acea 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/jws_algs.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/jws_algs.pyi @@ -6,7 +6,7 @@ from authlib.jose.rfc7515 import JWSAlgorithm class NoneAlgorithm(JWSAlgorithm): name: str description: str - def prepare_key(self, raw_data) -> None: ... + def prepare_key(self, raw_data): ... def sign(self, msg, key): ... def verify(self, msg, sig, key) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/oct_key.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/oct_key.pyi index bb2e6873cf..af1d546b7b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/oct_key.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/oct_key.pyi @@ -1,5 +1,6 @@ from _typeshed import Incomplete from typing import ClassVar, Final +from typing_extensions import Self from authlib.jose.rfc7517 import Key @@ -15,10 +16,10 @@ class OctKey(Key): def get_op_key(self, operation): ... def load_raw_key(self) -> None: ... def load_dict_key(self) -> None: ... - def as_dict(self, is_private: bool = False, **params): ... + def as_dict(self, is_private: bool = False, **params) -> dict[Incomplete, Incomplete]: ... @classmethod - def validate_raw_key(cls, key): ... + def validate_raw_key(cls, key) -> bool: ... 
@classmethod - def import_key(cls, raw, options=None): ... + def import_key(cls, raw, options=None) -> Self: ... @classmethod - def generate_key(cls, key_size: int = 256, options=None, is_private: bool = True): ... + def generate_key(cls, key_size: int = 256, options=None, is_private: bool = True) -> Self: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/rsa_key.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/rsa_key.pyi index 45435c855f..18e332c2f8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/rsa_key.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/rsa_key.pyi @@ -1,4 +1,5 @@ from typing import ClassVar +from typing_extensions import Self from authlib.jose.rfc7517 import AsymmetricKey from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey @@ -18,6 +19,6 @@ class RSAKey(AsymmetricKey): @classmethod def generate_key(cls, key_size: int = 2048, options=None, is_private: bool = False) -> RSAKey: ... @classmethod - def import_dict_key(cls, raw, options=None): ... + def import_dict_key(cls, raw, options=None) -> Self: ... def has_all_prime_factors(obj) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/util.pyi index f691a9d274..1edc796a97 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7518/util.pyi @@ -1,2 +1,4 @@ -def encode_int(num, bits): ... -def decode_int(b): ... +from _typeshed import ReadableBuffer + +def encode_int(num, bits) -> bytes: ... +def decode_int(b: ReadableBuffer) -> int: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7519/jwt.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7519/jwt.pyi index ea21ff1caf..b565feae8b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7519/jwt.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc7519/jwt.pyi @@ -17,7 +17,7 @@ class JsonWebToken: def __init__(self, algorithms, private_headers=None) -> None: ... def check_sensitive_data(self, payload) -> None: ... - def encode(self, header, payload, key, check: bool = True): ... + def encode(self, header, payload, key, check: bool = True) -> bytes: ... @overload def decode( self, @@ -37,7 +37,7 @@ class JsonWebToken: claims_params=None, ) -> _T: ... -def decode_payload(bytes_payload): ... +def decode_payload(bytes_payload) -> dict[Incomplete, Incomplete]: ... _TL = TypeVar("_TL", bound=tuple[Any, ...] | list[Any]) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc8037/jws_eddsa.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc8037/jws_eddsa.pyi index c6f1e628a1..93bb7d3d72 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc8037/jws_eddsa.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc8037/jws_eddsa.pyi @@ -1,10 +1,12 @@ from authlib.jose.rfc7515 import JWSAlgorithm +from .okp_key import OKPKey + class EdDSAAlgorithm(JWSAlgorithm): name: str description: str - def prepare_key(self, raw_data): ... + def prepare_key(self, raw_data) -> OKPKey: ... def sign(self, msg, key): ... - def verify(self, msg, sig, key): ... + def verify(self, msg, sig, key) -> bool: ... def register_jws_rfc8037(cls) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc8037/okp_key.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc8037/okp_key.pyi index 085f798df7..47186b8fdb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc8037/okp_key.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/jose/rfc8037/okp_key.pyi @@ -17,9 +17,9 @@ class OKPKey(AsymmetricKey): PUBLIC_KEY_CLS: ClassVar[tuple[type, ...]] PRIVATE_KEY_CLS: ClassVar[tuple[type, ...]] SSH_PUBLIC_PREFIX: ClassVar[bytes] - def exchange_shared_key(self, pubkey): ... + def exchange_shared_key(self, pubkey) -> bytes: ... @staticmethod - def get_key_curve(key): ... + def get_key_curve(key) -> str | None: ... def load_private_key(self) -> Ed25519PrivateKey | Ed448PrivateKey | X25519PrivateKey | X448PrivateKey: ... def load_public_key(self) -> Ed25519PublicKey | Ed448PublicKey | X25519PublicKey | X448PublicKey: ... def dumps_private_key(self) -> dict[str, str | None]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/client.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/client.pyi index 23a3b04573..460ad8ae3e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/client.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/client.pyi @@ -1,5 +1,5 @@ from _typeshed import Incomplete -from typing import Any +from typing import Any, NoReturn from authlib.oauth1 import ClientAuth @@ -37,5 +37,5 @@ class OAuth1Client: def parse_authorization_response(self, url): ... def parse_response_token(self, status_code, text): ... @staticmethod - def handle_error(error_type, error_description) -> None: ... + def handle_error(error_type, error_description) -> NoReturn: ... def __del__(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/authorization_server.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/authorization_server.pyi index 894cd4646f..5b124285c5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/authorization_server.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/authorization_server.pyi @@ -5,8 +5,8 @@ from authlib.oauth1.rfc5849.base_server import BaseServer class AuthorizationServer(BaseServer): TOKEN_RESPONSE_HEADER: Incomplete TEMPORARY_CREDENTIALS_METHOD: str - def create_oauth1_request(self, request) -> None: ... - def handle_response(self, status_code, payload, headers) -> None: ... + def create_oauth1_request(self, request): ... + def handle_response(self, status_code, payload, headers): ... def handle_error_response(self, error): ... def validate_temporary_credentials_request(self, request): ... def create_temporary_credentials_response(self, request=None): ... @@ -14,8 +14,8 @@ class AuthorizationServer(BaseServer): def create_authorization_response(self, request, grant_user=None): ... def validate_token_request(self, request): ... def create_token_response(self, request): ... - def create_temporary_credential(self, request) -> None: ... - def get_temporary_credential(self, request) -> None: ... - def delete_temporary_credential(self, request) -> None: ... - def create_authorization_verifier(self, request) -> None: ... - def create_token_credential(self, request) -> None: ... + def create_temporary_credential(self, request): ... + def get_temporary_credential(self, request): ... + def delete_temporary_credential(self, request): ... + def create_authorization_verifier(self, request): ... + def create_token_credential(self, request): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/base_server.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/base_server.pyi index 17f8ad9531..2508c95303 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/base_server.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/base_server.pyi @@ -8,5 +8,5 @@ class BaseServer: def register_signature_method(cls, name, verify) -> None: ... def validate_timestamp_and_nonce(self, request) -> None: ... def validate_oauth_signature(self, request) -> None: ... - def get_client_by_id(self, client_id) -> None: ... - def exists_nonce(self, nonce, request) -> None: ... + def get_client_by_id(self, client_id): ... + def exists_nonce(self, nonce, request): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/client_auth.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/client_auth.pyi index 5a4b54f1ef..e0214cc5bb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/client_auth.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/client_auth.pyi @@ -32,10 +32,10 @@ class ClientAuth: realm=None, force_include_body: bool = False, ) -> None: ... - def get_oauth_signature(self, method, uri, headers, body): ... - def get_oauth_params(self, nonce, timestamp): ... - def sign(self, method, uri, headers, body): ... - def prepare(self, method, uri, headers, body): ... + def get_oauth_signature(self, method, uri, headers, body) -> str: ... + def get_oauth_params(self, nonce, timestamp) -> list[Incomplete]: ... + def sign(self, method, uri, headers, body) -> tuple[Incomplete, Incomplete, Incomplete]: ... + def prepare(self, method, uri, headers, body) -> tuple[Incomplete, ...]: ... -def generate_nonce(): ... 
-def generate_timestamp(): ... +def generate_nonce() -> str: ... +def generate_timestamp() -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/errors.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/errors.pyi index 7853be8199..f6140d851d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/errors.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/errors.pyi @@ -2,7 +2,7 @@ from authlib.common.errors import AuthlibHTTPError class OAuth1Error(AuthlibHTTPError): def __init__(self, description=None, uri=None, status_code=None) -> None: ... - def get_headers(self): ... + def get_headers(self) -> list[tuple[str, str]]: ... class InsecureTransportError(OAuth1Error): error: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/models.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/models.pyi index 4deaea6efd..dfeba9680b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/models.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/models.pyi @@ -1,16 +1,16 @@ class ClientMixin: - def get_default_redirect_uri(self) -> None: ... - def get_client_secret(self) -> None: ... - def get_rsa_public_key(self) -> None: ... + def get_default_redirect_uri(self): ... + def get_client_secret(self): ... + def get_rsa_public_key(self): ... class TokenCredentialMixin: - def get_oauth_token(self) -> None: ... - def get_oauth_token_secret(self) -> None: ... + def get_oauth_token(self): ... + def get_oauth_token_secret(self): ... class TemporaryCredentialMixin(TokenCredentialMixin): - def get_client_id(self) -> None: ... - def get_redirect_uri(self) -> None: ... - def check_verifier(self, verifier) -> None: ... + def get_client_id(self): ... 
+ def get_redirect_uri(self): ... + def check_verifier(self, verifier) -> bool: ... class TemporaryCredential(dict[str, object], TemporaryCredentialMixin): def get_client_id(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/parameters.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/parameters.pyi index 266c3f7fb9..543a091de4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/parameters.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/parameters.pyi @@ -1,3 +1,3 @@ def prepare_headers(oauth_params, headers=None, realm=None): ... -def prepare_form_encoded_body(oauth_params, body): ... +def prepare_form_encoded_body(oauth_params, body) -> str: ... def prepare_request_uri_query(oauth_params, uri): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/resource_protector.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/resource_protector.pyi index 78c3fbe96c..45f147d4d8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/resource_protector.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/resource_protector.pyi @@ -1,5 +1,7 @@ from authlib.oauth1.rfc5849.base_server import BaseServer +from .wrapper import OAuth1Request + class ResourceProtector(BaseServer): - def validate_request(self, method, uri, body, headers): ... - def get_token_credential(self, request) -> None: ... + def validate_request(self, method, uri, body, headers) -> OAuth1Request: ... + def get_token_credential(self, request): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/rsa.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/rsa.pyi index c08bb419df..8e650fbd9c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/rsa.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/rsa.pyi @@ -1,2 +1,2 @@ def sign_sha1(msg, rsa_private_key): ... -def verify_sha1(sig, msg, rsa_public_key): ... +def verify_sha1(sig, msg, rsa_public_key) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/signature.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/signature.pyi index 5e459220c5..1fb8119798 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/signature.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/signature.pyi @@ -1,20 +1,22 @@ -SIGNATURE_HMAC_SHA1: str -SIGNATURE_RSA_SHA1: str -SIGNATURE_PLAINTEXT: str -SIGNATURE_TYPE_HEADER: str -SIGNATURE_TYPE_QUERY: str -SIGNATURE_TYPE_BODY: str +from typing import Final -def construct_base_string(method, uri, params, host=None): ... +SIGNATURE_HMAC_SHA1: Final = "HMAC-SHA1" +SIGNATURE_RSA_SHA1: Final = "RSA-SHA1" +SIGNATURE_PLAINTEXT: Final = "PLAINTEXT" +SIGNATURE_TYPE_HEADER: Final = "HEADER" +SIGNATURE_TYPE_QUERY: Final = "QUERY" +SIGNATURE_TYPE_BODY: Final = "BODY" + +def construct_base_string(method, uri, params, host=None) -> str: ... def normalize_base_string_uri(uri, host=None): ... -def normalize_parameters(params): ... -def generate_signature_base_string(request): ... -def hmac_sha1_signature(base_string, client_secret, token_secret): ... -def rsa_sha1_signature(base_string, rsa_private_key): ... -def plaintext_signature(client_secret, token_secret): ... -def sign_hmac_sha1(client, request): ... 
-def sign_rsa_sha1(client, request): ... -def sign_plaintext(client, request): ... -def verify_hmac_sha1(request): ... -def verify_rsa_sha1(request): ... -def verify_plaintext(request): ... +def normalize_parameters(params) -> str: ... +def generate_signature_base_string(request) -> str: ... +def hmac_sha1_signature(base_string, client_secret, token_secret) -> str: ... +def rsa_sha1_signature(base_string, rsa_private_key) -> str: ... +def plaintext_signature(client_secret, token_secret) -> str: ... +def sign_hmac_sha1(client, request) -> str: ... +def sign_rsa_sha1(client, request) -> str: ... +def sign_plaintext(client, request) -> str: ... +def verify_hmac_sha1(request) -> bool: ... +def verify_rsa_sha1(request) -> bool: ... +def verify_plaintext(request) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/util.pyi index 7d64aaa661..a7e79606ff 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth1/rfc5849/util.pyi @@ -1,2 +1,2 @@ -def escape(s): ... -def unescape(s): ... +def escape(s) -> str: ... +def unescape(s: str | bytes) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/base.pyi index 366694c719..218040b4fc 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/base.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/base.pyi @@ -18,5 +18,5 @@ class OAuth2Error(AuthlibHTTPError): redirect_fragment: bool = False, error=None, ) -> None: ... - def get_body(self): ... + def get_body(self) -> list[Incomplete]: ... def __call__(self, uri=None): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/client.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/client.pyi index 17fa264628..56914286ef 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/client.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/client.pyi @@ -42,7 +42,7 @@ class OAuth2Client: **metadata, ) -> None: ... def register_client_auth_method(self, auth) -> None: ... - def client_auth(self, auth_method): ... + def client_auth(self, auth_method) -> ClientAuth: ... @property def token(self): ... @token.setter diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/authorization_server.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/authorization_server.pyi index 49d3afce88..5614f93883 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/authorization_server.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/authorization_server.pyi @@ -28,7 +28,7 @@ class AuthorizationServer(Hookable): def authenticate_client(self, request: OAuth2Request, methods: Collection[str], endpoint: str = "token") -> ClientMixin: ... def register_client_auth_method(self, method, func) -> None: ... def register_extension(self, extension) -> None: ... - def get_error_uri(self, request, error) -> None: ... + def get_error_uri(self, request, error): ... def send_signal(self, name, *args: object, **kwargs: object) -> None: ... def create_oauth2_request(self, request) -> OAuth2Request: ... def create_json_request(self, request) -> JsonRequest: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/grants/authorization_code.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/grants/authorization_code.pyi index 356a77d2d6..e121a2d014 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/grants/authorization_code.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/grants/authorization_code.pyi @@ -19,9 +19,9 @@ class AuthorizationCodeGrant(BaseGrant, AuthorizationEndpointMixin, TokenEndpoin def validate_token_request(self) -> None: ... def create_token_response(self) -> _ServerResponse: ... def generate_authorization_code(self) -> str: ... - def save_authorization_code(self, code: str, request: OAuth2Request) -> None: ... + def save_authorization_code(self, code: str, request: OAuth2Request): ... def query_authorization_code(self, code: str, client: ClientMixin): ... - def delete_authorization_code(self, authorization_code) -> None: ... + def delete_authorization_code(self, authorization_code): ... def authenticate_user(self, authorization_code): ... def validate_code_authorization_request(grant: AuthorizationCodeGrant) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/grants/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/grants/base.pyi index 88b9779595..4ff9c136ef 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/grants/base.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/grants/base.pyi @@ -36,7 +36,7 @@ class TokenEndpointMixin: TOKEN_ENDPOINT_HTTP_METHODS: Incomplete GRANT_TYPE: Incomplete @classmethod - def check_token_endpoint(cls, request: OAuth2Request): ... + def check_token_endpoint(cls, request: OAuth2Request) -> bool: ... 
def validate_token_request(self) -> None: ... def create_token_response(self) -> _ServerResponse: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/grants/resource_owner_password_credentials.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/grants/resource_owner_password_credentials.pyi index ad18a6eb0d..13037ed8d2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/grants/resource_owner_password_credentials.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/grants/resource_owner_password_credentials.pyi @@ -8,4 +8,4 @@ class ResourceOwnerPasswordCredentialsGrant(BaseGrant, TokenEndpointMixin): GRANT_TYPE: str def validate_token_request(self) -> None: ... def create_token_response(self): ... - def authenticate_user(self, username, password) -> None: ... + def authenticate_user(self, username, password): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/parameters.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/parameters.pyi index 1a69dd8247..615c0493b7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/parameters.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/parameters.pyi @@ -1,4 +1,6 @@ +from _typeshed import Incomplete + def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None, scope=None, state=None, **kwargs): ... -def prepare_token_request(grant_type, body: str = "", redirect_uri=None, **kwargs): ... -def parse_authorization_code_response(uri, state=None): ... -def parse_implicit_response(uri, state=None): ... +def prepare_token_request(grant_type, body: str = "", redirect_uri=None, **kwargs) -> str: ... +def parse_authorization_code_response(uri, state=None) -> dict[Incomplete, Incomplete]: ... 
+def parse_implicit_response(uri, state=None) -> dict[Incomplete, Incomplete]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/resource_protector.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/resource_protector.pyi index db0f519663..930a52fcc2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/resource_protector.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/resource_protector.pyi @@ -7,7 +7,7 @@ class TokenValidator: def __init__(self, realm=None, **extra_attributes) -> None: ... @staticmethod def scope_insufficient(token_scopes, required_scopes): ... - def authenticate_token(self, token_string) -> None: ... + def authenticate_token(self, token_string): ... def validate_request(self, request) -> None: ... def validate_token(self, token, scopes, request) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/token_endpoint.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/token_endpoint.pyi index 72fcae8f9f..80c20beaf5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/token_endpoint.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/token_endpoint.pyi @@ -9,5 +9,5 @@ class TokenEndpoint: def __call__(self, request): ... def create_endpoint_request(self, request): ... def authenticate_endpoint_client(self, request): ... - def authenticate_token(self, request, client) -> None: ... - def create_endpoint_response(self, request) -> None: ... + def authenticate_token(self, request, client): ... + def create_endpoint_response(self, request): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/wrappers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/wrappers.pyi index 54a40f05a1..ab80ddf1a7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/wrappers.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6749/wrappers.pyi @@ -1,5 +1,5 @@ class OAuth2Token(dict[str, object]): def __init__(self, params) -> None: ... - def is_expired(self, leeway: int = 60): ... + def is_expired(self, leeway: int = 60) -> bool | None: ... @classmethod def from_dict(cls, token): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6750/errors.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6750/errors.pyi index 5144b65718..570a94f286 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6750/errors.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6750/errors.pyi @@ -11,7 +11,7 @@ class InvalidTokenError(OAuth2Error): realm: Incomplete extra_attributes: Incomplete def __init__(self, description=None, uri=None, status_code=None, state=None, realm=None, **extra_attributes) -> None: ... - def get_headers(self): ... + def get_headers(self) -> list[tuple[str, str]]: ... class InsufficientScopeError(OAuth2Error): error: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6750/parameters.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6750/parameters.pyi index 3eb07c4cc2..2548a28624 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6750/parameters.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6750/parameters.pyi @@ -1,4 +1,4 @@ def add_to_uri(token, uri): ... 
def add_to_headers(token, headers=None): ... -def add_to_body(token, body=None): ... +def add_to_body(token, body=None) -> str: ... def add_bearer_token(token, uri, headers, body, placement: str = "header"): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6750/validator.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6750/validator.pyi index 19686a9411..87adfb5a67 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6750/validator.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc6750/validator.pyi @@ -2,5 +2,5 @@ from authlib.oauth2.rfc6749 import TokenValidator class BearerTokenValidator(TokenValidator): TOKEN_TYPE: str - def authenticate_token(self, token_string) -> None: ... + def authenticate_token(self, token_string): ... def validate_token(self, token, scopes, request) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7009/revocation.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7009/revocation.pyi index 93e53aedc7..7bd58dc415 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7009/revocation.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7009/revocation.pyi @@ -5,5 +5,5 @@ class RevocationEndpoint(TokenEndpoint): def authenticate_token(self, request, client): ... def check_params(self, request, client) -> None: ... def create_endpoint_response(self, request): ... - def query_token(self, token_string, token_type_hint) -> None: ... - def revoke_token(self, token, request) -> None: ... + def query_token(self, token_string, token_type_hint): ... + def revoke_token(self, token, request): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/assertion.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/assertion.pyi index 01ddea88ac..bef3fad2cb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/assertion.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/assertion.pyi @@ -1,5 +1,5 @@ def sign_jwt_bearer_assertion( key, issuer, audience, subject=None, issued_at=None, expires_at=None, claims=None, header=None, **kwargs -): ... -def client_secret_jwt_sign(client_secret, client_id, token_endpoint, alg: str = "HS256", claims=None, **kwargs): ... -def private_key_jwt_sign(private_key, client_id, token_endpoint, alg: str = "RS256", claims=None, **kwargs): ... +) -> bytes: ... +def client_secret_jwt_sign(client_secret, client_id, token_endpoint, alg: str = "HS256", claims=None, **kwargs) -> bytes: ... +def private_key_jwt_sign(private_key, client_id, token_endpoint, alg: str = "RS256", claims=None, **kwargs) -> bytes: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/auth.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/auth.pyi index 0ca742f08e..95e22de2b2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/auth.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/auth.pyi @@ -7,10 +7,10 @@ class ClientSecretJWT: claims: Incomplete headers: Incomplete def __init__(self, token_endpoint=None, claims=None, headers=None, alg=None) -> None: ... - def sign(self, auth, token_endpoint): ... + def sign(self, auth, token_endpoint) -> bytes: ... def __call__(self, auth, method, uri, headers, body): ... class PrivateKeyJWT(ClientSecretJWT): name: str alg: str - def sign(self, auth, token_endpoint): ... 
+ def sign(self, auth, token_endpoint) -> bytes: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/client.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/client.pyi index 728141d878..f0c5895f6f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/client.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/client.pyi @@ -1,6 +1,8 @@ from logging import Logger from typing import Final +from authlib.jose.rfc7519.claims import JWTClaims + ASSERTION_TYPE: Final[str] log: Logger @@ -12,8 +14,8 @@ class JWTBearerClientAssertion: def __init__(self, token_url: str, validate_jti: bool = True, leeway: int = 60) -> None: ... def __call__(self, query_client, request): ... def create_claims_options(self): ... - def process_assertion_claims(self, assertion, resolve_key): ... + def process_assertion_claims(self, assertion, resolve_key) -> JWTClaims: ... def authenticate_client(self, client): ... def create_resolve_key_func(self, query_client, request): ... - def validate_jti(self, claims, jti) -> None: ... - def resolve_client_public_key(self, client, headers) -> None: ... + def validate_jti(self, claims, jti): ... + def resolve_client_public_key(self, client, headers): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/jwt_bearer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/jwt_bearer.pyi index 151e5dbef9..ea9209205c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/jwt_bearer.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7523/jwt_bearer.pyi @@ -1,6 +1,7 @@ from logging import Logger from typing import ClassVar, Final +from authlib.jose.rfc7519.claims import JWTClaims from authlib.oauth2.rfc6749 import BaseGrant, TokenEndpointMixin log: Logger @@ -12,11 +13,11 @@ class JWTBearerGrant(BaseGrant, TokenEndpointMixin): LEEWAY: ClassVar[int] @staticmethod def sign(key, issuer, audience, subject=None, issued_at=None, expires_at=None, claims=None, **kwargs): ... - def process_assertion_claims(self, assertion): ... + def process_assertion_claims(self, assertion) -> JWTClaims: ... def resolve_public_key(self, headers, payload): ... def validate_token_request(self) -> None: ... def create_token_response(self): ... - def resolve_issuer_client(self, issuer) -> None: ... - def resolve_client_key(self, client, headers, payload) -> None: ... - def authenticate_user(self, subject) -> None: ... - def has_granted_permission(self, client, user) -> None: ... + def resolve_issuer_client(self, issuer): ... + def resolve_client_key(self, client, headers, payload): ... + def authenticate_user(self, subject): ... + def has_granted_permission(self, client, user) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7591/endpoint.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7591/endpoint.pyi index dad1e00392..d3d94d4427 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7591/endpoint.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7591/endpoint.pyi @@ -18,7 +18,7 @@ class ClientRegistrationEndpoint: def create_endpoint_request(self, request): ... def generate_client_id(self, request) -> str: ... def generate_client_secret(self, request) -> str: ... - def get_server_metadata(self) -> None: ... - def authenticate_token(self, request) -> None: ... - def resolve_public_key(self, request) -> None: ... - def save_client(self, client_info, client_metadata, request) -> None: ... + def get_server_metadata(self): ... + def authenticate_token(self, request): ... + def resolve_public_key(self, request): ... + def save_client(self, client_info, client_metadata, request): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7592/endpoint.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7592/endpoint.pyi index 4f0b0e5276..fce1d9d54e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7592/endpoint.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7592/endpoint.pyi @@ -14,11 +14,11 @@ class ClientConfigurationEndpoint: def create_update_client_response(self, client, request): ... def extract_client_metadata(self, request): ... def introspect_client(self, client): ... - def generate_client_registration_info(self, client, request) -> None: ... - def authenticate_token(self, request) -> None: ... - def authenticate_client(self, request) -> None: ... - def revoke_access_token(self, token, request) -> None: ... 
- def check_permission(self, client, request) -> None: ... - def delete_client(self, client, request) -> None: ... - def update_client(self, client, client_metadata, request) -> None: ... - def get_server_metadata(self) -> None: ... + def generate_client_registration_info(self, client, request): ... + def authenticate_token(self, request): ... + def authenticate_client(self, request): ... + def revoke_access_token(self, token, request): ... + def check_permission(self, client, request): ... + def delete_client(self, client, request): ... + def update_client(self, client, client_metadata, request): ... + def get_server_metadata(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7662/introspection.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7662/introspection.pyi index 56766e27be..58d178475a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7662/introspection.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7662/introspection.pyi @@ -6,6 +6,6 @@ class IntrospectionEndpoint(TokenEndpoint): def check_params(self, request, client) -> None: ... def create_endpoint_response(self, request): ... def create_introspection_payload(self, token): ... - def check_permission(self, token, client, request) -> None: ... - def query_token(self, token_string, token_type_hint) -> None: ... - def introspect_token(self, token) -> None: ... + def check_permission(self, token, client, request): ... + def query_token(self, token_string, token_type_hint): ... + def introspect_token(self, token): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7662/token_validator.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7662/token_validator.pyi index 897e7ec548..8b7a74400a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7662/token_validator.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc7662/token_validator.pyi @@ -2,6 +2,6 @@ from authlib.oauth2.rfc6749 import TokenValidator class IntrospectTokenValidator(TokenValidator): TOKEN_TYPE: str - def introspect_token(self, token_string) -> None: ... + def introspect_token(self, token_string): ... def authenticate_token(self, token_string): ... def validate_token(self, token, scopes, request) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8414/well_known.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8414/well_known.pyi index 9289f0d478..bcf7aae84e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8414/well_known.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8414/well_known.pyi @@ -1 +1 @@ -def get_well_known_url(issuer, external: bool = False, suffix: str = "oauth-authorization-server"): ... +def get_well_known_url(issuer: str, external: bool = False, suffix: str = "oauth-authorization-server") -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8628/device_code.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8628/device_code.pyi index 0e7d221726..da0bd0bd1b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8628/device_code.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8628/device_code.pyi @@ -12,6 +12,6 @@ class DeviceCodeGrant(BaseGrant, TokenEndpointMixin): def validate_token_request(self) -> None: ... def create_token_response(self): ... def validate_device_credential(self, credential): ... - def query_device_credential(self, device_code) -> None: ... - def query_user_grant(self, user_code) -> None: ... - def should_slow_down(self, credential) -> None: ... + def query_device_credential(self, device_code): ... + def query_user_grant(self, user_code): ... + def should_slow_down(self, credential): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8628/endpoint.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8628/endpoint.pyi index 5d0718e548..06d0d884a9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8628/endpoint.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8628/endpoint.pyi @@ -14,8 +14,8 @@ class DeviceAuthorizationEndpoint: def create_endpoint_response(self, request): ... def generate_user_code(self): ... def generate_device_code(self): ... - def get_verification_uri(self) -> None: ... - def save_device_credential(self, client_id, scope, data) -> None: ... + def get_verification_uri(self): ... + def save_device_credential(self, client_id, scope, data): ... def create_string_user_code(): ... def create_digital_user_code(): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8628/models.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8628/models.pyi index 7da0cca1e1..a9b987cd86 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8628/models.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc8628/models.pyi @@ -1,8 +1,8 @@ class DeviceCredentialMixin: - def get_client_id(self) -> None: ... - def get_scope(self) -> None: ... - def get_user_code(self) -> None: ... - def is_expired(self) -> None: ... + def get_client_id(self): ... + def get_scope(self): ... + def get_user_code(self): ... + def is_expired(self): ... class DeviceCredentialDict(dict[str, object], DeviceCredentialMixin): def get_client_id(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/introspection.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/introspection.pyi index 3e36571ed8..2187442f8f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/introspection.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/introspection.pyi @@ -9,5 +9,5 @@ class JWTIntrospectionEndpoint(IntrospectionEndpoint): def create_endpoint_response(self, request): ... def authenticate_token(self, request, client): ... def create_introspection_payload(self, token): ... - def get_jwks(self) -> None: ... + def get_jwks(self): ... def get_username(self, user_id: str) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/revocation.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/revocation.pyi index df8ac21b5f..13eee4f998 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/revocation.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/revocation.pyi @@ -1,9 +1,10 @@ from _typeshed import Incomplete +from typing import NoReturn from authlib.oauth2.rfc7009 import RevocationEndpoint class JWTRevocationEndpoint(RevocationEndpoint): issuer: Incomplete def __init__(self, issuer, server=None, *args, **kwargs) -> None: ... - def authenticate_token(self, request, client) -> None: ... - def get_jwks(self) -> None: ... + def authenticate_token(self, request, client) -> NoReturn: ... + def get_jwks(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/token.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/token.pyi index 9591435187..9f6f310ea3 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/token.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/token.pyi @@ -6,7 +6,7 @@ class JWTBearerTokenGenerator(BearerTokenGenerator): issuer: Incomplete alg: Incomplete def __init__(self, issuer, alg: str = "RS256", refresh_token_generator=None, expires_generator=None) -> None: ... - def get_jwks(self) -> None: ... + def get_jwks(self): ... def get_extra_claims(self, client, grant_type, user, scope): ... def get_audiences(self, client, user, scope) -> str | list[str]: ... def get_acr(self, user) -> str | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/token_validator.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/token_validator.pyi index 70650d7443..fe6d9f9edf 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/token_validator.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oauth2/rfc9068/token_validator.pyi @@ -6,7 +6,7 @@ class JWTBearerTokenValidator(BearerTokenValidator): issuer: Incomplete resource_server: Incomplete def __init__(self, issuer, resource_server, *args, **kwargs) -> None: ... - def get_jwks(self) -> None: ... + def get_jwks(self): ... def validate_iss(self, claims, iss: str) -> bool: ... def authenticate_token(self, token_string): ... def validate_token(self, token, scopes, request, groups=None, roles=None, entitlements=None) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/claims.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/claims.pyi index d58ac36706..2a2f9f47ab 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/claims.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/claims.pyi @@ -1,11 +1,9 @@ -from _typeshed import Incomplete - from authlib.jose import JWTClaims __all__ = ["IDToken", "CodeIDToken", "ImplicitIDToken", "HybridIDToken", "UserInfo", "get_claim_cls_by_response_type"] class IDToken(JWTClaims): - ESSENTIAL_CLAIMS: Incomplete + ESSENTIAL_CLAIMS: list[str] def validate(self, now=None, leeway: int = 0) -> None: ... def validate_auth_time(self) -> None: ... def validate_nonce(self) -> None: ... @@ -15,15 +13,15 @@ class IDToken(JWTClaims): def validate_at_hash(self) -> None: ... class CodeIDToken(IDToken): - RESPONSE_TYPES: Incomplete + RESPONSE_TYPES: tuple[str, ...] 
class ImplicitIDToken(IDToken): - RESPONSE_TYPES: Incomplete - ESSENTIAL_CLAIMS: Incomplete + RESPONSE_TYPES: tuple[str, ...] + ESSENTIAL_CLAIMS: list[str] def validate_at_hash(self) -> None: ... class HybridIDToken(ImplicitIDToken): - RESPONSE_TYPES: Incomplete + RESPONSE_TYPES: tuple[str, ...] def validate(self, now=None, leeway: int = 0) -> None: ... def validate_c_hash(self) -> None: ... @@ -33,4 +31,4 @@ class UserInfo(dict[str, object]): def filter(self, scope: str) -> UserInfo: ... def __getattr__(self, key): ... -def get_claim_cls_by_response_type(response_type): ... +def get_claim_cls_by_response_type(response_type) -> type: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/grants/code.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/grants/code.pyi index b2339b061c..69b48d93c5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/grants/code.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/grants/code.pyi @@ -1,13 +1,14 @@ from logging import Logger from authlib.oauth2 import OAuth2Request +from authlib.oauth2.client import OAuth2Client from authlib.oauth2.rfc6749 import BaseGrant from authlib.oidc.core import UserInfo log: Logger class OpenIDToken: - def get_jwt_config(self, grant: BaseGrant) -> dict[str, str | int]: ... + def get_jwt_config(self, grant: BaseGrant, client: OAuth2Client) -> dict[str, str | int]: ... def generate_user_info(self, user, scope: str) -> UserInfo: ... def get_audiences(self, request: OAuth2Request) -> list[str]: ... def process_token(self, grant: BaseGrant, response) -> dict[str, str | int]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/grants/hybrid.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/grants/hybrid.pyi index c57e53dbce..f2652cc1b9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/grants/hybrid.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/grants/hybrid.pyi @@ -10,7 +10,7 @@ class OpenIDHybridGrant(OpenIDImplicitGrant): RESPONSE_TYPES: Incomplete GRANT_TYPE: str DEFAULT_RESPONSE_MODE: str - def generate_authorization_code(self): ... - def save_authorization_code(self, code, request) -> None: ... - def validate_authorization_request(self): ... - def create_granted_params(self, grant_user): ... + def generate_authorization_code(self) -> str: ... + def save_authorization_code(self, code, request): ... + def validate_authorization_request(self) -> str: ... + def create_granted_params(self, grant_user) -> list[tuple[str, str]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/grants/implicit.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/grants/implicit.pyi index a25ddc5557..73be6e9361 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/grants/implicit.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/grants/implicit.pyi @@ -1,17 +1,19 @@ from _typeshed import Incomplete from logging import Logger +from authlib.oauth2.client import OAuth2Client from authlib.oauth2.rfc6749 import ImplicitGrant +from authlib.oidc.core import UserInfo log: Logger class OpenIDImplicitGrant(ImplicitGrant): RESPONSE_TYPES: Incomplete DEFAULT_RESPONSE_MODE: str - def exists_nonce(self, nonce, request) -> None: ... - def get_jwt_config(self) -> None: ... - def generate_user_info(self, user, scope) -> None: ... - def get_audiences(self, request): ... 
+ def exists_nonce(self, nonce, request) -> bool: ... + def get_jwt_config(self, client: OAuth2Client) -> dict[str, Incomplete]: ... + def generate_user_info(self, user, scope) -> UserInfo: ... + def get_audiences(self, request) -> list[Incomplete]: ... def validate_authorization_request(self) -> str: ... def validate_consent_request(self) -> str: ... def create_authorization_response(self, redirect_uri, grant_user): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/userinfo.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/userinfo.pyi index 0c612351b3..0bec9bda3d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/userinfo.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/userinfo.pyi @@ -11,7 +11,7 @@ class UserInfoEndpoint: def __init__( self, server: AuthorizationServer | None = None, resource_protector: ResourceProtector | None = None ) -> None: ... - def create_endpoint_request(self, request: OAuth2Request): ... + def create_endpoint_request(self, request: OAuth2Request) -> OAuth2Request: ... def __call__(self, request: OAuth2Request) -> tuple[int, dict[str, str | None], list[tuple[str, str]]]: ... def generate_user_info(self, user, scope: str) -> UserInfo: ... def get_issuer(self) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/util.pyi index 542c8c7a31..5c0a4bff37 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/core/util.pyi @@ -1 +1,7 @@ -def create_half_hash(s: str, alg: str) -> str: ... 
+from _typeshed import ReadableBuffer +from collections.abc import Iterable +from typing import SupportsBytes, SupportsIndex + +def create_half_hash( + s: str | bytes | float | Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer, alg: str +) -> bytes | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/discovery/models.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/discovery/models.pyi index 5891822ec3..63f2211780 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/discovery/models.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/discovery/models.pyi @@ -1,9 +1,7 @@ -from _typeshed import Incomplete - from authlib.oauth2.rfc8414 import AuthorizationServerMetadata class OpenIDProviderMetadata(AuthorizationServerMetadata): - REGISTRY_KEYS: Incomplete + REGISTRY_KEYS: list[str] def validate_jwks_uri(self): ... def validate_acr_values_supported(self) -> None: ... def validate_subject_types_supported(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/discovery/well_known.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/discovery/well_known.pyi index 3a294ec431..b10c03b654 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/discovery/well_known.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Authlib/authlib/oidc/discovery/well_known.pyi @@ -1 +1 @@ -def get_well_known_url(issuer, external: bool = False): ... +def get_well_known_url(issuer: str, external: bool = False) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/METADATA.toml index 3a87312127..d565392659 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/METADATA.toml @@ -1,3 +1,3 @@ -version = "~=1.2.15" +version = "~=1.3.1" upstream_repository = "https://github.com/tantale/deprecated" requires = [] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/__init__.pyi index c7b200a4e0..3d4b2bfd8a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/__init__.pyi @@ -1,4 +1,9 @@ +from typing import Final + from .classic import deprecated as deprecated +from .params import deprecated_params as deprecated_params -__credits__: str -__date__: str +__version__: Final[str] +__author__: Final[str] +__date__: Final[str] +__credits__: Final[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/params.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/params.pyi new file mode 100644 index 0000000000..b1955646d4 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/params.pyi @@ -0,0 +1,20 @@ +from collections.abc import Callable, Iterable +from inspect import Signature +from typing import Any, TypeVar +from typing_extensions import ParamSpec + +_P = ParamSpec("_P") +_R = TypeVar("_R") + +class DeprecatedParams: + messages: dict[str, str] + category: type[Warning] + def __init__(self, param: str | dict[str, str], reason: str = "", category: type[Warning] = ...) -> None: ... 
+ def populate_messages(self, param: str | dict[str, str], reason: str = "") -> None: ... + def check_params( + self, signature: Signature, *args: Any, **kwargs: Any # args and kwargs passing to Signature.bind method + ) -> list[str]: ... + def warn_messages(self, messages: Iterable[str]) -> None: ... + def __call__(self, f: Callable[_P, _R]) -> Callable[_P, _R]: ... + +deprecated_params = DeprecatedParams diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/METADATA.toml deleted file mode 100644 index b65bbc4661..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/METADATA.toml +++ /dev/null @@ -1,3 +0,0 @@ -version = "3.0.*" -upstream_repository = "https://github.com/ianare/exif-py" -obsolete_since = "3.1.0" # Released on 2025-04-25 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/__init__.pyi deleted file mode 100644 index eede72dcd5..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/__init__.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from logging import Logger -from typing import Any - -from ._types import Reader - -__version__: str -logger: Logger - -def process_file( - fh: Reader, - stop_tag: str = "UNDEF", - details: bool = True, - strict: bool = False, - debug: bool = False, - truncate_tags: bool = True, - auto_seek: bool = True, -) -> dict[str, Any]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/_types.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/_types.pyi deleted file mode 100644 index 2c2e847706..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/_types.pyi +++ /dev/null @@ -1,15 +0,0 @@ -# Stubs-only module with type aliases for ExifRead. 
- -from collections.abc import Iterator -from typing import Any, Literal, Protocol -from typing_extensions import TypeAlias - -# The second item of the value tuple - if it exists - can be a variety of types, -# including a callable or another dict. -TagDict: TypeAlias = dict[int, tuple[str] | tuple[str, Any]] - -class Reader(Protocol): - def __iter__(self) -> Iterator[bytes]: ... - def read(self, size: int, /) -> bytes: ... - def tell(self) -> int: ... - def seek(self, offset: int, whence: Literal[0, 1] = ..., /) -> object: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/classes.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/classes.pyi deleted file mode 100644 index 2d7611b63d..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/classes.pyi +++ /dev/null @@ -1,47 +0,0 @@ -from logging import Logger -from typing import Any, Literal - -from ._types import Reader, TagDict - -logger: Logger - -class IfdTag: - printable: str - tag: int - field_type: int - field_offset: int - field_length: int - values: Any # either string, bytes or list of data items - def __init__(self, printable: str, tag: int, field_type: int, values: Any, field_offset: int, field_length: int) -> None: ... - -class ExifHeader: - file_handle: Reader - endian: Literal["I", "M"] - offset: int - fake_exif: bool - strict: bool - debug: bool - detailed: bool - truncate_tags: bool - tags: dict[str, Any] - def __init__( - self, - file_handle: Reader, - endian: Literal["I", "M"], - offset: int, - fake_exif: bool, - strict: bool, - debug: bool = False, - detailed: bool = True, - truncate_tags: bool = True, - ) -> None: ... - def s2n(self, offset: int, length: int, signed: bool = False) -> int: ... - def n2b(self, offset: int, length: int) -> bytes: ... - def list_ifd(self) -> list[int]: ... 
- def dump_ifd( - self, ifd: int, ifd_name: str, tag_dict: TagDict | None = None, relative: int = 0, stop_tag: str = "UNDEF" - ) -> None: ... - def extract_tiff_thumbnail(self, thumb_ifd: int) -> None: ... - def extract_jpeg_thumbnail(self) -> None: ... - def decode_maker_note(self) -> None: ... - def parse_xmp(self, xmp_bytes: bytes) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/exceptions.pyi deleted file mode 100644 index 47b39e3095..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/exceptions.pyi +++ /dev/null @@ -1,2 +0,0 @@ -class InvalidExif(Exception): ... -class ExifNotFound(Exception): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/exif_log.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/exif_log.pyi deleted file mode 100644 index f7cbc25218..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/exif_log.pyi +++ /dev/null @@ -1,24 +0,0 @@ -import logging -from typing import TextIO - -TEXT_NORMAL: int -TEXT_BOLD: int -TEXT_RED: int -TEXT_GREEN: int -TEXT_YELLOW: int -TEXT_BLUE: int -TEXT_MAGENTA: int -TEXT_CYAN: int - -def get_logger() -> logging.Logger: ... -def setup_logger(debug: bool, color: bool) -> None: ... - -class Formatter(logging.Formatter): - color: bool - debug: bool - def __init__(self, debug: bool = False, color: bool = False) -> None: ... - -class Handler(logging.StreamHandler[TextIO]): - color: bool - debug: bool - def __init__(self, log_level: logging._Level, debug: bool = False, color: bool = False) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/heic.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/heic.pyi deleted file mode 100644 index 7fd0e1f839..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/heic.pyi +++ /dev/null @@ -1,56 +0,0 @@ -from collections.abc import Callable -from logging import Logger - -from ._types import Reader - -logger: Logger - -class WrongBox(Exception): ... -class NoParser(Exception): ... -class BoxVersion(Exception): ... -class BadSize(Exception): ... - -class Box: - version: int - minor_version: int - item_count: int - size: int - after: int - pos: int - compat: list[bytes] - base_offset: int - subs: dict[str, Box] - locs: dict[int, list[tuple[int, int]]] - exif_infe: Box | None - item_id: int - item_type: bytes - item_name: bytes - item_protection_index: int - major_brand: bytes - offset_size: int - length_size: int - base_offset_size: int - index_size: int - flags: int - name: str - def __init__(self, name: str) -> None: ... - def set_sizes(self, offset: int, length: int, base_offset: int, index: int) -> None: ... - def set_full(self, vflags: int) -> None: ... - -class HEICExifFinder: - file_handle: Reader - def __init__(self, file_handle: Reader) -> None: ... - def get(self, nbytes: int) -> bytes: ... - def get16(self) -> int: ... - def get32(self) -> int: ... - def get64(self) -> int: ... - def get_int4x2(self) -> tuple[int, int]: ... - def get_int(self, size: int) -> int: ... - def get_string(self) -> bytes: ... - def next_box(self) -> Box: ... - def get_full(self, box: Box) -> None: ... - def skip(self, box: Box) -> None: ... - def expect_parse(self, name: str) -> Box: ... - def get_parser(self, box: Box) -> Callable[[Box], None]: ... - def parse_box(self, box: Box) -> Box: ... - def find_exif(self) -> tuple[int, bytes]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/jpeg.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/jpeg.pyi deleted file mode 100644 index 9b17915041..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/jpeg.pyi +++ /dev/null @@ -1,7 +0,0 @@ -from logging import Logger - -from ._types import Reader - -logger: Logger - -def find_jpeg_exif(fh: Reader, data: bytes, fake_exif: bool) -> tuple[int, bytes, bool]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/__init__.pyi deleted file mode 100644 index ce0670d24c..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/__init__.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from _typeshed import Incomplete - -from exifread.tags.exif import EXIF_TAGS as EXIF_TAGS -from exifread.tags.makernote import ( - apple as apple, - canon as canon, - casio as casio, - fujifilm as fujifilm, - nikon as nikon, - olympus as olympus, -) - -DEFAULT_STOP_TAG: str -FIELD_TYPES: Incomplete -IGNORE_TAGS: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/exif.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/exif.pyi deleted file mode 100644 index 569609e93f..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/exif.pyi +++ /dev/null @@ -1,7 +0,0 @@ -from exifread._types import TagDict - -INTEROP_TAGS: TagDict -INTEROP_INFO: tuple[str, TagDict] -GPS_TAGS: TagDict -GPS_INFO: tuple[str, TagDict] -EXIF_TAGS: TagDict diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/apple.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/apple.pyi deleted file mode 100644 index c72072637e..0000000000 --- 
a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/apple.pyi +++ /dev/null @@ -1,3 +0,0 @@ -from exifread._types import TagDict - -TAGS: TagDict diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/canon.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/canon.pyi deleted file mode 100644 index 97d4f93c53..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/canon.pyi +++ /dev/null @@ -1,26 +0,0 @@ -from collections.abc import Callable -from typing import Any -from typing_extensions import TypeAlias - -from exifread._types import TagDict - -TAGS: TagDict - -CAMERA_SETTINGS: TagDict -FOCAL_LENGTH: TagDict -SHOT_INFO: TagDict -AF_INFO_2: TagDict -FILE_INFO: TagDict - -def add_one(value: int) -> int: ... -def subtract_one(value: int) -> int: ... -def convert_temp(value: int) -> str: ... - -_CameraInfo: TypeAlias = dict[int, tuple[str, str, Callable[[int], Any]]] - -CAMERA_INFO_TAG_NAME: str -CAMERA_INFO_5D: _CameraInfo -CAMERA_INFO_5DMKII: _CameraInfo -CAMERA_INFO_5DMKIII: _CameraInfo -CAMERA_INFO_600D: _CameraInfo -CAMERA_INFO_MODEL_MAP: dict[str, _CameraInfo] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/casio.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/casio.pyi deleted file mode 100644 index c72072637e..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/casio.pyi +++ /dev/null @@ -1,3 +0,0 @@ -from exifread._types import TagDict - -TAGS: TagDict diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/fujifilm.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/fujifilm.pyi deleted file mode 100644 index c72072637e..0000000000 --- 
a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/fujifilm.pyi +++ /dev/null @@ -1,3 +0,0 @@ -from exifread._types import TagDict - -TAGS: TagDict diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/nikon.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/nikon.pyi deleted file mode 100644 index ffa7101f5d..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/nikon.pyi +++ /dev/null @@ -1,6 +0,0 @@ -from exifread._types import TagDict - -def ev_bias(seq: list[int]) -> str: ... - -TAGS_NEW: TagDict -TAGS_OLD: TagDict diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/olympus.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/olympus.pyi deleted file mode 100644 index 0744c77382..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/olympus.pyi +++ /dev/null @@ -1,6 +0,0 @@ -from exifread._types import TagDict - -def special_mode(val: bytes) -> str: ... - -TAGS: TagDict -TAG_0x2020: TagDict diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/utils.pyi deleted file mode 100644 index 1dace202cb..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/utils.pyi +++ /dev/null @@ -1,22 +0,0 @@ -from collections.abc import Mapping -from fractions import Fraction -from typing import Any, TypeVar, overload -from typing_extensions import Self - -_T = TypeVar("_T") - -@overload -def ord_(dta: str) -> int: ... -@overload -def ord_(dta: _T) -> _T: ... -def make_string(seq: str | list[int]) -> str: ... -def make_string_uc(seq: str | list[int]) -> str: ... -def get_gps_coords(tags: Mapping[str, Any]) -> tuple[float, float]: ... 
- -class Ratio(Fraction): - def __new__(cls, numerator: int = 0, denominator: int | None = None) -> Self: ... - @property - def num(self) -> int: ... - @property - def den(self) -> int: ... - def decimal(self) -> float: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask-SocketIO/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/Flask-SocketIO/METADATA.toml index 658dde445b..1fc858c968 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask-SocketIO/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/Flask-SocketIO/METADATA.toml @@ -1,3 +1,3 @@ -version = "5.5.*" +version = "5.6.*" requires = ["Flask>=0.9"] upstream_repository = "https://github.com/miguelgrinberg/flask-socketio" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/METADATA.toml index 9a121e5aeb..348b7b2626 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/METADATA.toml @@ -1,2 +1,2 @@ -version = "3.9.*" +version = "3.10.*" upstream_repository = "https://github.com/Python-Markdown/markdown" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/htmlparser.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/htmlparser.pyi index c9319565b9..3e6fbfa305 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/htmlparser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/htmlparser.pyi @@ -6,6 +6,8 @@ from collections.abc import Sequence from markdown import Markdown spec: ModuleSpec +commentclose: re.Pattern[str] +commentabruptclose: re.Pattern[str] blank_line_re: re.Pattern[str] class HTMLExtractor(htmlparser.HTMLParser): diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/cursors.pyi 
b/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/cursors.pyi index 4a9dfca63f..2df8877356 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/cursors.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/cursors.pyi @@ -17,6 +17,7 @@ class Cursor: messages: Any errorhandler: Any lastrowid: int + warning_count: int def __init__(self, connection: Connection[Any]) -> None: ... def close(self) -> None: ... def setinputsizes(self, *args) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/console.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/console.pyi index 0de5c60fba..ea9baa94f9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/console.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/console.pyi @@ -1,9 +1,9 @@ -from typing import Any +from _typeshed import Incomplete esc: str -codes: Any -dark_colors: Any -light_colors: Any +codes: Incomplete +dark_colors: Incomplete +light_colors: Incomplete def reset_color(): ... def colorize(color_key, text): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/filter.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/filter.pyi index d11b994f87..917273f6bc 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/filter.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/filter.pyi @@ -1,5 +1,5 @@ +from _typeshed import Incomplete from collections.abc import Iterable, Iterator -from typing import Any from pygments.lexer import Lexer from pygments.token import _TokenType @@ -8,11 +8,11 @@ def apply_filters(stream, filters, lexer=None): ... def simplefilter(f): ... class Filter: - options: Any + options: Incomplete def __init__(self, **options) -> None: ... 
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... class FunctionFilter(Filter): - function: Any + function: Incomplete def __init__(self, **options) -> None: ... def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/filters/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/filters/__init__.pyi index 05325c8c63..ee98372d3f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/filters/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/filters/__init__.pyi @@ -1,5 +1,5 @@ +from _typeshed import Incomplete from collections.abc import Generator, Iterable, Iterator -from typing import Any from pygments.filter import Filter from pygments.lexer import Lexer @@ -7,46 +7,46 @@ from pygments.token import _TokenType def find_filter_class(filtername): ... def get_filter_by_name(filtername, **options): ... -def get_all_filters() -> Generator[str, None, None]: ... +def get_all_filters() -> Generator[str]: ... class CodeTagFilter(Filter): - tag_re: Any + tag_re: Incomplete def __init__(self, **options) -> None: ... def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... class SymbolFilter(Filter): - latex_symbols: Any - isabelle_symbols: Any - lang_map: Any - symbols: Any + latex_symbols: Incomplete + isabelle_symbols: Incomplete + lang_map: Incomplete + symbols: Incomplete def __init__(self, **options) -> None: ... def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... class KeywordCaseFilter(Filter): - convert: Any + convert: Incomplete def __init__(self, **options) -> None: ... 
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... class NameHighlightFilter(Filter): - names: Any - tokentype: Any + names: Incomplete + tokentype: Incomplete def __init__(self, **options) -> None: ... def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... class ErrorToken(Exception): ... class RaiseOnErrorTokenFilter(Filter): - exception: Any + exception: Incomplete def __init__(self, **options) -> None: ... def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... class VisibleWhitespaceFilter(Filter): - wstt: Any + wstt: Incomplete def __init__(self, **options) -> None: ... def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... class GobbleFilter(Filter): - n: Any + n: Incomplete def __init__(self, **options) -> None: ... def gobble(self, value, left): ... def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... @@ -55,4 +55,4 @@ class TokenMergeFilter(Filter): def __init__(self, **options) -> None: ... def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... 
-FILTERS: Any +FILTERS: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatter.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatter.pyi index f13948978e..254f68529f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatter.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatter.pyi @@ -1,17 +1,18 @@ -from typing import Any, Generic, TypeVar, overload +from _typeshed import Incomplete +from typing import Generic, TypeVar, overload _T = TypeVar("_T", str, bytes) class Formatter(Generic[_T]): - name: Any - aliases: Any - filenames: Any + name: Incomplete + aliases: Incomplete + filenames: Incomplete unicodeoutput: bool - style: Any - full: Any - title: Any - encoding: Any - options: Any + style: Incomplete + full: Incomplete + title: Incomplete + encoding: Incomplete + options: Incomplete @overload def __init__(self: Formatter[str], *, encoding: None = None, outencoding: None = None, **options) -> None: ... 
@overload diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/__init__.pyi index 09fc071c81..f1b275f58f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/__init__.pyi @@ -1,5 +1,5 @@ +from _typeshed import Incomplete from collections.abc import Generator -from typing import Any from ..formatter import Formatter from .bbcode import BBCodeFormatter as BBCodeFormatter @@ -19,7 +19,7 @@ from .svg import SvgFormatter as SvgFormatter from .terminal import TerminalFormatter as TerminalFormatter from .terminal256 import Terminal256Formatter as Terminal256Formatter, TerminalTrueColorFormatter as TerminalTrueColorFormatter -def get_all_formatters() -> Generator[type[Formatter[Any]], None, None]: ... +def get_all_formatters() -> Generator[type[Formatter[Incomplete]]]: ... def get_formatter_by_name(_alias, **options): ... def load_formatter_from_file(filename, formattername: str = "CustomFormatter", **options): ... def get_formatter_for_filename(fn, **options): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/_mapping.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/_mapping.pyi index a9e5864b9c..4ca06c4415 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/_mapping.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/_mapping.pyi @@ -1,3 +1,3 @@ -from typing import Any +from _typeshed import Incomplete -FORMATTERS: Any +FORMATTERS: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/bbcode.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/bbcode.pyi index df1708bec2..26758a0146 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/bbcode.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/bbcode.pyi @@ -1,4 +1,5 @@ -from typing import Any, TypeVar +from _typeshed import Incomplete +from typing import TypeVar from pygments.formatter import Formatter @@ -6,7 +7,7 @@ _T = TypeVar("_T", str, bytes) class BBCodeFormatter(Formatter[_T]): name: str - aliases: Any - filenames: Any - styles: Any + aliases: Incomplete + filenames: Incomplete + styles: Incomplete def format_unencoded(self, tokensource, outfile) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/html.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/html.pyi index 234ff59e1d..5d5c78f4be 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/html.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/html.pyi @@ -1,4 +1,5 @@ -from typing import Any, TypeVar +from _typeshed import Incomplete +from typing import TypeVar from pygments.formatter import Formatter @@ -6,32 +7,32 @@ _T = TypeVar("_T", str, bytes) class HtmlFormatter(Formatter[_T]): name: str - aliases: Any - filenames: Any - title: Any - nowrap: Any - noclasses: Any - classprefix: Any - cssclass: Any - cssstyles: Any - prestyles: Any - cssfile: Any - noclobber_cssfile: Any - tagsfile: Any - tagurlformat: Any - filename: Any - wrapcode: Any - span_element_openers: Any + aliases: Incomplete + filenames: Incomplete + title: Incomplete + nowrap: Incomplete + noclasses: Incomplete + classprefix: Incomplete + cssclass: Incomplete + cssstyles: Incomplete + prestyles: Incomplete + cssfile: Incomplete + noclobber_cssfile: Incomplete + tagsfile: Incomplete + tagurlformat: Incomplete + filename: Incomplete + wrapcode: Incomplete + span_element_openers: Incomplete linenos: int - linenostart: Any - linenostep: Any - linenospecial: Any - nobackground: Any - lineseparator: Any - lineanchors: Any - linespans: Any - anchorlinenos: Any - hl_lines: Any + linenostart: Incomplete + linenostep: Incomplete + linenospecial: Incomplete + nobackground: Incomplete + lineseparator: Incomplete + lineanchors: Incomplete + linespans: Incomplete + anchorlinenos: Incomplete + hl_lines: Incomplete def get_style_defs(self, arg=None): ... def get_token_style_defs(self, arg=None): ... def get_background_style_defs(self, arg=None): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/img.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/img.pyi index 67310cfd2e..68b30d1890 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/img.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/img.pyi @@ -1,4 +1,5 @@ -from typing import Any, TypeVar +from _typeshed import Incomplete +from typing import TypeVar from pygments.formatter import Formatter @@ -8,10 +9,10 @@ class PilNotAvailable(ImportError): ... class FontNotFound(Exception): ... class FontManager: - font_name: Any - font_size: Any - fonts: Any - encoding: Any + font_name: Incomplete + font_size: Incomplete + fonts: Incomplete + encoding: Incomplete variable: bool def __init__(self, font_name, font_size: int = 14) -> None: ... def get_char_size(self): ... @@ -21,48 +22,48 @@ class FontManager: class ImageFormatter(Formatter[_T]): name: str - aliases: Any - filenames: Any + aliases: Incomplete + filenames: Incomplete unicodeoutput: bool default_image_format: str encoding: str - styles: Any + styles: Incomplete background_color: str - image_format: Any - image_pad: Any - line_pad: Any - fonts: Any - line_number_fg: Any - line_number_bg: Any - line_number_chars: Any - line_number_bold: Any - line_number_italic: Any - line_number_pad: Any - line_numbers: Any - line_number_separator: Any - line_number_step: Any - line_number_start: Any - line_number_width: Any - hl_lines: Any - hl_color: Any - drawables: Any + image_format: Incomplete + image_pad: Incomplete + line_pad: Incomplete + fonts: Incomplete + line_number_fg: Incomplete + line_number_bg: Incomplete + line_number_chars: Incomplete + line_number_bold: Incomplete + line_number_italic: Incomplete + line_number_pad: Incomplete + line_numbers: Incomplete + line_number_separator: Incomplete + line_number_step: Incomplete + line_number_start: 
Incomplete + line_number_width: Incomplete + hl_lines: Incomplete + hl_color: Incomplete + drawables: Incomplete def get_style_defs(self, arg: str = "") -> None: ... def format(self, tokensource, outfile) -> None: ... class GifImageFormatter(ImageFormatter[_T]): name: str - aliases: Any - filenames: Any + aliases: Incomplete + filenames: Incomplete default_image_format: str class JpgImageFormatter(ImageFormatter[_T]): name: str - aliases: Any - filenames: Any + aliases: Incomplete + filenames: Incomplete default_image_format: str class BmpImageFormatter(ImageFormatter[_T]): name: str - aliases: Any - filenames: Any + aliases: Incomplete + filenames: Incomplete default_image_format: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/irc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/irc.pyi index 7af728d144..00b45e671e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/irc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/irc.pyi @@ -1,4 +1,5 @@ -from typing import Any, TypeVar +from _typeshed import Incomplete +from typing import TypeVar from pygments.formatter import Formatter @@ -6,9 +7,9 @@ _T = TypeVar("_T", str, bytes) class IRCFormatter(Formatter[_T]): name: str - aliases: Any - filenames: Any - darkbg: Any - colorscheme: Any - linenos: Any + aliases: Incomplete + filenames: Incomplete + darkbg: Incomplete + colorscheme: Incomplete + linenos: Incomplete def format_unencoded(self, tokensource, outfile) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/latex.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/latex.pyi index 1cdbf7b2c0..be793c6662 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/latex.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/latex.pyi @@ -1,4 +1,5 @@ -from typing import Any, TypeVar +from _typeshed import Incomplete +from typing import TypeVar from pygments.formatter import Formatter from pygments.lexer import Lexer @@ -7,28 +8,28 @@ _T = TypeVar("_T", str, bytes) class LatexFormatter(Formatter[_T]): name: str - aliases: Any - filenames: Any - docclass: Any - preamble: Any - linenos: Any - linenostart: Any - linenostep: Any - verboptions: Any - nobackground: Any - commandprefix: Any - texcomments: Any - mathescape: Any - escapeinside: Any - left: Any - right: Any - envname: Any + aliases: Incomplete + filenames: Incomplete + docclass: Incomplete + preamble: Incomplete + linenos: Incomplete + linenostart: Incomplete + linenostep: Incomplete + verboptions: Incomplete + nobackground: Incomplete + commandprefix: Incomplete + texcomments: Incomplete + mathescape: Incomplete + escapeinside: Incomplete + left: Incomplete + right: Incomplete + envname: Incomplete def get_style_defs(self, arg: str = ""): ... def format_unencoded(self, tokensource, outfile) -> None: ... class LatexEmbeddedLexer(Lexer): - left: Any - right: Any - lang: Any + left: Incomplete + right: Incomplete + lang: Incomplete def __init__(self, left, right, lang, **options) -> None: ... def get_tokens_unprocessed(self, text): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/other.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/other.pyi index b3f42c323c..44b16833aa 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/other.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/other.pyi @@ -1,4 +1,5 @@ -from typing import Any, TypeVar +from _typeshed import Incomplete +from typing import TypeVar from pygments.formatter import Formatter @@ -6,21 +7,21 @@ _T = TypeVar("_T", str, bytes) class NullFormatter(Formatter[_T]): name: str - aliases: Any - filenames: Any + aliases: Incomplete + filenames: Incomplete def format(self, tokensource, outfile) -> None: ... class RawTokenFormatter(Formatter[bytes]): name: str - aliases: Any - filenames: Any + aliases: Incomplete + filenames: Incomplete unicodeoutput: bool encoding: str - compress: Any - error_color: Any + compress: Incomplete + error_color: Incomplete def format(self, tokensource, outfile) -> None: ... class TestcaseFormatter(Formatter[_T]): name: str - aliases: Any + aliases: Incomplete def format(self, tokensource, outfile) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/pangomarkup.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/pangomarkup.pyi index d266bbe3f2..aadc174732 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/pangomarkup.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/pangomarkup.pyi @@ -1,4 +1,5 @@ -from typing import Any, TypeVar +from _typeshed import Incomplete +from typing import TypeVar from pygments.formatter import Formatter @@ -6,7 +7,7 @@ _T = TypeVar("_T", str, bytes) class PangoMarkupFormatter(Formatter[_T]): name: str - aliases: Any - filenames: Any - styles: Any + aliases: Incomplete + filenames: Incomplete + styles: Incomplete def format_unencoded(self, tokensource, outfile) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/rtf.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/rtf.pyi index 900f43d6f3..da95cd0d1d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/rtf.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/rtf.pyi @@ -1,4 +1,5 @@ -from typing import Any, TypeVar +from _typeshed import Incomplete +from typing import TypeVar from pygments.formatter import Formatter @@ -6,8 +7,8 @@ _T = TypeVar("_T", str, bytes) class RtfFormatter(Formatter[_T]): name: str - aliases: Any - filenames: Any - fontface: Any - fontsize: Any + aliases: Incomplete + filenames: Incomplete + fontface: Incomplete + fontsize: Incomplete def format_unencoded(self, tokensource, outfile) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/svg.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/svg.pyi index f349157c0c..8f98d0bbf1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/svg.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/svg.pyi @@ -1,4 +1,5 @@ -from typing import Any, TypeVar +from _typeshed import Incomplete +from typing import TypeVar from pygments.formatter import Formatter @@ -6,17 +7,17 @@ _T = TypeVar("_T", str, bytes) class SvgFormatter(Formatter[_T]): name: str - aliases: Any - filenames: Any - nowrap: Any - fontfamily: Any - fontsize: Any - xoffset: Any - yoffset: Any - ystep: Any - spacehack: Any - linenos: Any - linenostart: Any - linenostep: Any - linenowidth: Any + aliases: Incomplete + filenames: Incomplete + nowrap: Incomplete + fontfamily: Incomplete + fontsize: Incomplete + xoffset: Incomplete + yoffset: Incomplete + ystep: Incomplete + spacehack: Incomplete + linenos: Incomplete + linenostart: Incomplete + linenostep: Incomplete + linenowidth: Incomplete def format_unencoded(self, tokensource, outfile) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal.pyi index 7448f17a69..90ca462d6b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal.pyi @@ -1,4 +1,5 @@ -from typing import Any, TypeVar +from _typeshed import Incomplete +from typing import TypeVar from pygments.formatter import Formatter @@ -6,10 +7,10 @@ _T = TypeVar("_T", str, bytes) class TerminalFormatter(Formatter[_T]): name: str - aliases: Any - filenames: Any - darkbg: Any - colorscheme: Any - linenos: Any + aliases: Incomplete + filenames: Incomplete + darkbg: Incomplete + colorscheme: Incomplete + linenos: Incomplete def format(self, tokensource, outfile): ... def format_unencoded(self, tokensource, outfile) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal256.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal256.pyi index e494e05467..33f17c176f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal256.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal256.pyi @@ -1,15 +1,16 @@ -from typing import Any, TypeVar +from _typeshed import Incomplete +from typing import TypeVar from pygments.formatter import Formatter _T = TypeVar("_T", str, bytes) class EscapeSequence: - fg: Any - bg: Any - bold: Any - underline: Any - italic: Any + fg: Incomplete + bg: Incomplete + bold: Incomplete + underline: Incomplete + italic: Incomplete def __init__(self, fg=None, bg=None, bold: bool = False, underline: bool = False, italic: bool = False) -> None: ... def escape(self, attrs): ... def color_string(self): ... 
@@ -18,19 +19,19 @@ class EscapeSequence: class Terminal256Formatter(Formatter[_T]): name: str - aliases: Any - filenames: Any - xterm_colors: Any - best_match: Any - style_string: Any - usebold: Any - useunderline: Any - useitalic: Any - linenos: Any + aliases: Incomplete + filenames: Incomplete + xterm_colors: Incomplete + best_match: Incomplete + style_string: Incomplete + usebold: Incomplete + useunderline: Incomplete + useitalic: Incomplete + linenos: Incomplete def format(self, tokensource, outfile): ... def format_unencoded(self, tokensource, outfile) -> None: ... class TerminalTrueColorFormatter(Terminal256Formatter[_T]): name: str - aliases: Any - filenames: Any + aliases: Incomplete + filenames: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/lexers/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/lexers/__init__.pyi index 128c5dcaf7..3463d6a6e4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/lexers/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/lexers/__init__.pyi @@ -1,19 +1,18 @@ from _typeshed import FileDescriptorOrPath, StrPath from collections.abc import Iterator -from typing import Any from pygments.lexer import Lexer, LexerMeta def get_all_lexers(plugins: bool = True) -> Iterator[tuple[str, tuple[str, ...], tuple[str, ...], tuple[str, ...]]]: ... def find_lexer_class(name: str) -> LexerMeta | None: ... def find_lexer_class_by_name(_alias: str) -> LexerMeta: ... -def get_lexer_by_name(_alias: str, **options: Any) -> Lexer: ... -def load_lexer_from_file(filename: FileDescriptorOrPath, lexername: str = "CustomLexer", **options: Any) -> Lexer: ... +def get_lexer_by_name(_alias: str, **options) -> Lexer: ... +def load_lexer_from_file(filename: FileDescriptorOrPath, lexername: str = "CustomLexer", **options) -> Lexer: ... 
def find_lexer_class_for_filename(_fn: StrPath, code: str | bytes | None = None) -> LexerMeta | None: ... -def get_lexer_for_filename(_fn: StrPath, code: str | bytes | None = None, **options: Any) -> Lexer: ... -def get_lexer_for_mimetype(_mime: str, **options: Any) -> Lexer: ... -def guess_lexer_for_filename(_fn: StrPath, _text: str, **options: Any) -> Lexer: ... -def guess_lexer(_text: str | bytes, **options: Any) -> Lexer: ... +def get_lexer_for_filename(_fn: StrPath, code: str | bytes | None = None, **options) -> Lexer: ... +def get_lexer_for_mimetype(_mime: str, **options) -> Lexer: ... +def guess_lexer_for_filename(_fn: StrPath, _text: str, **options) -> Lexer: ... +def guess_lexer(_text: str | bytes, **options) -> Lexer: ... # Having every lexer class here doesn't seem to be worth it def __getattr__(name: str): ... # incomplete module diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/plugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/plugin.pyi index e47d66b02c..25253818d7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/plugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/plugin.pyi @@ -1,6 +1,6 @@ import sys +from _typeshed import Incomplete from collections.abc import Generator -from typing import Any from pygments.filter import Filter from pygments.formatter import Formatter @@ -21,7 +21,7 @@ else: def iter_entry_points(group_name: str) -> tuple[EntryPoint, ...] | list[EntryPoint]: ... -def find_plugin_lexers() -> Generator[type[Lexer], None, None]: ... -def find_plugin_formatters() -> Generator[tuple[str, type[Formatter[Any]]], None, None]: ... -def find_plugin_styles() -> Generator[tuple[str, type[Style]], None, None]: ... -def find_plugin_filters() -> Generator[tuple[str, type[Filter]], None, None]: ... +def find_plugin_lexers() -> Generator[type[Lexer]]: ... 
+def find_plugin_formatters() -> Generator[tuple[str, type[Formatter[Incomplete]]]]: ... +def find_plugin_styles() -> Generator[tuple[str, type[Style]]]: ... +def find_plugin_filters() -> Generator[tuple[str, type[Filter]]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/regexopt.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/regexopt.pyi index 0d5b90a96a..f289a99507 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/regexopt.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/regexopt.pyi @@ -1,7 +1,7 @@ -from typing import Any +from _typeshed import Incomplete -CS_ESCAPE: Any -FIRST_ELEMENT: Any +CS_ESCAPE: Incomplete +FIRST_ELEMENT: Incomplete def make_charset(letters): ... def regex_opt_inner(strings, open_paren): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/scanner.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/scanner.pyi index df5c2c886f..29fd64f876 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/scanner.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/scanner.pyi @@ -1,15 +1,15 @@ -from typing import Any +from _typeshed import Incomplete class EndOfText(RuntimeError): ... class Scanner: - data: Any - data_length: Any + data: Incomplete + data_length: Incomplete start_pos: int pos: int - flags: Any - last: Any - match: Any + flags: Incomplete + last: Incomplete + match: Incomplete def __init__(self, text, flags: int = 0) -> None: ... @property def eos(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/unistring.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/unistring.pyi index 6dd2b3fcea..bebf225e36 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/unistring.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/unistring.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete Cc: str Cf: str @@ -32,7 +32,7 @@ Zp: str Zs: str xid_continue: str xid_start: str -cats: Any +cats: Incomplete def combine(*args): ... def allexcept(*args): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/util.pyi index 963a810038..6a4076719e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/util.pyi @@ -1,10 +1,10 @@ +from _typeshed import Incomplete from io import TextIOWrapper -from typing import Any -split_path_re: Any -doctype_lookup_re: Any -tag_re: Any -xml_decl_re: Any +split_path_re: Incomplete +doctype_lookup_re: Incomplete +tag_re: Incomplete +xml_decl_re: Incomplete class ClassNotFound(ValueError): ... class OptionError(Exception): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi index aeca2e341e..b6a2b43973 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi @@ -1,19 +1,18 @@ -from _typeshed import Incomplete - from antlr4.error.Errors import IllegalStateException as IllegalStateException +from antlr4.Lexer import Lexer as ActualLexer, TokenSource from antlr4.Token import Token as Token -Lexer: Incomplete +Lexer: None class TokenStream: ... class BufferedTokenStream(TokenStream): __slots__ = ("tokenSource", "tokens", "index", "fetchedEOF") - tokenSource: Incomplete - tokens: Incomplete + tokenSource: TokenSource + tokens: list[Token] index: int fetchedEOF: bool - def __init__(self, tokenSource: Lexer) -> None: ... + def __init__(self, tokenSource: ActualLexer | None) -> None: ... def mark(self) -> int: ... def release(self, marker: int) -> None: ... def reset(self) -> None: ... @@ -26,15 +25,15 @@ class BufferedTokenStream(TokenStream): def LA(self, i: int) -> int: ... def LB(self, k: int) -> Token | None: ... def LT(self, k: int) -> Token | None: ... - def adjustSeekIndex(self, i: int): ... + def adjustSeekIndex(self, i: int) -> int: ... def lazyInit(self) -> None: ... def setup(self) -> None: ... - def setTokenSource(self, tokenSource: Lexer): ... - def nextTokenOnChannel(self, i: int, channel: int): ... - def previousTokenOnChannel(self, i: int, channel: int): ... - def getHiddenTokensToRight(self, tokenIndex: int, channel: int = -1): ... - def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = -1): ... - def filterForChannel(self, left: int, right: int, channel: int): ... - def getSourceName(self): ... 
- def getText(self, start: int | None = None, stop: int | None = None): ... + def setTokenSource(self, tokenSource: ActualLexer | None) -> None: ... + def nextTokenOnChannel(self, i: int, channel: int) -> int: ... + def previousTokenOnChannel(self, i: int, channel: int) -> int: ... + def getHiddenTokensToRight(self, tokenIndex: int, channel: int = -1) -> list[Token] | None: ... + def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = -1) -> list[Token] | None: ... + def filterForChannel(self, left: int, right: int, channel: int) -> list[Token] | None: ... + def getSourceName(self) -> str: ... + def getText(self, start: int | None = None, stop: int | None = None) -> str: ... def fill(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi index 447f2a6f62..e654055b72 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi @@ -1,15 +1,23 @@ -from _typeshed import Incomplete - +from antlr4.InputStream import InputStream +from antlr4.Lexer import TokenSource from antlr4.Token import CommonToken as CommonToken class TokenFactory: ... class CommonTokenFactory(TokenFactory): __slots__ = "copyText" - DEFAULT: Incomplete - copyText: Incomplete + DEFAULT: CommonTokenFactory | None + copyText: bool def __init__(self, copyText: bool = False) -> None: ... def create( - self, source: tuple[Incomplete, ...], type: int, text: str, channel: int, start: int, stop: int, line: int, column: int - ): ... - def createThin(self, type: int, text: str): ... + self, + source: tuple[TokenSource, InputStream], + type: int, + text: str, + channel: int, + start: int, + stop: int, + line: int, + column: int, + ) -> CommonToken: ... 
+ def createThin(self, type: int, text: str) -> CommonToken: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi index 304106cd8b..ad5d92d3bc 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi @@ -1,12 +1,10 @@ -from _typeshed import Incomplete - from antlr4.BufferedTokenStream import BufferedTokenStream as BufferedTokenStream from antlr4.Lexer import Lexer as Lexer from antlr4.Token import Token as Token class CommonTokenStream(BufferedTokenStream): __slots__ = "channel" - channel: Incomplete + channel: int def __init__(self, lexer: Lexer, channel: int = 0) -> None: ... def adjustSeekIndex(self, i: int) -> int: ... def LB(self, k: int) -> Token | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi index dc759a462f..75ae77b244 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi @@ -1,9 +1,7 @@ -from _typeshed import Incomplete - from antlr4.InputStream import InputStream as InputStream class FileStream(InputStream): __slots__ = "fileName" - fileName: Incomplete + fileName: str def __init__(self, fileName: str, encoding: str = "ascii", errors: str = "strict") -> None: ... - def readDataFrom(self, fileName: str, encoding: str, errors: str = "strict"): ... + def readDataFrom(self, fileName: str, encoding: str, errors: str = "strict") -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi index 3a5991e5ba..c4afb48832 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi @@ -1,22 +1,24 @@ -from _typeshed import Incomplete +from typing import Literal from antlr4.Token import Token as Token class InputStream: __slots__ = ("name", "strdata", "_index", "data", "_size") name: str - strdata: Incomplete - data: Incomplete + strdata: str + data: list[int] + _index: int + _size: int def __init__(self, data: str) -> None: ... @property - def index(self): ... + def index(self) -> int: ... @property - def size(self): ... + def size(self) -> int: ... def reset(self) -> None: ... def consume(self) -> None: ... - def LA(self, offset: int): ... - def LT(self, offset: int): ... - def mark(self): ... - def release(self, marker: int): ... - def seek(self, _index: int): ... - def getText(self, start: int, stop: int): ... + def LA(self, offset: int) -> int: ... + def LT(self, offset: int) -> int: ... + def mark(self) -> Literal[-1]: ... + def release(self, marker: int) -> None: ... + def seek(self, _index: int) -> None: ... + def getText(self, start: int, stop: int) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Lexer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Lexer.pyi index 1ac2177ffb..a2690aa700 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Lexer.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Lexer.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from typing import TextIO from antlr4.atn.LexerATNSimulator import LexerATNSimulator as LexerATNSimulator @@ -10,7 +9,7 @@ from antlr4.error.Errors import ( ) from antlr4.InputStream import InputStream as InputStream from antlr4.Recognizer import Recognizer as Recognizer -from antlr4.Token import Token as Token +from antlr4.Token import CommonToken, Token as Token class TokenSource: ... @@ -34,47 +33,62 @@ class Lexer(Recognizer, TokenSource): DEFAULT_MODE: int MORE: int SKIP: int - DEFAULT_TOKEN_CHANNEL: Incomplete - HIDDEN: Incomplete + DEFAULT_TOKEN_CHANNEL: int + HIDDEN: int MIN_CHAR_VALUE: int MAX_CHAR_VALUE: int + _input: InputStream + _output: TextIO + _factory: CommonTokenFactory + _tokenFactorySourcePair: tuple[TokenSource, InputStream] + _interp: LexerATNSimulator + _token: Token | None + _tokenStartCharIndex: int + _tokenStartLine: int + _tokenStartColumn: int + _hitEOF: bool + _channel: int + _type: int + _modeStack: list[int] + _mode: int + _text: str | None def __init__(self, input: InputStream, output: TextIO = ...) -> None: ... def reset(self) -> None: ... - def nextToken(self): ... + def nextToken(self) -> Token | None: ... def skip(self) -> None: ... def more(self) -> None: ... - def mode(self, m: int): ... - def pushMode(self, m: int): ... - def popMode(self): ... + def mode(self, m: int) -> None: ... + def pushMode(self, m: int) -> None: ... + def popMode(self) -> int: ... @property - def inputStream(self): ... + def inputStream(self) -> InputStream: ... 
@inputStream.setter - def inputStream(self, input: InputStream): ... + def inputStream(self, input: InputStream) -> None: ... @property - def sourceName(self): ... - def emitToken(self, token: Token): ... - def emit(self): ... - def emitEOF(self): ... + def sourceName(self) -> str: ... + def emitToken(self, token: Token) -> None: ... + def emit(self) -> CommonToken: ... + def emitEOF(self) -> CommonToken: ... @property - def type(self): ... + def type(self) -> int: ... @type.setter - def type(self, type: int): ... + def type(self, type: int) -> None: ... @property - def line(self): ... + def line(self) -> int: ... @line.setter - def line(self, line: int): ... + def line(self, line: int) -> None: ... @property - def column(self): ... + def column(self) -> int: ... @column.setter - def column(self, column: int): ... - def getCharIndex(self): ... + def column(self, column: int) -> None: ... + def getCharIndex(self) -> int: ... @property - def text(self): ... + def text(self) -> str: ... @text.setter - def text(self, txt: str): ... - def getAllTokens(self): ... - def notifyListeners(self, e: LexerNoViableAltException): ... - def getErrorDisplay(self, s: str): ... - def getErrorDisplayForChar(self, c: str): ... - def getCharErrorDisplay(self, c: str): ... - def recover(self, re: RecognitionException): ... + def text(self, txt: str) -> None: ... + def getAllTokens(self) -> list[Token]: ... + def notifyListeners(self, e: LexerNoViableAltException) -> None: ... + def getErrorDisplay(self, s: str) -> str: ... + def getErrorDisplayForChar(self, c: str) -> str: ... + def getCharErrorDisplay(self, c: str) -> str: ... + def recover(self, re: RecognitionException) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Parser.pyi b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Parser.pyi index 42cf6443cc..3e4409dba8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Parser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Parser.pyi @@ -1,8 +1,9 @@ from _typeshed import Incomplete -from typing import TextIO +from typing import Literal, TextIO from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions as ATNDeserializationOptions from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer +from antlr4.atn.ParserATNSimulator import ParserATNSimulator from antlr4.BufferedTokenStream import TokenStream as TokenStream from antlr4.CommonTokenFactory import TokenFactory as TokenFactory from antlr4.error.Errors import ( @@ -16,6 +17,7 @@ from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext from antlr4.Recognizer import Recognizer as Recognizer from antlr4.RuleContext import RuleContext as RuleContext from antlr4.Token import Token as Token +from antlr4.tree.ParseTreePattern import ParseTreePattern from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher as ParseTreePatternMatcher from antlr4.tree.Tree import ErrorNode as ErrorNode, ParseTreeListener as ParseTreeListener, TerminalNode as TerminalNode @@ -39,48 +41,59 @@ class Parser(Recognizer): "_parseListeners", "_syntaxErrors", ) - bypassAltsAtnCache: Incomplete + _input: TokenStream + _output: TextIO + _errHandler: DefaultErrorStrategy + _precedenceStack: list[int] + _ctx: ParserRuleContext | None + _tracer: TraceListener | None + _parseListeners: list[ParseTreeListener] + _syntaxErrors: int + _interp: ParserATNSimulator + bypassAltsAtnCache: dict[Incomplete, Incomplete] buildParseTrees: bool def __init__(self, input: TokenStream, output: TextIO = ...) 
-> None: ... def reset(self) -> None: ... - def match(self, ttype: int): ... - def matchWildcard(self): ... - def getParseListeners(self): ... - def addParseListener(self, listener: ParseTreeListener): ... - def removeParseListener(self, listener: ParseTreeListener): ... + def match(self, ttype: int) -> Token: ... + def matchWildcard(self) -> Token: ... + def getParseListeners(self) -> list[ParseTreeListener]: ... + def addParseListener(self, listener: ParseTreeListener) -> None: ... + def removeParseListener(self, listener: ParseTreeListener) -> None: ... def removeParseListeners(self) -> None: ... def triggerEnterRuleEvent(self) -> None: ... def triggerExitRuleEvent(self) -> None: ... - def getNumberOfSyntaxErrors(self): ... - def getTokenFactory(self): ... - def setTokenFactory(self, factory: TokenFactory): ... + def getNumberOfSyntaxErrors(self) -> int: ... + def getTokenFactory(self) -> TokenFactory: ... + def setTokenFactory(self, factory: TokenFactory) -> None: ... def getATNWithBypassAlts(self): ... - def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer | None = None): ... - def getInputStream(self): ... - def setInputStream(self, input: InputStream): ... - def getTokenStream(self): ... - def setTokenStream(self, input: TokenStream): ... - def getCurrentToken(self): ... - def notifyErrorListeners(self, msg: str, offendingToken: Token | None = None, e: RecognitionException | None = None): ... - def consume(self): ... + def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer | None = None) -> ParseTreePattern: ... + def getInputStream(self) -> InputStream: ... + def setInputStream(self, input: InputStream) -> None: ... + def getTokenStream(self) -> TokenStream: ... + def setTokenStream(self, input: TokenStream) -> None: ... + def getCurrentToken(self) -> Token | None: ... 
+ def notifyErrorListeners( + self, msg: str, offendingToken: Token | None = None, e: RecognitionException | None = None + ) -> None: ... + def consume(self) -> None: ... def addContextToParseTree(self) -> None: ... - state: Incomplete - def enterRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ... + state: int + def enterRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int) -> None: ... def exitRule(self) -> None: ... - def enterOuterAlt(self, localctx: ParserRuleContext, altNum: int): ... - def getPrecedence(self): ... - def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ... - def pushNewRecursionContext(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ... - def unrollRecursionContexts(self, parentCtx: ParserRuleContext): ... - def getInvokingContext(self, ruleIndex: int): ... - def precpred(self, localctx: RuleContext, precedence: int): ... - def inContext(self, context: str): ... - def isExpectedToken(self, symbol: int): ... + def enterOuterAlt(self, localctx: ParserRuleContext, altNum: int) -> None: ... + def getPrecedence(self) -> int: ... + def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int) -> None: ... + def pushNewRecursionContext(self, localctx: ParserRuleContext, state: int, ruleIndex: int) -> None: ... + def unrollRecursionContexts(self, parentCtx: ParserRuleContext) -> None: ... + def getInvokingContext(self, ruleIndex: int) -> RuleContext | None: ... + def precpred(self, localctx: RuleContext, precedence: int) -> bool: ... + def inContext(self, context: str) -> Literal[False]: ... + def isExpectedToken(self, symbol: int) -> bool: ... def getExpectedTokens(self): ... def getExpectedTokensWithinCurrentRule(self): ... - def getRuleIndex(self, ruleName: str): ... - def getRuleInvocationStack(self, p: RuleContext | None = None): ... - def getDFAStrings(self): ... 
+ def getRuleIndex(self, ruleName: str) -> int: ... + def getRuleInvocationStack(self, p: RuleContext | None = None) -> list[str]: ... + def getDFAStrings(self) -> list[str]: ... def dumpDFA(self) -> None: ... - def getSourceName(self): ... - def setTrace(self, trace: bool): ... + def getSourceName(self) -> str: ... + def setTrace(self, trace: bool) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi index b7dabbc4aa..121045923a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi @@ -28,19 +28,19 @@ class ParserInterpreter(Parser): "_parentContextStack", "pushRecursionContextStates", ) - grammarFileName: Incomplete - atn: Incomplete - tokenNames: Incomplete - ruleNames: Incomplete - decisionToDFA: Incomplete - sharedContextCache: Incomplete - pushRecursionContextStates: Incomplete + grammarFileName: str + atn: ATN + tokenNames: list[Incomplete] + ruleNames: list[str] + decisionToDFA: list[DFA] + sharedContextCache: PredictionContextCache + pushRecursionContextStates: set[int] def __init__( self, grammarFileName: str, tokenNames: list[str], ruleNames: list[str], atn: ATN, input: TokenStream ) -> None: ... - state: Incomplete - def parse(self, startRuleIndex: int): ... - def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ... - def getATNState(self): ... - def visitState(self, p: ATNState): ... - def visitRuleStopState(self, p: ATNState): ... + state: int + def parse(self, startRuleIndex: int) -> ParserRuleContext | None: ... + def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int) -> None: ... 
+ def getATNState(self) -> ATNState: ... + def visitState(self, p: ATNState) -> None: ... + def visitRuleStopState(self, p: ATNState) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi index 73c5c5d752..ba07bd5fbe 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi @@ -1,6 +1,7 @@ -from _typeshed import Incomplete -from collections.abc import Generator +from collections.abc import Callable, Generator +from typing import TypeVar +from antlr4.error.Errors import RecognitionException from antlr4.RuleContext import RuleContext as RuleContext from antlr4.Token import Token as Token from antlr4.tree.Tree import ( @@ -14,29 +15,35 @@ from antlr4.tree.Tree import ( class ParserRuleContext(RuleContext): __slots__ = ("children", "start", "stop", "exception") - children: Incomplete - start: Incomplete - stop: Incomplete - exception: Incomplete + children: list[ParseTree | TerminalNode] | None + start: Token | None + stop: Token | None + exception: RecognitionException | None def __init__(self, parent: ParserRuleContext | None = None, invokingStateNumber: int | None = None) -> None: ... - parentCtx: Incomplete - invokingState: Incomplete - def copyFrom(self, ctx: ParserRuleContext): ... - def enterRule(self, listener: ParseTreeListener): ... - def exitRule(self, listener: ParseTreeListener): ... - def addChild(self, child: ParseTree): ... + parentCtx: RuleContext | None + invokingState: int + def copyFrom(self, ctx: ParserRuleContext) -> None: ... + def enterRule(self, listener: ParseTreeListener) -> None: ... + def exitRule(self, listener: ParseTreeListener) -> None: ... + def addChild(self, child: _ParseTreeT) -> _ParseTreeT: ... 
def removeLastChild(self) -> None: ... - def addTokenNode(self, token: Token): ... - def addErrorNode(self, badToken: Token): ... - def getChild(self, i: int, ttype: type | None = None): ... - def getChildren(self, predicate=None) -> Generator[Incomplete, None, None]: ... - def getToken(self, ttype: int, i: int): ... - def getTokens(self, ttype: int): ... - def getTypedRuleContext(self, ctxType: type, i: int): ... - def getTypedRuleContexts(self, ctxType: type): ... - def getChildCount(self): ... - def getSourceInterval(self): ... + def addTokenNode(self, token: Token) -> TerminalNodeImpl: ... + def addErrorNode(self, badToken: Token) -> ErrorNodeImpl: ... + def getChild(self, i: int, ttype: type[_GenericType] | None = None) -> _GenericType | None: ... + def getChildren( + self, predicate: Callable[[ParseTree | TerminalNode], bool] | None = None + ) -> Generator[ParseTree | TerminalNode, None, None]: ... + def getToken(self, ttype: int, i: int) -> TerminalNode | None: ... + def getTokens(self, ttype: int) -> list[TerminalNode]: ... + def getTypedRuleContext(self, ctxType: type[_ParserRuleContextT], i: int) -> _ParserRuleContextT | None: ... + def getTypedRuleContexts(self, ctxType: type[_ParserRuleContextT]) -> list[_ParserRuleContextT]: ... + def getChildCount(self) -> int: ... + def getSourceInterval(self) -> tuple[int | None, int | None]: ... + +_GenericType = TypeVar("_GenericType", bound=type) +_ParseTreeT = TypeVar("_ParseTreeT", bound=ParseTree) +_ParserRuleContextT = TypeVar("_ParserRuleContextT", bound=ParserRuleContext) class InterpreterRuleContext(ParserRuleContext): - ruleIndex: Incomplete + ruleIndex: int def __init__(self, parent: ParserRuleContext, invokingStateNumber: int, ruleIndex: int) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi index 5e9fcc8bc0..a13b195f29 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi @@ -1,28 +1,37 @@ from _typeshed import Incomplete -from antlr4.error.ErrorListener import ConsoleErrorListener as ConsoleErrorListener, ProxyErrorListener as ProxyErrorListener +from antlr4.atn.ATNSimulator import ATNSimulator +from antlr4.error.ErrorListener import ( + ConsoleErrorListener as ConsoleErrorListener, + ErrorListener, + ProxyErrorListener as ProxyErrorListener, +) +from antlr4.error.Errors import RecognitionException from antlr4.RuleContext import RuleContext as RuleContext from antlr4.Token import Token as Token class Recognizer: __slots__ = ("_listeners", "_interp", "_stateNumber") - tokenTypeMapCache: Incomplete - ruleIndexMapCache: Incomplete + tokenTypeMapCache: dict[Incomplete, int] + ruleIndexMapCache: dict[str, int] + _listeners: list[ErrorListener] + _interp: ATNSimulator | None + _stateNumber: int def __init__(self) -> None: ... - def extractVersion(self, version): ... - def checkVersion(self, toolVersion) -> None: ... - def addErrorListener(self, listener) -> None: ... - def removeErrorListener(self, listener) -> None: ... + def extractVersion(self, version: str) -> tuple[str, str]: ... + def checkVersion(self, toolVersion: str) -> None: ... + def addErrorListener(self, listener: ErrorListener) -> None: ... + def removeErrorListener(self, listener: ErrorListener) -> None: ... def removeErrorListeners(self) -> None: ... - def getTokenTypeMap(self): ... - def getRuleIndexMap(self): ... - def getTokenType(self, tokenName: str): ... - def getErrorHeader(self, e): ... 
- def getTokenErrorDisplay(self, t: Token): ... - def getErrorListenerDispatch(self): ... - def sempred(self, localctx: RuleContext, ruleIndex: int, actionIndex: int): ... - def precpred(self, localctx: RuleContext, precedence: int): ... + def getTokenTypeMap(self) -> dict[Incomplete, int]: ... + def getRuleIndexMap(self) -> dict[str, int]: ... + def getTokenType(self, tokenName: str) -> int: ... + def getErrorHeader(self, e: RecognitionException) -> str: ... + def getTokenErrorDisplay(self, t: Token | None) -> str: ... + def getErrorListenerDispatch(self) -> ProxyErrorListener: ... + def sempred(self, localctx: RuleContext, ruleIndex: int, actionIndex: int) -> bool: ... + def precpred(self, localctx: RuleContext, precedence: int) -> bool: ... @property - def state(self): ... + def state(self) -> int: ... @state.setter - def state(self, atnState: int): ... + def state(self, atnState: int) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi index c7bb19b4e3..925e2f9fab 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi @@ -1,29 +1,31 @@ -from _typeshed import Incomplete from collections.abc import Generator +from typing import Any, Literal +from typing_extensions import Self +from antlr4.Recognizer import Recognizer from antlr4.tree.Tree import INVALID_INTERVAL as INVALID_INTERVAL, ParseTreeVisitor as ParseTreeVisitor, RuleNode as RuleNode from antlr4.tree.Trees import Trees as Trees -Parser: Incomplete +Parser: None class RuleContext(RuleNode): __slots__ = ("parentCtx", "invokingState") - EMPTY: Incomplete - parentCtx: Incomplete - invokingState: Incomplete + EMPTY: RuleContext | None + parentCtx: RuleContext | None + invokingState: int def 
__init__(self, parent: RuleContext | None = None, invokingState: int = -1) -> None: ... - def depth(self): ... - def isEmpty(self): ... - def getSourceInterval(self): ... - def getRuleContext(self): ... - def getPayload(self): ... - def getText(self): ... - def getRuleIndex(self): ... - def getAltNumber(self): ... - def setAltNumber(self, altNumber: int): ... - def getChild(self, i: int): ... - def getChildCount(self): ... - def getChildren(self) -> Generator[Incomplete, None, None]: ... - def accept(self, visitor: ParseTreeVisitor): ... - def toStringTree(self, ruleNames: list[Incomplete] | None = None, recog: Parser | None = None): ... - def toString(self, ruleNames: list[Incomplete], stop: RuleContext) -> str: ... + def depth(self) -> int: ... + def isEmpty(self) -> bool: ... + def getSourceInterval(self) -> tuple[int | None, int | None]: ... + def getRuleContext(self) -> Self: ... + def getPayload(self) -> Self: ... + def getText(self) -> str: ... + def getRuleIndex(self) -> Literal[-1]: ... + def getAltNumber(self) -> Literal[0]: ... + def setAltNumber(self, altNumber: int) -> None: ... + def getChild(self, i: int) -> Any: ... + def getChildCount(self) -> int: ... + def getChildren(self) -> Generator[Any, None, None]: ... + def accept(self, visitor: ParseTreeVisitor) -> None: ... + def toStringTree(self, ruleNames: list[str] | None = None, recog: Recognizer | None = None) -> str: ... + def toString(self, ruleNames: list[str], stop: RuleContext) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Token.pyi b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Token.pyi index 3ca3b3c58e..acbab1f198 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Token.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/Token.pyi @@ -1,4 +1,5 @@ -from _typeshed import Incomplete +from antlr4.InputStream import InputStream +from antlr4.Lexer import TokenSource class Token: __slots__ = ("source", "type", "channel", "start", "stop", "tokenIndex", "line", "column", "_text") @@ -8,42 +9,42 @@ class Token: EOF: int DEFAULT_CHANNEL: int HIDDEN_CHANNEL: int - source: Incomplete - type: Incomplete - channel: Incomplete - start: Incomplete - stop: Incomplete - tokenIndex: Incomplete - line: Incomplete - column: Incomplete + source: tuple[TokenSource | None, InputStream | None] + type: int + channel: int + start: int + stop: int + tokenIndex: int | None + line: int + column: int def __init__(self) -> None: ... @property - def text(self): ... + def text(self) -> str: ... @text.setter - def text(self, text: str): ... - def getTokenSource(self): ... - def getInputStream(self): ... + def text(self, text: str) -> None: ... + def getTokenSource(self) -> TokenSource | None: ... + def getInputStream(self) -> InputStream | None: ... 
class CommonToken(Token): - EMPTY_SOURCE: Incomplete - source: Incomplete - type: Incomplete - channel: Incomplete - start: Incomplete - stop: Incomplete + EMPTY_SOURCE: tuple[None, None] + source: tuple[TokenSource | None, InputStream | None] + type: int + channel: int + start: int + stop: int tokenIndex: int - line: Incomplete - column: Incomplete + line: int + column: int def __init__( self, - source: tuple[Incomplete, Incomplete] = (None, None), + source: tuple[TokenSource | None, InputStream | None] = (None, None), type: int | None = None, channel: int = 0, start: int = -1, stop: int = -1, ) -> None: ... - def clone(self): ... + def clone(self) -> CommonToken: ... @property - def text(self): ... + def text(self) -> str: ... @text.setter - def text(self, text: str): ... + def text(self, text: str) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi index c50e70da58..0eca91c7d8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi @@ -1,5 +1,7 @@ from _typeshed import Incomplete +from typing import Any, Literal, TypeVar +from antlr4.ParserRuleContext import ParserRuleContext, RuleContext from antlr4.Token import Token as Token INVALID_INTERVAL: Incomplete @@ -11,42 +13,44 @@ class RuleNode(ParseTree): ... class TerminalNode(ParseTree): ... class ErrorNode(TerminalNode): ... +_GenericType = TypeVar("_GenericType", bound=type) + class ParseTreeVisitor: - def visit(self, tree): ... - def visitChildren(self, node): ... - def visitTerminal(self, node): ... - def visitErrorNode(self, node): ... + def visit(self, tree: Tree) -> None: ... + def visitChildren(self, node) -> None: ... + def visitTerminal(self, node: TerminalNode) -> None: ... 
+ def visitErrorNode(self, node: ErrorNode) -> None: ... def defaultResult(self) -> None: ... - def aggregateResult(self, aggregate, nextResult): ... - def shouldVisitNextChild(self, node, currentResult): ... + def aggregateResult(self, aggregate, nextResult: _GenericType) -> _GenericType: ... + def shouldVisitNextChild(self, node, currentResult) -> Literal[True]: ... class ParseTreeListener: - def visitTerminal(self, node: TerminalNode): ... - def visitErrorNode(self, node: ErrorNode): ... - def enterEveryRule(self, ctx): ... - def exitEveryRule(self, ctx): ... + def visitTerminal(self, node: TerminalNode) -> None: ... + def visitErrorNode(self, node: ErrorNode) -> None: ... + def enterEveryRule(self, ctx: ParserRuleContext) -> None: ... + def exitEveryRule(self, ctx: ParserRuleContext) -> None: ... class TerminalNodeImpl(TerminalNode): __slots__ = ("parentCtx", "symbol") - parentCtx: Incomplete - symbol: Incomplete + parentCtx: RuleContext | None + symbol: Token def __init__(self, symbol: Token) -> None: ... - def __setattr__(self, key, value) -> None: ... - def getChild(self, i: int): ... - def getSymbol(self): ... - def getParent(self): ... - def getPayload(self): ... - def getSourceInterval(self): ... - def getChildCount(self): ... - def accept(self, visitor: ParseTreeVisitor): ... - def getText(self): ... + def __setattr__(self, key: str, value: Any) -> None: ... + def getChild(self, i: int) -> None: ... + def getSymbol(self) -> Token: ... + def getParent(self) -> RuleContext | None: ... + def getPayload(self) -> Token: ... + def getSourceInterval(self) -> tuple[Literal[-1], Literal[-2]] | tuple[int | None, int | None]: ... + def getChildCount(self) -> Literal[0]: ... + def accept(self, visitor: ParseTreeVisitor) -> None: ... + def getText(self) -> str: ... class ErrorNodeImpl(TerminalNodeImpl, ErrorNode): def __init__(self, token: Token) -> None: ... - def accept(self, visitor: ParseTreeVisitor): ... + def accept(self, visitor: ParseTreeVisitor) -> None: ... 
class ParseTreeWalker: - DEFAULT: Incomplete - def walk(self, listener: ParseTreeListener, t: ParseTree): ... - def enterRule(self, listener: ParseTreeListener, r: RuleNode): ... - def exitRule(self, listener: ParseTreeListener, r: RuleNode): ... + DEFAULT: ParseTreeWalker + def walk(self, listener: ParseTreeListener, t: ParseTree) -> None: ... + def enterRule(self, listener: ParseTreeListener, r: RuleNode) -> None: ... + def exitRule(self, listener: ParseTreeListener, r: RuleNode) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/assertpy.pyi b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/assertpy.pyi index 3000dfd1e1..5366825bcb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/assertpy.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/assertpy.pyi @@ -1,6 +1,6 @@ import logging from collections.abc import Callable, Generator -from typing import Any +from typing import Any, TypeVar from typing_extensions import Self from .base import BaseMixin @@ -17,11 +17,14 @@ from .numeric import NumericMixin from .snapshot import SnapshotMixin from .string import StringMixin +_T = TypeVar("_T") +_V = TypeVar("_V", default=Any) + __version__: str __tracebackhide__: bool class WarningLoggingAdapter(logging.LoggerAdapter[logging.Logger]): - def process(self, msg: str, kwargs: Any) -> tuple[str, Any]: ... + def process(self, msg: str, kwargs: _T) -> tuple[str, _T]: ... 
class AssertionBuilder( StringMixin, @@ -34,18 +37,18 @@ class AssertionBuilder( DynamicMixin, DictMixin, DateMixin, - ContainsMixin, - CollectionMixin, + ContainsMixin[_V], + CollectionMixin[_V], BaseMixin, ): - val: Any + val: _V description: str kind: str | None expected: BaseException | None logger: logging.Logger def __init__( self, - val: Any, + val: _V, description: str = "", kind: str | None = None, expected: BaseException | None = None, @@ -53,7 +56,7 @@ class AssertionBuilder( ) -> None: ... def builder( self, - val: Any, + val: _V, description: str = "", kind: str | None = None, expected: BaseException | None = None, @@ -61,9 +64,9 @@ class AssertionBuilder( ) -> Self: ... def error(self, msg: str) -> Self: ... -def soft_assertions() -> Generator[None, None, None]: ... -def assert_that(val: Any, description: str = "") -> AssertionBuilder: ... -def assert_warn(val: Any, description: str = "", logger: logging.Logger | None = None) -> AssertionBuilder: ... +def soft_assertions() -> Generator[None]: ... +def assert_that(val: _V, description: str = "") -> AssertionBuilder[_V]: ... +def assert_warn(val: _V, description: str = "", logger: logging.Logger | None = None) -> AssertionBuilder: ... def fail(msg: str = "") -> None: ... def soft_fail(msg: str = "") -> None: ... def add_extension(func: Callable[[AssertionBuilder], AssertionBuilder]) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/base.pyi index 8181d33de6..be5db39ff9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/base.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/base.pyi @@ -1,4 +1,3 @@ -from typing import Any from typing_extensions import Self, TypeAlias __tracebackhide__: bool @@ -8,10 +7,10 @@ _IncludeIgnore: TypeAlias = str | list[str] | list[tuple[str, ...]] | None class BaseMixin: description: str def described_as(self, description: str) -> Self: ... - def is_equal_to(self, other: Any, *, include: _IncludeIgnore = None, ignore: _IncludeIgnore = None) -> Self: ... - def is_not_equal_to(self, other: Any) -> Self: ... - def is_same_as(self, other: Any) -> Self: ... - def is_not_same_as(self, other: Any) -> Self: ... + def is_equal_to(self, other: object, *, include: _IncludeIgnore = None, ignore: _IncludeIgnore = None) -> Self: ... + def is_not_equal_to(self, other: object) -> Self: ... + def is_same_as(self, other: object) -> Self: ... + def is_not_same_as(self, other: object) -> Self: ... def is_true(self) -> Self: ... def is_false(self) -> Self: ... def is_none(self) -> Self: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/collection.pyi b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/collection.pyi index 68485dbefd..9f6d22bc98 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/collection.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/collection.pyi @@ -1,11 +1,19 @@ +from _typeshed import SupportsRichComparison from collections.abc import Callable -from typing import Any +from typing import Any, Generic, Literal, TypeVar, overload from typing_extensions import Self __tracebackhide__: bool -class CollectionMixin: +_V = TypeVar("_V", default=Any) + +class CollectionMixin(Generic[_V]): def is_iterable(self) -> Self: ... def is_not_iterable(self) -> Self: ... - def is_subset_of(self, *supersets: Any) -> Self: ... - def is_sorted(self, key: Callable[[Any], Any] = ..., reverse: bool = False) -> Self: ... + def is_subset_of(self, *supersets: _V) -> Self: ... + @overload + def is_sorted(self, key: Callable[[_V], SupportsRichComparison] = ..., reverse: Literal[False] = False) -> Self: ... + @overload + def is_sorted(self, *, reverse: Literal[True]) -> Self: ... + @overload + def is_sorted(self, key: Callable[[_V], SupportsRichComparison], reverse: Literal[True]) -> Self: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/contains.pyi b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/contains.pyi index 2953e40c96..6c59d2a5f8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/contains.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/contains.pyi @@ -1,16 +1,18 @@ -from typing import Any +from typing import Any, Generic, TypeVar from typing_extensions import Self __tracebackhide__: bool -class ContainsMixin: - def contains(self, *items: Any) -> Self: ... - def does_not_contain(self, *items: Any) -> Self: ... 
- def contains_only(self, *items: Any) -> Self: ... - def contains_sequence(self, *items: Any) -> Self: ... +_V = TypeVar("_V", default=Any) + +class ContainsMixin(Generic[_V]): + def contains(self, *items: object) -> Self: ... + def does_not_contain(self, *items: object) -> Self: ... + def contains_only(self, *items: object) -> Self: ... + def contains_sequence(self, *items: object) -> Self: ... def contains_duplicates(self) -> Self: ... def does_not_contain_duplicates(self) -> Self: ... def is_empty(self) -> Self: ... def is_not_empty(self) -> Self: ... - def is_in(self, *items: Any) -> Self: ... - def is_not_in(self, *items: Any) -> Self: ... + def is_in(self, *items: _V) -> Self: ... + def is_not_in(self, *items: _V) -> Self: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/dict.pyi b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/dict.pyi index f88e86db72..0f04642291 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/dict.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/dict.pyi @@ -1,12 +1,14 @@ +from collections.abc import Iterable from typing import Any from typing_extensions import Self __tracebackhide__: bool class DictMixin: - def contains_key(self, *keys: Any) -> Self: ... - def does_not_contain_key(self, *keys: Any) -> Self: ... - def contains_value(self, *values: Any) -> Self: ... - def does_not_contain_value(self, *values: Any) -> Self: ... - def contains_entry(self, *args: Any, **kwargs: dict[str, Any]) -> Self: ... - def does_not_contain_entry(self, *args: Any, **kwargs: dict[str, Any]) -> Self: ... + def contains_key(self, *keys: object) -> Self: ... + def does_not_contain_key(self, *keys: object) -> Self: ... + def contains_value(self, *values: object) -> Self: ... + def does_not_contain_value(self, *values: object) -> Self: ... 
+ # The dicts can contain arbitrary keys and values + def contains_entry(self, *args: Iterable[dict[Any, Any]], **kwargs: Any) -> Self: ... + def does_not_contain_entry(self, *args: Iterable[dict[Any, Any]], **kwargs: Any) -> Self: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/dynamic.pyi b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/dynamic.pyi index 52c8c24d07..681512a565 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/dynamic.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/dynamic.pyi @@ -4,4 +4,4 @@ from typing_extensions import Self __tracebackhide__: bool class DynamicMixin: - def __getattr__(self, attr: str) -> Callable[..., Self]: ... + def __getattr__(self, attr: str) -> Callable[[object], Self]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/extracting.pyi b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/extracting.pyi index f2ae1f16e9..00e82fb26b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/extracting.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/extracting.pyi @@ -1,4 +1,5 @@ -from collections.abc import Callable, Iterable as _Iterable, Mapping +from _typeshed import SupportsRichComparison +from collections.abc import Callable, Iterable, Mapping from typing import Any from typing_extensions import Self @@ -8,6 +9,7 @@ class ExtractingMixin: def extracting( self, *names: str, + # The callable must accept the type of the items in the self.val collection. filter: str | Mapping[str, Any] | Callable[[Any], bool] = ..., - sort: str | _Iterable[str] | Callable[[Any], Any] = ..., + sort: str | Iterable[str] | Callable[[Any], SupportsRichComparison] = ..., ) -> Self: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/file.pyi b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/file.pyi index 1bff54a1ae..0bcf395256 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/file.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/assertpy/assertpy/file.pyi @@ -1,10 +1,9 @@ -from _typeshed import StrPath -from typing import IO, AnyStr +from _typeshed import StrPath, SupportsRead from typing_extensions import Self __tracebackhide__: bool -def contents_of(file: IO[AnyStr] | StrPath, encoding: str = "utf-8") -> str: ... +def contents_of(file: SupportsRead[str] | StrPath, encoding: str = "utf-8") -> str: ... class FileMixin: def exists(self) -> Self: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/atheris/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/atheris/METADATA.toml new file mode 100644 index 0000000000..a99a315846 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/atheris/METADATA.toml @@ -0,0 +1,6 @@ +version = "3.0.*" +upstream_repository = "https://github.com/google/atheris" +partial_stub = true + +[tool.stubtest] +ignore_missing_stub = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/__init__.pyi new file mode 100644 index 0000000000..2a3312448a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/__init__.pyi @@ -0,0 +1,9 @@ +from collections.abc import Callable + +def Setup( + args: list[str], + test_one_input: Callable[[bytes], None], + **kwargs: bool | Callable[[bytes, int, int], str | bytes] | Callable[[bytes, bytes, int, int], str | bytes] | None, +) -> list[str]: ... +def Fuzz() -> None: ... +def Mutate(data: bytes, max_size: int) -> bytes: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/function_hooks.pyi b/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/function_hooks.pyi new file mode 100644 index 0000000000..92ec044b7f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/function_hooks.pyi @@ -0,0 +1,14 @@ +from typing import Any + +def hook_re_module() -> None: ... + +class EnabledHooks: + def __init__(self) -> None: ... + def add(self, hook: str) -> None: ... + def __contains__(self, hook: str) -> bool: ... + +enabled_hooks: EnabledHooks + +# args[1] is an arbitrary string method that is called +# with the subsequent arguments, so they will vary +def _hook_str(*args: Any, **kwargs: Any) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/import_hook.pyi b/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/import_hook.pyi new file mode 100644 index 0000000000..450923a31d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/import_hook.pyi @@ -0,0 +1,36 @@ +import types +from collections.abc import Sequence +from importlib import abc, machinery +from typing_extensions import Self + +def _should_skip(loader: abc.Loader) -> bool: ... + +class AtherisMetaPathFinder(abc.MetaPathFinder): + def __init__( + self, include_packages: set[str], exclude_modules: set[str], enable_loader_override: bool, trace_dataflow: bool + ) -> None: ... + def find_spec( + self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = None + ) -> machinery.ModuleSpec | None: ... + def invalidate_caches(self) -> None: ... + +class AtherisSourceFileLoader: + def __init__(self, name: str, path: str, trace_dataflow: bool) -> None: ... + def get_code(self, fullname: str) -> types.CodeType | None: ... + +class AtherisSourcelessFileLoader: + def __init__(self, name: str, path: str, trace_dataflow: bool) -> None: ... 
+ def get_code(self, fullname: str) -> types.CodeType | None: ... + +def make_dynamic_atheris_loader(loader: abc.Loader | type[abc.Loader], trace_dataflow: bool) -> abc.Loader: ... + +class HookManager: + def __init__( + self, include_packages: set[str], exclude_modules: set[str], enable_loader_override: bool, trace_dataflow: bool + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: object) -> None: ... + +def instrument_imports( + include: Sequence[str] | None = None, exclude: Sequence[str] | None = None, enable_loader_override: bool = True +) -> HookManager: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/instrument_bytecode.pyi b/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/instrument_bytecode.pyi new file mode 100644 index 0000000000..b6b935fd1c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/instrument_bytecode.pyi @@ -0,0 +1,7 @@ +from collections.abc import Callable +from typing import TypeVar + +_T = TypeVar("_T") + +def instrument_func(func: Callable[..., _T]) -> Callable[..., _T]: ... +def instrument_all() -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/utils.pyi new file mode 100644 index 0000000000..090c833ddb --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/utils.pyi @@ -0,0 +1,18 @@ +from typing import Protocol, type_check_only + +def path() -> str: ... +@type_check_only +class _Writer(Protocol): + def isatty(self) -> bool: ... + def write(self, content: str, /) -> object: ... + def flush(self) -> object: ... + +class ProgressRenderer: + def __init__(self, stream: _Writer, total_count: int) -> None: ... + def render(self) -> None: ... + def erase(self) -> None: ... + def drop(self) -> None: ... + @property + def count(self) -> int: ... 
+ @count.setter + def count(self, new_count: int) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/version_dependent.pyi b/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/version_dependent.pyi new file mode 100644 index 0000000000..51c1d20605 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/atheris/atheris/version_dependent.pyi @@ -0,0 +1,27 @@ +import types +from typing import Final + +PYTHON_VERSION: Final[tuple[int, int]] +CONDITIONAL_JUMPS: Final[list[str]] +UNCONDITIONAL_JUMPS: Final[list[str]] +ENDS_FUNCTION: Final[list[str]] +HAVE_REL_REFERENCE: Final[list[str]] +HAVE_ABS_REFERENCE: Final[list[str]] +REL_REFERENCE_IS_INVERTED: Final[list[str]] + +def rel_reference_scale(opname: str) -> int: ... + +REVERSE_CMP_OP: Final[list[int]] + +def jump_arg_bytes(arg: int) -> int: ... +def add_bytes_to_jump_arg(arg: int, size: int) -> int: ... + +class ExceptionTableEntry: + def __init__(self, start_offset: int, end_offset: int, target: int, depth: int, lasti: bool) -> None: ... + def __eq__(self, other: object) -> bool: ... + +class ExceptionTable: + def __init__(self, entries: list[ExceptionTableEntry]) -> None: ... + def __eq__(self, other: object) -> bool: ... + +def generate_exceptiontable(original_code: types.CodeType, exception_table_entries: list[ExceptionTableEntry]) -> bytes: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/METADATA.toml index 67550f9adb..9dc0897df2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/METADATA.toml @@ -1,2 +1,2 @@ -version = "2.14.*" +version = "2.15.*" upstream_repository = "https://github.com/aws/aws-xray-sdk-python" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/__init__.pyi index d570c5d860..b42232c23d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/__init__.pyi @@ -1,6 +1,7 @@ +from .async_recorder import AsyncAWSXRayRecorder as AsyncAWSXRayRecorder from .patcher import patch as patch, patch_all as patch_all from .recorder import AWSXRayRecorder as AWSXRayRecorder -xray_recorder: AWSXRayRecorder +xray_recorder: AsyncAWSXRayRecorder __all__ = ["patch", "patch_all", "xray_recorder", "AWSXRayRecorder"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/async_context.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/async_context.pyi index 7579d08962..85c366e821 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/async_context.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/async_context.pyi @@ -1,13 +1,23 @@ +from asyncio.events import AbstractEventLoop +from asyncio.tasks import Task, _TaskCompatibleCoro +from typing import Any, TypeVar + from .context import Context as _Context +_T_co = TypeVar("_T_co", covariant=True) + class 
AsyncContext(_Context): - def __init__(self, *args, loop=None, use_task_factory: bool = True, **kwargs) -> None: ... + def __init__( + self, context_missing: str = "LOG_ERROR", loop: AbstractEventLoop | None = None, use_task_factory: bool = True + ) -> None: ... def clear_trace_entities(self) -> None: ... class TaskLocalStorage: - def __init__(self, loop=None) -> None: ... - def __setattr__(self, name: str, value) -> None: ... - def __getattribute__(self, item: str): ... + def __init__(self, loop: AbstractEventLoop | None = None) -> None: ... + # Sets unknown items on the current task's context attribute + def __setattr__(self, name: str, value: Any) -> None: ... + # Returns unknown items from the current tasks context attribute + def __getattribute__(self, item: str) -> Any | None: ... def clear(self) -> None: ... -def task_factory(loop, coro): ... +def task_factory(loop: AbstractEventLoop | None, coro: _TaskCompatibleCoro[_T_co]) -> Task[_T_co]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/async_recorder.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/async_recorder.pyi index 4d4bdb8be3..f7e4d588fa 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/async_recorder.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/async_recorder.pyi @@ -1,24 +1,43 @@ +from _typeshed import Incomplete +from collections.abc import Awaitable, Callable, Iterable, Mapping from types import TracebackType +from typing import TypeVar -from .models.segment import SegmentContextManager -from .models.subsegment import SubsegmentContextManager +from .models.dummy_entities import DummySegment, DummySubsegment +from .models.segment import Segment, SegmentContextManager +from .models.subsegment import Subsegment, SubsegmentContextManager from .recorder import AWSXRayRecorder +_T = TypeVar("_T") + class 
AsyncSegmentContextManager(SegmentContextManager): - async def __aenter__(self): ... + async def __aenter__(self) -> DummySegment | Segment: ... async def __aexit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... class AsyncSubsegmentContextManager(SubsegmentContextManager): - async def __call__(self, wrapped, instance, args, kwargs): ... - async def __aenter__(self): ... + async def __call__( + self, wrapped: Callable[..., Awaitable[_T]], instance, args: Iterable[Incomplete], kwargs: Mapping[str, Incomplete] + ) -> _T: ... + async def __aenter__(self) -> DummySubsegment | Subsegment | None: ... async def __aexit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... class AsyncAWSXRayRecorder(AWSXRayRecorder): - def capture_async(self, name=None): ... - def in_segment_async(self, name=None, **segment_kwargs): ... - def in_subsegment_async(self, name=None, **subsegment_kwargs): ... - async def record_subsegment_async(self, wrapped, instance, args, kwargs, name, namespace, meta_processor): ... + def capture_async(self, name: str | None = None) -> AsyncSubsegmentContextManager: ... + def in_segment_async( + self, name: str | None = None, *, traceid: str | None = None, parent_id: str | None = None, sampling: bool | None = None + ) -> AsyncSegmentContextManager: ... + def in_subsegment_async(self, name: str | None = None, *, namespace: str = "local") -> AsyncSubsegmentContextManager: ... + async def record_subsegment_async( + self, + wrapped: Callable[..., Awaitable[_T]], + instance, + args: Iterable[Incomplete], + kwargs: Mapping[str, Incomplete], + name: str, + namespace: str, + meta_processor: Callable[..., object] | None, + ) -> _T: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/context.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/context.pyi index 3b389b303c..c84574342a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/context.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/context.pyi @@ -1,27 +1,27 @@ import time from logging import Logger -from typing import Any +from typing import Final from .models.entity import Entity from .models.segment import Segment from .models.subsegment import Subsegment log: Logger -SUPPORTED_CONTEXT_MISSING: Any -MISSING_SEGMENT_MSG: str -CXT_MISSING_STRATEGY_KEY: str +MISSING_SEGMENT_MSG: Final[str] +SUPPORTED_CONTEXT_MISSING: Final = ("RUNTIME_ERROR", "LOG_ERROR", "IGNORE_ERROR") +CXT_MISSING_STRATEGY_KEY: Final = "AWS_XRAY_CONTEXT_MISSING" class Context: def __init__(self, context_missing: str = "LOG_ERROR") -> None: ... def put_segment(self, segment: Segment) -> None: ... def end_segment(self, end_time: time.struct_time | None = None) -> None: ... def put_subsegment(self, subsegment: Subsegment) -> None: ... - def end_subsegment(self, end_time: time.struct_time | None = None): ... - def get_trace_entity(self): ... + def end_subsegment(self, end_time: time.struct_time | None = None) -> bool: ... + def get_trace_entity(self) -> Entity: ... def set_trace_entity(self, trace_entity: Entity) -> None: ... def clear_trace_entities(self) -> None: ... def handle_context_missing(self) -> None: ... @property - def context_missing(self): ... + def context_missing(self) -> str: ... @context_missing.setter def context_missing(self, value: str) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/daemon_config.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/daemon_config.pyi index e5c4b9609d..35a393a771 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/daemon_config.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/daemon_config.pyi @@ -1,13 +1,15 @@ -DAEMON_ADDRESS_KEY: str -DEFAULT_ADDRESS: str +from typing import Final + +DAEMON_ADDRESS_KEY: Final = "AWS_XRAY_DAEMON_ADDRESS" +DEFAULT_ADDRESS: Final = "127.0.0.1:2000" class DaemonConfig: - def __init__(self, daemon_address="127.0.0.1:2000") -> None: ... + def __init__(self, daemon_address: str | None = "127.0.0.1:2000") -> None: ... @property - def udp_ip(self): ... + def udp_ip(self) -> str: ... @property - def udp_port(self): ... + def udp_port(self) -> int: ... @property - def tcp_ip(self): ... + def tcp_ip(self) -> str: ... @property - def tcp_port(self): ... + def tcp_port(self) -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/emitters/udp_emitter.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/emitters/udp_emitter.pyi index fa5c2b24ca..affa22d2a4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/emitters/udp_emitter.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/emitters/udp_emitter.pyi @@ -13,6 +13,6 @@ class UDPEmitter: def send_entity(self, entity: Entity) -> None: ... def set_daemon_address(self, address: str | None) -> None: ... @property - def ip(self): ... + def ip(self) -> str: ... @property - def port(self): ... + def port(self) -> int: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/lambda_launcher.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/lambda_launcher.pyi index 9d0cbf5e3f..50e94b257d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/lambda_launcher.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/lambda_launcher.pyi @@ -1,23 +1,19 @@ from logging import Logger +from typing import Final from .context import Context log: Logger -LAMBDA_TRACE_HEADER_KEY: str -LAMBDA_TASK_ROOT_KEY: str -TOUCH_FILE_DIR: str -TOUCH_FILE_PATH: str +LAMBDA_TRACE_HEADER_KEY: Final = "_X_AMZN_TRACE_ID" +LAMBDA_TASK_ROOT_KEY: Final = "LAMBDA_TASK_ROOT" +TOUCH_FILE_DIR: Final = "/tmp/.aws-xray/" +TOUCH_FILE_PATH: Final = "/tmp/.aws-xray/initialized" -def check_in_lambda(): ... +def check_in_lambda() -> LambdaContext | None: ... class LambdaContext(Context): def __init__(self) -> None: ... - def put_segment(self, segment) -> None: ... - def end_segment(self, end_time=None) -> None: ... - def put_subsegment(self, subsegment) -> None: ... - def get_trace_entity(self): ... - @property + @property # type: ignore[override] def context_missing(self) -> None: ... @context_missing.setter - def context_missing(self, value) -> None: ... - def handle_context_missing(self) -> None: ... + def context_missing(self, value: str) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/entity.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/entity.pyi index 0e19e4d65f..5ea373e689 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/entity.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/entity.pyi @@ -7,7 +7,7 @@ from .subsegment import Subsegment from .throwable import Throwable log: Logger -ORIGIN_TRACE_HEADER_ATTR_KEY: Final[str] +ORIGIN_TRACE_HEADER_ATTR_KEY: Final = "_origin_trace_header" class Entity: id: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/facade_segment.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/facade_segment.pyi index 69c156c46c..2ae599e4c9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/facade_segment.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/facade_segment.pyi @@ -2,7 +2,7 @@ from typing import Final from .segment import Segment -MUTATION_UNSUPPORTED_MESSAGE: Final[str] +MUTATION_UNSUPPORTED_MESSAGE: Final = "FacadeSegments cannot be mutated." 
class FacadeSegment(Segment): initializing: bool diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/http.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/http.pyi index 4aa1420621..360493f2f8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/http.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/http.pyi @@ -1,13 +1,13 @@ from typing import Final -URL: Final[str] -METHOD: Final[str] -USER_AGENT: Final[str] -CLIENT_IP: Final[str] -X_FORWARDED_FOR: Final[str] -STATUS: Final[str] -CONTENT_LENGTH: Final[str] -XRAY_HEADER: Final[str] -ALT_XRAY_HEADER: Final[str] +URL: Final = "url" +METHOD: Final = "method" +USER_AGENT: Final = "user_agent" +CLIENT_IP: Final = "client_ip" +X_FORWARDED_FOR: Final = "x_forwarded_for" +STATUS: Final = "status" +CONTENT_LENGTH: Final = "content_length" +XRAY_HEADER: Final = "X-Amzn-Trace-Id" +ALT_XRAY_HEADER: Final = "HTTP_X_AMZN_TRACE_ID" request_keys: tuple[str, ...] response_keys: tuple[str, ...] 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/segment.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/segment.pyi index 903378f6e6..1851d8ea32 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/segment.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/segment.pyi @@ -8,7 +8,7 @@ from .dummy_entities import DummySegment from .entity import Entity from .subsegment import Subsegment -ORIGIN_TRACE_HEADER_ATTR_KEY: Final[str] +ORIGIN_TRACE_HEADER_ATTR_KEY: Final = "_origin_trace_header" class SegmentContextManager: name: str | None diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi index 57ceefa1cb..27481f8587 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete +from collections.abc import Callable from types import TracebackType from typing import Final @@ -7,10 +8,10 @@ from .dummy_entities import DummySubsegment from .entity import Entity from .segment import Segment -SUBSEGMENT_RECORDING_ATTRIBUTE: Final[str] +SUBSEGMENT_RECORDING_ATTRIBUTE: Final = "_self___SUBSEGMENT_RECORDING_ATTRIBUTE__" def set_as_recording(decorated_func, wrapped) -> None: ... -def is_already_recording(func): ... +def is_already_recording(func: Callable[..., object]) -> bool: ... def subsegment_decorator(wrapped, instance, args, kwargs): ... 
class SubsegmentContextManager: diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/throwable.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/throwable.pyi index 54b9186343..e4e0f748fd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/throwable.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/throwable.pyi @@ -1,7 +1,9 @@ -from _typeshed import Incomplete from logging import Logger from traceback import StackSummary from typing import TypedDict, type_check_only +from typing_extensions import NotRequired + +log: Logger @type_check_only class _StackInfo(TypedDict): @@ -9,7 +11,13 @@ class _StackInfo(TypedDict): line: int label: str -log: Logger +@type_check_only +class _ThrowableAttrs(TypedDict): + id: str + message: NotRequired[str] + type: str + remote: bool + stack: NotRequired[list[_StackInfo]] class Throwable: id: str @@ -18,4 +26,4 @@ class Throwable: remote: bool stack: list[_StackInfo] | None def __init__(self, exception: Exception, stack: StackSummary, remote: bool = False) -> None: ... - def to_dict(self) -> dict[str, Incomplete]: ... + def to_dict(self) -> _ThrowableAttrs: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/patcher.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/patcher.pyi index 20cee42f8e..6b9f5d7322 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/patcher.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/patcher.pyi @@ -1,9 +1,10 @@ from collections.abc import Iterable from logging import Logger +from typing import Final log: Logger -SUPPORTED_MODULES: tuple[str, ...] -NO_DOUBLE_PATCH: tuple[str, ...] 
+SUPPORTED_MODULES: Final[tuple[str, ...]] +NO_DOUBLE_PATCH: Final[tuple[str, ...]] def patch_all(double_patch: bool = False) -> None: ... def patch( diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ec2_plugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ec2_plugin.pyi index aae2f13951..84c29f5631 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ec2_plugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ec2_plugin.pyi @@ -3,9 +3,9 @@ from logging import Logger from typing import Any, Final, overload log: Logger -SERVICE_NAME: Final[str] -ORIGIN: Final[str] -IMDS_URL: Final[str] +SERVICE_NAME: Final = "ec2" +ORIGIN: Final = "AWS::EC2::Instance" +IMDS_URL: Final = "http://169.254.169.254/latest/" def initialize() -> None: ... def get_token() -> str | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ecs_plugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ecs_plugin.pyi index b2cd96d3dc..019494fb0c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ecs_plugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ecs_plugin.pyi @@ -2,7 +2,7 @@ from logging import Logger from typing import Final log: Logger -SERVICE_NAME: Final[str] -ORIGIN: Final[str] +SERVICE_NAME: Final = "ecs" +ORIGIN: Final = "AWS::ECS::Container" def initialize() -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/elasticbeanstalk_plugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/elasticbeanstalk_plugin.pyi index 3ae45f58cd..9ac6314784 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/elasticbeanstalk_plugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/elasticbeanstalk_plugin.pyi @@ -2,8 +2,8 @@ from logging import Logger from typing import Final log: Logger -CONF_PATH: Final[str] -SERVICE_NAME: Final[str] -ORIGIN: Final[str] +CONF_PATH: Final = "/var/elasticbeanstalk/xray/environment.conf" +SERVICE_NAME: Final = "elastic_beanstalk" +ORIGIN: Final = "AWS::ElasticBeanstalk::Environment" def initialize() -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/recorder.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/recorder.pyi index ee8f964f3e..8b05f527a5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/recorder.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/recorder.pyi @@ -1,8 +1,8 @@ import time -from _typeshed import FileDescriptorOrPath -from collections.abc import Callable, Iterable +from _typeshed import FileDescriptorOrPath, Incomplete +from collections.abc import Callable, Iterable, Mapping from logging import Logger -from typing import Any +from typing import Any, Final, TypeVar from .context import Context from .emitters.udp_emitter import UDPEmitter @@ -15,11 +15,13 @@ from .sampling.sampler import DefaultSampler from .streaming.default_streaming import DefaultStreaming log: Logger -TRACING_NAME_KEY: str -DAEMON_ADDR_KEY: str -CONTEXT_MISSING_KEY: str -XRAY_META: dict[str, dict[str, str]] -SERVICE_INFO: dict[str, str] 
+TRACING_NAME_KEY: Final = "AWS_XRAY_TRACING_NAME" +DAEMON_ADDR_KEY: Final = "AWS_XRAY_DAEMON_ADDRESS" +CONTEXT_MISSING_KEY: Final = "AWS_XRAY_CONTEXT_MISSING" +XRAY_META: Final[dict[str, dict[str, str]]] +SERVICE_INFO: Final[dict[str, str]] + +_T = TypeVar("_T") class AWSXRayRecorder: def __init__(self) -> None: ... @@ -40,8 +42,10 @@ class AWSXRayRecorder: sampler: LocalSampler | DefaultSampler | None = None, stream_sql: bool | None = True, ) -> None: ... - def in_segment(self, name: str | None = None, **segment_kwargs) -> SegmentContextManager: ... - def in_subsegment(self, name: str | None = None, **subsegment_kwargs) -> SubsegmentContextManager: ... + def in_segment( + self, name: str | None = None, *, traceid: str | None = None, parent_id: str | None = None, sampling: bool | None = None + ) -> SegmentContextManager: ... + def in_subsegment(self, name: str | None = None, *, namespace: str = "local") -> SubsegmentContextManager: ... def begin_segment( self, name: str | None = None, traceid: str | None = None, parent_id: str | None = None, sampling: bool | None = None ) -> Segment | DummySegment: ... @@ -61,14 +65,14 @@ class AWSXRayRecorder: def capture(self, name: str | None = None) -> SubsegmentContextManager: ... def record_subsegment( self, - wrapped: Callable[..., Any], + wrapped: Callable[..., _T], instance: Any, - args: list[Any], - kwargs: dict[str, Any], + args: Iterable[Incomplete], + kwargs: Mapping[str, Incomplete], name: str, namespace: str, - meta_processor: Callable[..., object], - ) -> Any: ... + meta_processor: Callable[..., object] | None, + ) -> _T: ... @property def enabled(self) -> bool: ... 
@enabled.setter diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/rule_cache.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/rule_cache.pyi index 6fb06c428f..c6e06134ac 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/rule_cache.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/rule_cache.pyi @@ -4,7 +4,7 @@ TTL: Final = 3600 class RuleCache: def __init__(self) -> None: ... - def get_matched_rule(self, sampling_req, now): ... + def get_matched_rule(self, sampling_req, now: float): ... def load_rules(self, rules) -> None: ... def load_targets(self, targets_dict) -> None: ... @property diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampler.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampler.pyi index 53235a4db5..0bc379e1cb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampler.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampler.pyi @@ -1,5 +1,7 @@ from logging import Logger +from aws_xray_sdk.core.daemon_config import DaemonConfig + log: Logger class DefaultSampler: @@ -7,7 +9,7 @@ class DefaultSampler: def start(self) -> None: ... def should_trace(self, sampling_req=None): ... def load_local_rules(self, rules) -> None: ... - def load_settings(self, daemon_config, context, origin=None) -> None: ... + def load_settings(self, daemon_config: DaemonConfig, context, origin=None) -> None: ... @property def xray_client(self): ... 
@xray_client.setter diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampling_rule.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampling_rule.pyi index 1ea487cfb0..f67af0b609 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampling_rule.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampling_rule.pyi @@ -1,13 +1,23 @@ +from typing import Literal, TypedDict, type_check_only + +from .reservoir import Reservoir + +@type_check_only +class _Stats(TypedDict): + request_count: int + borrow_count: int + sampled_count: int + class SamplingRule: def __init__( - self, name, priority, rate, reservoir_size, host=None, method=None, path=None, service=None, service_type=None + self, name: str, priority, rate, reservoir_size, host=None, method=None, path=None, service=None, service_type=None ) -> None: ... - def match(self, sampling_req): ... - def is_default(self): ... - def snapshot_statistics(self): ... + def match(self, sampling_req) -> bool: ... + def is_default(self) -> bool: ... + def snapshot_statistics(self) -> _Stats: ... def merge(self, rule) -> None: ... - def ever_matched(self): ... - def time_to_report(self): ... + def ever_matched(self) -> bool: ... + def time_to_report(self) -> Literal[True] | None: ... def increment_request_count(self) -> None: ... def increment_borrow_count(self) -> None: ... def increment_sampled_count(self) -> None: ... @@ -16,18 +26,18 @@ class SamplingRule: @rate.setter def rate(self, v) -> None: ... @property - def name(self): ... + def name(self) -> str: ... @property def priority(self): ... @property - def reservoir(self): ... + def reservoir(self) -> Reservoir: ... @reservoir.setter - def reservoir(self, v) -> None: ... + def reservoir(self, v: Reservoir) -> None: ... @property - def can_borrow(self): ... 
+ def can_borrow(self) -> bool: ... @property - def request_count(self): ... + def request_count(self) -> int: ... @property - def borrow_count(self): ... + def borrow_count(self) -> int: ... @property - def sampled_count(self): ... + def sampled_count(self) -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/target_poller.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/target_poller.pyi index 7fdc45bf8f..7a1a5ff753 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/target_poller.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/target_poller.pyi @@ -1,7 +1,11 @@ from logging import Logger +from .connector import ServiceConnector +from .rule_cache import RuleCache +from .rule_poller import RulePoller + log: Logger class TargetPoller: - def __init__(self, cache, rule_poller, connector) -> None: ... + def __init__(self, cache: RuleCache, rule_poller: RulePoller, connector: ServiceConnector) -> None: ... def start(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/ext/psycopg/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/ext/psycopg/__init__.pyi new file mode 100644 index 0000000000..47b402c8e2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/ext/psycopg/__init__.pyi @@ -0,0 +1,3 @@ +from .patch import patch as patch + +__all__ = ["patch"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/ext/psycopg/patch.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/ext/psycopg/patch.pyi new file mode 100644 index 0000000000..969a93686f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/ext/psycopg/patch.pyi @@ -0,0 +1 @@ +def patch() -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/ext/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/ext/util.pyi index daf9f4501b..d61f173ceb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/ext/util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/ext/util.pyi @@ -5,7 +5,7 @@ from aws_xray_sdk.core.models.trace_header import TraceHeader first_cap_re: Final[re.Pattern[str]] all_cap_re: Final[re.Pattern[str]] -UNKNOWN_HOSTNAME: str = "UNKNOWN HOST" +UNKNOWN_HOSTNAME: Final = "UNKNOWN HOST" def inject_trace_header(headers, entity) -> None: ... def calculate_sampling_decision(trace_header, recorder, sampling_req): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/bleach/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/bleach/METADATA.toml index 7dfbd41a85..38c7fd7b5c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/bleach/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/bleach/METADATA.toml @@ -1,4 +1,4 @@ -version = "6.2.*" +version = "6.3.*" requires = ["types-html5lib"] upstream_repository = "https://github.com/mozilla/bleach" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/html5lib_shim.pyi b/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/html5lib_shim.pyi index d24c2d825f..1904b538e8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/html5lib_shim.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/html5lib_shim.pyi @@ -50,7 +50,7 @@ class InputStreamWithMemory: class BleachHTMLTokenizer(HTMLTokenizer): consume_entities: bool - stream: InputStreamWithMemory + stream: InputStreamWithMemory # type: ignore[assignment] emitted_last_token: dict[str, Any] | None def __init__(self, consume_entities: bool = False, **kwargs: Any) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/linkifier.pyi b/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/linkifier.pyi index 73fe653c1b..e6ec5d0ae3 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/linkifier.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/linkifier.pyi @@ -36,7 +36,7 @@ class Linker: # or `html5lib` token might be reused _Token: TypeAlias = dict[str, Any] -class LinkifyFilter(Filter): +class LinkifyFilter(Filter[_Token]): callbacks: Iterable[_Callback] skip_tags: Container[str] parse_email: bool diff --git a/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/sanitizer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/sanitizer.pyi index a86be65fea..897c17f8a2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/sanitizer.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/sanitizer.pyi @@ -40,7 +40,7 @@ class Cleaner: protocols: Iterable[str] strip: bool strip_comments: bool - filters: Iterable[Filter] + filters: Iterable[_FilterConstructor] css_sanitizer: CSSSanitizer | None parser: BleachHTMLParser walker: TreeWalker @@ -85,7 +85,7 @@ class BleachSanitizerFilter(SanitizerFilter): def sanitize_stream(self, token_iterator: Iterable[_Token]) -> Iterator[_Token]: ... def merge_characters(self, token_iterator: Iterable[_Token]) -> Iterator[_Token]: ... def __iter__(self) -> Iterator[_Token]: ... - def sanitize_token(self, token: _Token) -> _Token | list[_Token] | None: ... + def sanitize_token(self, token: _Token) -> _Token | list[_Token] | None: ... # type: ignore[override] def sanitize_characters(self, token: _Token) -> _Token | list[_Token]: ... def sanitize_uri_value(self, value: str, allowed_protocols: Container[str]) -> str | None: ... def allow_token(self, token: _Token) -> _Token: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/braintree/METADATA.toml index 30cdf466ba..43d34d36b4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/METADATA.toml @@ -1,2 +1,2 @@ -version = "4.39.*" +version = "4.41.*" upstream_repository = "https://github.com/braintree/braintree_python" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/address.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/address.pyi index f90932b60f..eaf4ec2016 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/address.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/address.pyi @@ -1,7 +1,9 @@ from _typeshed import Incomplete from typing import Final +from braintree.error_result import ErrorResult from braintree.resource import Resource +from braintree.successful_result import SuccessfulResult class Address(Resource): class ShippingMethod: @@ -14,13 +16,15 @@ class Address(Resource): PickupInStore: Final = "pickup_in_store" @staticmethod - def create(params: dict[str, Incomplete] | None = None): ... + def create(params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... @staticmethod - def delete(customer_id: str, address_id: str): ... + def delete(customer_id: str, address_id: str) -> SuccessfulResult: ... @staticmethod - def find(customer_id: str, address_id: str): ... + def find(customer_id: str, address_id: str) -> Address: ... @staticmethod - def update(customer_id: str, address_id: str, params: dict[str, Incomplete] | None = None): ... + def update( + customer_id: str, address_id: str, params: dict[str, Incomplete] | None = None + ) -> SuccessfulResult | ErrorResult | None: ... 
@staticmethod def create_signature() -> list[str | dict[str, list[str]]]: ... @staticmethod diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/address_gateway.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/address_gateway.pyi index fc7735b016..0ecadbe8c9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/address_gateway.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/address_gateway.pyi @@ -1,13 +1,14 @@ from _typeshed import Incomplete from braintree.address import Address +from braintree.braintree_gateway import BraintreeGateway from braintree.error_result import ErrorResult from braintree.successful_result import SuccessfulResult class AddressGateway: - gateway: Incomplete + gateway: BraintreeGateway config: Incomplete - def __init__(self, gateway) -> None: ... + def __init__(self, gateway: BraintreeGateway) -> None: ... def create(self, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... def delete(self, customer_id: str, address_id: str) -> SuccessfulResult: ... def find(self, customer_id: str, address_id: str) -> Address: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/client_token.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/client_token.pyi index 1e2f68f19a..4855a008da 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/client_token.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/client_token.pyi @@ -1,5 +1,9 @@ +from _typeshed import Incomplete + +from braintree.braintree_gateway import BraintreeGateway + class ClientToken: @staticmethod - def generate(params=None, gateway=None): ... + def generate(params: dict[str, Incomplete] | None = None, gateway: BraintreeGateway | None = None) -> str: ... 
@staticmethod def generate_signature() -> list[str | dict[str, list[str]]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/client_token_gateway.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/client_token_gateway.pyi index 1f53d277ca..b07681cee2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/client_token_gateway.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/client_token_gateway.pyi @@ -1,7 +1,9 @@ from _typeshed import Incomplete +from braintree.braintree_gateway import BraintreeGateway + class ClientTokenGateway: - gateway: Incomplete + gateway: BraintreeGateway config: Incomplete - def __init__(self, gateway) -> None: ... - def generate(self, params=None): ... + def __init__(self, gateway: BraintreeGateway) -> None: ... + def generate(self, params: dict[str, Incomplete] | None = None) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/error_codes.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/error_codes.pyi index 154bf9051b..d56f7e6903 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/error_codes.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/error_codes.pyi @@ -440,7 +440,6 @@ class ErrorCodes: IdToRemoveIsInvalid: Final = "92025" class Transaction: - AdjustmentAmountMustBeGreaterThanZero: Final = "95605" AmountCannotBeNegative: Final = "81501" AmountDoesNotMatch3DSecureAmount: Final = "91585" AmountIsInvalid: Final = "81503" @@ -509,6 +508,7 @@ class ErrorCodes: PaymentMethodNonceUnknown: Final = "91565" PaymentMethodTokenCardTypeIsNotAccepted: Final = "91517" PaymentMethodTokenIsInvalid: Final = "91518" + ProcessingMerchantCategoryCodeIsInvalid: Final = "915265" ProcessorAuthorizationCodeCannotBeSet: Final = "91519" ProcessorAuthorizationCodeIsInvalid: Final = "81520" 
ProcessorDoesNotSupportAuths: Final = "915104" @@ -572,8 +572,17 @@ class ErrorCodes: TransactionIsNotEligibleForAdjustment: Final = "915219" TransactionMustBeInStateAuthorized: Final = "915218" TransactionSourceIsInvalid: Final = "915133" - TransferTypeIsInvalid: Final = "97501" + TransferDetailsAreNotApplicableForThisMerchantAccount: Final = "97511" TransferDetailsAreRequired: Final = "97510" + TransferReceiverAccountReferenceNumberIsNotValid: Final = "97509" + TransferReceiverFirstNameIsNotValid: Final = "97507" + TransferReceiverLastNameIsNotValid: Final = "97508" + TransferReceiverTaxIdIsNotValid: Final = "97506" + TransferSenderAccountReferenceNumberIsNotValid: Final = "97505" + TransferSenderFirstNameIsNotValid: Final = "97503" + TransferSenderLastNameIsNotValid: Final = "97504" + TransferSenderTaxIdIsNotValid: Final = "97502" + TransferTypeIsInvalid: Final = "97501" TypeIsInvalid: Final = "91523" TypeIsRequired: Final = "91524" UnsupportedVoiceAuthorization: Final = "91539" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/payment_method.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/payment_method.pyi index 9d87458d2c..41ac52cb68 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/payment_method.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/payment_method.pyi @@ -1,14 +1,43 @@ from _typeshed import Incomplete +from braintree.amex_express_checkout_card import AmexExpressCheckoutCard +from braintree.android_pay_card import AndroidPayCard +from braintree.apple_pay_card import ApplePayCard +from braintree.credit_card import CreditCard from braintree.error_result import ErrorResult +from braintree.europe_bank_account import EuropeBankAccount +from braintree.masterpass_card import MasterpassCard +from braintree.paypal_account import PayPalAccount from braintree.resource import Resource +from braintree.samsung_pay_card import 
SamsungPayCard +from braintree.sepa_direct_debit_account import SepaDirectDebitAccount from braintree.successful_result import SuccessfulResult +from braintree.unknown_payment_method import UnknownPaymentMethod +from braintree.us_bank_account import UsBankAccount +from braintree.venmo_account import VenmoAccount +from braintree.visa_checkout_card import VisaCheckoutCard class PaymentMethod(Resource): @staticmethod def create(params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult: ... @staticmethod - def find(payment_method_token: str) -> Resource: ... + def find( + payment_method_token: str, + ) -> ( + AndroidPayCard + | ApplePayCard + | EuropeBankAccount + | CreditCard + | PayPalAccount + | UsBankAccount + | VenmoAccount + | VisaCheckoutCard + | AmexExpressCheckoutCard + | SepaDirectDebitAccount + | MasterpassCard + | SamsungPayCard + | UnknownPaymentMethod + ): ... @staticmethod def update(payment_method_token: str, params) -> SuccessfulResult | ErrorResult: ... 
@staticmethod diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/payment_method_gateway.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/payment_method_gateway.pyi index 73393a851e..16054b4413 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/payment_method_gateway.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/payment_method_gateway.pyi @@ -1,15 +1,43 @@ from _typeshed import Incomplete +from braintree.amex_express_checkout_card import AmexExpressCheckoutCard +from braintree.android_pay_card import AndroidPayCard +from braintree.apple_pay_card import ApplePayCard +from braintree.credit_card import CreditCard from braintree.error_result import ErrorResult -from braintree.resource import Resource +from braintree.europe_bank_account import EuropeBankAccount +from braintree.masterpass_card import MasterpassCard +from braintree.paypal_account import PayPalAccount +from braintree.samsung_pay_card import SamsungPayCard +from braintree.sepa_direct_debit_account import SepaDirectDebitAccount from braintree.successful_result import SuccessfulResult +from braintree.unknown_payment_method import UnknownPaymentMethod +from braintree.us_bank_account import UsBankAccount +from braintree.venmo_account import VenmoAccount +from braintree.visa_checkout_card import VisaCheckoutCard class PaymentMethodGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... def create(self, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult: ... - def find(self, payment_method_token: str) -> Resource: ... 
+ def find( + self, payment_method_token: str + ) -> ( + AndroidPayCard + | ApplePayCard + | EuropeBankAccount + | CreditCard + | PayPalAccount + | UsBankAccount + | VenmoAccount + | VisaCheckoutCard + | AmexExpressCheckoutCard + | SepaDirectDebitAccount + | MasterpassCard + | SamsungPayCard + | UnknownPaymentMethod + ): ... def update(self, payment_method_token: str, params) -> SuccessfulResult | ErrorResult: ... def delete(self, payment_method_token: str, options=None) -> SuccessfulResult: ... options: dict[str, Incomplete] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription.pyi index 98acf134f3..24bb2a09a0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription.pyi @@ -1,12 +1,16 @@ from _typeshed import Incomplete +from datetime import date from decimal import Decimal from typing import Final from braintree.add_on import AddOn from braintree.descriptor import Descriptor from braintree.discount import Discount +from braintree.error_result import ErrorResult from braintree.resource import Resource +from braintree.resource_collection import ResourceCollection from braintree.subscription_status_event import SubscriptionStatusEvent +from braintree.successful_result import SuccessfulResult from braintree.transaction import Transaction class Subscription(Resource): @@ -27,23 +31,24 @@ class Subscription(Resource): Pending: Final = "Pending" @staticmethod - def create(params=None): ... + def create(params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... @staticmethod def create_signature(): ... @staticmethod - def find(subscription_id): ... + def find(subscription_id: str) -> Subscription: ... 
@staticmethod def retry_charge(subscription_id, amount=None, submit_for_settlement: bool = False): ... @staticmethod - def update(subscription_id, params=None): ... + def update(subscription_id: str, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... @staticmethod - def cancel(subscription_id): ... + def cancel(subscription_id: str) -> SuccessfulResult | ErrorResult | None: ... @staticmethod - def search(*query): ... + def search(*query) -> ResourceCollection: ... @staticmethod def update_signature(): ... price: Decimal balance: Decimal + next_billing_date: date next_billing_period_amount: Decimal add_ons: list[AddOn] descriptor: Descriptor diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription_details.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription_details.pyi index 30014161f0..df2da1daa1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription_details.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription_details.pyi @@ -1,3 +1,7 @@ +from datetime import date + from braintree.attribute_getter import AttributeGetter -class SubscriptionDetails(AttributeGetter): ... 
+class SubscriptionDetails(AttributeGetter): + billing_period_start_date: date + billing_period_end_date: date diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription_gateway.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription_gateway.pyi index 37dfbae24b..44907bce1d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription_gateway.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription_gateway.pyi @@ -1,12 +1,19 @@ from _typeshed import Incomplete +from braintree.error_result import ErrorResult +from braintree.resource_collection import ResourceCollection +from braintree.subscription import Subscription +from braintree.successful_result import SuccessfulResult + class SubscriptionGateway: gateway: Incomplete config: Incomplete def __init__(self, gateway) -> None: ... - def cancel(self, subscription_id): ... - def create(self, params=None): ... - def find(self, subscription_id): ... + def cancel(self, subscription_id: str) -> SuccessfulResult | ErrorResult | None: ... + def create(self, params: dict[str, Incomplete] | None = None) -> SuccessfulResult | ErrorResult | None: ... + def find(self, subscription_id: str) -> Subscription: ... def retry_charge(self, subscription_id, amount=None, submit_for_settlement: bool = False): ... - def search(self, *query): ... - def update(self, subscription_id, params=None): ... + def search(self, *query) -> ResourceCollection: ... + def update( + self, subscription_id: str, params: dict[str, Incomplete] | None = None + ) -> SuccessfulResult | ErrorResult | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/transaction.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/transaction.pyi index 6aebd73697..9554df4631 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/transaction.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/transaction.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete +from datetime import datetime from decimal import Decimal from typing import Final @@ -26,6 +27,7 @@ from braintree.payment_facilitator import PaymentFacilitator from braintree.paypal_account import PayPalAccount from braintree.paypal_here import PayPalHere from braintree.resource import Resource +from braintree.resource_collection import ResourceCollection from braintree.risk_data import RiskData from braintree.samsung_pay_card import SamsungPayCard from braintree.sepa_direct_debit_account import SepaDirectDebitAccount @@ -102,13 +104,13 @@ class Transaction(Resource): @staticmethod def credit(params=None): ... @staticmethod - def find(transaction_id): ... + def find(transaction_id: str) -> Transaction: ... @staticmethod def refund(transaction_id, amount_or_options=None): ... @staticmethod def sale(params=None): ... @staticmethod - def search(*query): ... + def search(*query) -> ResourceCollection: ... @staticmethod def submit_for_settlement(transaction_id, amount=None, params=None): ... @staticmethod @@ -176,6 +178,8 @@ class Transaction(Resource): network_transaction_id: Incomplete payment_facilitator: PaymentFacilitator transfer: Transfer + subscription_id: str + created_at: datetime def __init__(self, gateway, attributes) -> None: ... @property def vault_billing_address(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/transaction_gateway.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/transaction_gateway.pyi index a6ad5f3f6c..c3bfc88807 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/transaction_gateway.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/transaction_gateway.pyi @@ -1,5 +1,8 @@ from _typeshed import Incomplete +from braintree.resource_collection import ResourceCollection +from braintree.transaction import Transaction + class TransactionGateway: gateway: Incomplete config: Incomplete @@ -9,10 +12,10 @@ class TransactionGateway: def cancel_release(self, transaction_id): ... def create(self, params): ... def credit(self, params): ... - def find(self, transaction_id): ... + def find(self, transaction_id: str) -> Transaction: ... def refund(self, transaction_id, amount_or_options=None): ... def sale(self, params): ... - def search(self, *query): ... + def search(self, *query) -> ResourceCollection: ... def submit_for_settlement(self, transaction_id, amount=None, params=None): ... def update_details(self, transaction_id, params=None): ... def submit_for_partial_settlement(self, transaction_id, amount, params=None): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chevron/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/chevron/METADATA.toml old mode 100755 new mode 100644 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/colorful/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/colorful/METADATA.toml new file mode 100644 index 0000000000..251b9ed1e2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/colorful/METADATA.toml @@ -0,0 +1,2 @@ +version = "0.5.*" +upstream_repository = "https://github.com/timofurrer/colorful" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/__init__.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/ExifRead/exifread/tags/makernote/__init__.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/__init__.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/ansi.pyi b/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/ansi.pyi new file mode 100644 index 0000000000..ed25609c67 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/ansi.pyi @@ -0,0 +1,14 @@ +from typing import Final + +MODIFIERS: Final[dict[str, tuple[int, int]]] +MODIFIER_RESET_OFFSET: Final[int] +FOREGROUND_COLOR_OFFSET: Final[int] +BACKGROUND_COLOR_OFFSET: Final[int] +COLOR_CLOSE_OFFSET: Final[int] +CSI: Final[str] +ANSI_ESCAPE_CODE: Final[str] +NEST_PLACEHOLDER: Final[str] + +def round(value: float) -> int: ... +def rgb_to_ansi256(r: int, g: int, b: int) -> int: ... +def rgb_to_ansi16(r: int, g: int, b: int, use_bright: bool = False) -> int: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/colors.pyi b/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/colors.pyi new file mode 100644 index 0000000000..8865b23572 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/colors.pyi @@ -0,0 +1,6 @@ +from _typeshed import SupportsItems + +def parse_colors(path: str) -> SupportsItems[str, str | tuple[int, int, int]]: ... +def parse_rgb_txt_file(path: str) -> SupportsItems[str, str | tuple[int, int, int]]: ... +def parse_json_color_file(path: str) -> dict[str, str]: ... +def sanitize_color_palette(colorpalette: SupportsItems[str, str | tuple[int, int, int]]) -> dict[str, tuple[int, int, int]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/core.pyi b/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/core.pyi new file mode 100644 index 0000000000..93bce78dff --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/core.pyi @@ -0,0 +1,102 @@ +from _typeshed import SupportsGetItem, SupportsItems, SupportsWrite + +# This module defines a function "str()", which is why "str" can't be used +# as a type annotation or type alias. +from builtins import str as _str +from collections.abc import Iterator +from typing import Any, Final, Literal +from typing_extensions import LiteralString, Self, TypeAlias + +# Custom type helpers +_ColorModeType: TypeAlias = Literal[0, 8, 16, 256, 16777215] +_PaletteType: TypeAlias = dict[_str, _str] | dict[_str, tuple[int, int, int]] | dict[_str, _str | tuple[int, int, int]] +_StyleType: TypeAlias = tuple[_str, _str] + +DEFAULT_RGB_TXT_PATH: Final[_str] +COLOR_PALETTE: Final[dict[_str, _str]] +COLORNAMES_COLORS_PATH: Final[_str] + +class ColorfulError(Exception): ... +class ColorfulAttributeError(AttributeError, ColorfulError): ... 
+ +def translate_rgb_to_ansi_code(red: int, green: int, blue: int, offset: int, colormode: _ColorModeType) -> _str: ... +def translate_colorname_to_ansi_code( + colorname: _str, offset: int, colormode: _ColorModeType, colorpalette: SupportsGetItem[_str, _str | tuple[int, int, int]] +) -> _str: ... +def resolve_modifier_to_ansi_code(modifiername: _str, colormode: _ColorModeType) -> _str: ... +def translate_style( + style: _str, colormode: _ColorModeType, colorpalette: SupportsGetItem[_str, _str | tuple[int, int, int]] +) -> _str: ... +def style_string(string: _str, ansi_style: _StyleType, colormode: _ColorModeType, nested: bool = False) -> _str: ... + +class ColorfulString: + orig_string: _str + styled_string: _str + colorful_ctx: Colorful + def __init__(self, orig_string: _str, styled_string: _str, colorful_ctx: Colorful) -> None: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_str]: ... + def __add__(self, other: _str | ColorfulString) -> Self: ... + def __iadd__(self, other: _str | ColorfulString) -> Self: ... + def __radd__(self, other: _str | ColorfulString) -> Self: ... + def __mul__(self, other: _str) -> Self: ... + def __format__(self, format_spec: _str) -> _str: ... + # Forwards item access to styled_string (a str). + def __getattr__(self, name: _str) -> Any: ... + +class Colorful: + NO_COLORS: Final[int] + ANSI_8_COLORS: Final[int] + ANSI_16_COLORS: Final[int] + ANSI_256_COLORS: Final[int] + TRUE_COLORS: Final[int] + COLORNAMES_COLORS = COLORNAMES_COLORS_PATH + close_fg_color: Final[_str] + close_bg_color: Final[_str] + no_bold: Final[_str] + no_dimmed: Final[_str] + no_italic: Final[_str] + no_underlined: Final[_str] + no_blinkslow: Final[_str] + no_blinkrapid: Final[_str] + no_inversed: Final[_str] + no_concealed: Final[_str] + no_struckthrough: Final[_str] + colormode: _ColorModeType + def __init__(self, colormode: _ColorModeType | None = None, colorpalette: _str | _PaletteType | None = None) -> None: ... 
+ @property + def colorpalette(self) -> SupportsItems[str, str | tuple[int, int, int]] | None: ... + @colorpalette.setter + def colorpalette(self, colorpalette: _str | _PaletteType) -> None: ... + def setup( + self, + colormode: _ColorModeType | None = None, + colorpalette: _str | _PaletteType | None = None, + extend_colors: bool = False, + ) -> None: ... + def disable(self) -> None: ... + def use_8_ansi_colors(self) -> None: ... + def use_16_ansi_colors(self) -> None: ... + def use_256_ansi_colors(self) -> None: ... + def use_true_colors(self) -> None: ... + def use_palette(self, colorpalette: _str | _PaletteType) -> None: ... + def update_palette(self, colorpalette: _str | _PaletteType) -> None: ... + def use_style(self, style_name: _str) -> None: ... + def format(self, string: _str, *args: LiteralString, **kwargs: LiteralString) -> _str: ... + def str(self, string: _str) -> ColorfulString: ... + def print( + self, *objects: object, sep: _str = " ", end: _str = "\n", file: SupportsWrite[_str] | None = None, flush: bool = False + ) -> None: ... + + class ColorfulStyle: + colormode: _ColorModeType + colorful_ctx: Colorful + def __init__(self, style: _StyleType, colormode: _ColorModeType, colorful_ctx: Colorful) -> None: ... + def evaluate(self, string: _str, nested: bool = False) -> ColorfulString: ... + def __and__(self, other: Self) -> Self: ... + def __call__(self, string: _str, nested: bool = False) -> ColorfulString: ... + def __or__(self, other) -> ColorfulString: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + + def __getattr__(self, name: _str) -> ColorfulStyle: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/styles.pyi b/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/styles.pyi new file mode 100644 index 0000000000..8a0fcbe017 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/styles.pyi @@ -0,0 +1,4 @@ +from typing import Final + +SOLARIZED: Final[dict[str, str]] +MONOKAI: Final[dict[str, str]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/terminal.pyi b/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/terminal.pyi new file mode 100644 index 0000000000..3445996c48 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/terminal.pyi @@ -0,0 +1,16 @@ +from typing import Final, Protocol, overload, type_check_only + +@type_check_only +class _SupportsGet(Protocol): + @overload + def get(self, name: str, /) -> str | None: ... + @overload + def get(self, name: str, default: str, /) -> str: ... + +NO_COLORS: Final[int] +ANSI_8_COLORS: Final[int] +ANSI_16_COLORS: Final[int] +ANSI_256_COLORS: Final[int] +TRUE_COLORS: Final[int] + +def detect_color_support(env: _SupportsGet) -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/utils.pyi new file mode 100644 index 0000000000..a4156beb64 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/colorful/colorful/utils.pyi @@ -0,0 +1,2 @@ +def hex_to_rgb(value: str) -> tuple[int, int, int]: ... +def check_hex(value: str) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/decorator/decorator.pyi b/packages/pyright-internal/typeshed-fallback/stubs/decorator/decorator.pyi index 91f3df155c..78beff8899 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/decorator/decorator.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/decorator/decorator.pyi @@ -20,9 +20,9 @@ class FunctionMaker: args: list[str] varargs: str | None varkw: str | None - defaults: tuple[Any, ...] + defaults: tuple[Any, ...] | None kwonlyargs: list[str] - kwonlydefaults: str | None + kwonlydefaults: _dict[str, Any] | None shortsignature: str | None name: str doc: str | None diff --git a/packages/pyright-internal/typeshed-fallback/stubs/django-filter/django_filters/filterset.pyi b/packages/pyright-internal/typeshed-fallback/stubs/django-filter/django_filters/filterset.pyi index 7b59f30045..72cdeab7b5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/django-filter/django_filters/filterset.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/django-filter/django_filters/filterset.pyi @@ -77,7 +77,9 @@ class BaseFilterSet: cls, field: models.Field[Any, Any], field_name: str, lookup_expr: str | None = None ) -> Filter: ... # Accepts any Django field type @classmethod - def filter_for_lookup(cls, field: models.Field[Any, Any], lookup_type: str) -> type[Filter]: ... # Field type varies by model + def filter_for_lookup( + cls, field: models.Field[Any, Any], lookup_type: str # Field type varies by model + ) -> tuple[type[Filter], dict[str, Any]]: ... class FilterSet(BaseFilterSet, metaclass=FilterSetMetaclass): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/api/container.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/api/container.pyi index a44bd63fb4..f8e225493e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/api/container.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/api/container.pyi @@ -16,6 +16,11 @@ class _HasId(TypedDict): class _HasID(TypedDict): ID: str +@type_check_only +class _TopResult(TypedDict): + Titles: list[str] + Processes: list[list[str]] + _Container: TypeAlias = _HasId | _HasID | str class ContainerApiMixin: @@ -145,7 +150,7 @@ class ContainerApiMixin: def start(self, container: _Container) -> None: ... def stats(self, container: _Container, decode: bool | None = None, stream: bool = True, one_shot: bool | None = None): ... def stop(self, container: _Container, timeout: int | None = None) -> None: ... - def top(self, container: _Container, ps_args: str | None = None) -> str: ... + def top(self, container: _Container, ps_args: str | None = None) -> _TopResult: ... def unpause(self, container: _Container) -> None: ... def update_container( self, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/api/daemon.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/api/daemon.pyi index 60dccc7393..84d38f7add 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/api/daemon.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/api/daemon.pyi @@ -1,18 +1,26 @@ -from _typeshed import Incomplete from datetime import datetime -from typing import Any +from typing import Any, Literal, overload from docker.types.daemon import CancellableStream class DaemonApiMixin: def df(self) -> dict[str, Any]: ... 
+ @overload def events( self, since: datetime | int | None = None, until: datetime | int | None = None, filters: dict[str, Any] | None = None, - decode: bool | None = None, - ) -> CancellableStream[Incomplete]: ... + decode: Literal[False] | None = None, + ) -> CancellableStream[str]: ... + @overload + def events( + self, + since: datetime | int | None = None, + until: datetime | int | None = None, + filters: dict[str, Any] | None = None, + decode: Literal[True] = ..., + ) -> CancellableStream[dict[str, Any]]: ... def info(self) -> dict[str, Any]: ... def login( self, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/api/exec_api.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/api/exec_api.pyi index 60a3e4682c..a4dcd13a71 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/api/exec_api.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/api/exec_api.pyi @@ -1,3 +1,11 @@ +from _io import _BufferedReaderStream +from _typeshed import Incomplete +from socket import SocketIO +from typing import Literal, overload + +from docker.transport.sshconn import SSHSocket +from docker.types.daemon import CancellableStream + class ExecApiMixin: def exec_create( self, @@ -9,12 +17,120 @@ class ExecApiMixin: tty: bool = False, privileged: bool = False, user: str = "", - environment=None, - workdir=None, - detach_keys=None, - ): ... - def exec_inspect(self, exec_id): ... - def exec_resize(self, exec_id, height=None, width=None) -> None: ... + environment: dict[str, str] | list[str] | None = None, + workdir: str | None = None, + detach_keys: str | None = None, + ) -> dict[str, Incomplete]: ... + def exec_inspect(self, exec_id: str) -> dict[str, Incomplete]: ... + def exec_resize(self, exec_id: str, height: int | None = None, width: int | None = None) -> None: ... 
+ @overload + def exec_start( + self, + exec_id: str, + detach: Literal[True], + tty: bool = False, + stream: bool = False, + socket: bool = False, + demux: bool = False, + ) -> bytes: ... + @overload + def exec_start( + self, exec_id: str, detach: Literal[False], tty: bool, stream: bool, socket: Literal[True], demux: bool = False + ) -> SocketIO | _BufferedReaderStream | SSHSocket: ... + @overload + def exec_start( + self, + exec_id: str, + detach: Literal[False] = False, + tty: bool = False, + stream: bool = False, + *, + socket: Literal[True], + demux: bool = False, + ) -> SocketIO | _BufferedReaderStream | SSHSocket: ... + @overload + def exec_start( + self, exec_id: str, detach: Literal[False], tty: bool, stream: Literal[True], socket: Literal[False], demux: Literal[True] + ) -> CancellableStream[tuple[bytes | None, bytes | None]]: ... + @overload + def exec_start( + self, + exec_id: str, + detach: Literal[False] = False, + tty: bool = False, + socket: Literal[False] = False, + *, + stream: Literal[True], + demux: Literal[True], + ) -> CancellableStream[tuple[bytes | None, bytes | None]]: ... + @overload + def exec_start( + self, + exec_id: str, + detach: Literal[False], + tty: bool, + stream: Literal[True], + socket: Literal[False], + demux: Literal[False], + ) -> CancellableStream[bytes]: ... + @overload def exec_start( - self, exec_id, detach: bool = False, tty: bool = False, stream: bool = False, socket: bool = False, demux: bool = False + self, + exec_id: str, + detach: Literal[False] = False, + tty: bool = False, + *, + stream: Literal[True], + socket: Literal[False] = False, + demux: Literal[False] = False, + ) -> CancellableStream[bytes]: ... + @overload + def exec_start( + self, + exec_id: str, + detach: Literal[False], + tty: bool, + stream: Literal[False], + socket: Literal[False], + demux: Literal[True], + ) -> tuple[bytes | None, bytes | None]: ... 
+ @overload + def exec_start( + self, + exec_id: str, + detach: Literal[False] = False, + tty: bool = False, + stream: Literal[False] = False, + socket: Literal[False] = False, + *, + demux: Literal[True], + ) -> tuple[bytes | None, bytes | None]: ... + @overload + def exec_start( + self, + exec_id: str, + detach: Literal[False] = False, + tty: bool = False, + stream: Literal[False] = False, + socket: Literal[False] = False, + demux: Literal[False] = False, + ) -> bytes: ... + @overload + def exec_start( + self, + exec_id: str, + detach: bool = False, + tty: bool = False, + stream: bool = False, + socket: bool = False, + demux: bool = False, + ) -> ( + str + | SocketIO + | _BufferedReaderStream + | SSHSocket + | CancellableStream[bytes] + | CancellableStream[tuple[bytes | None, bytes | None]] + | tuple[bytes | None, bytes | None] + | bytes ): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/client.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/client.pyi index 7a96c165d7..05c7235c48 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/client.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/client.pyi @@ -1,5 +1,5 @@ from collections.abc import Iterable -from typing import NoReturn, Protocol, type_check_only +from typing import Any, Literal, NoReturn, Protocol, overload, type_check_only from docker import APIClient from docker.models.configs import ConfigCollection @@ -12,6 +12,7 @@ from docker.models.secrets import SecretCollection from docker.models.services import ServiceCollection from docker.models.swarm import Swarm from docker.models.volumes import VolumeCollection +from docker.types import CancellableStream @type_check_only class _Environ(Protocol): @@ -51,13 +52,16 @@ class DockerClient: def swarm(self) -> Swarm: ... @property def volumes(self) -> VolumeCollection: ... - def events(self, *args, **kwargs): ... - def df(self): ... 
- def info(self, *args, **kwargs): ... - def login(self, *args, **kwargs): ... - def ping(self, *args, **kwargs): ... - def version(self, *args, **kwargs): ... - def close(self): ... + @overload + def events(self, *args, decode: Literal[False] | None = None, **kwargs) -> CancellableStream[str]: ... + @overload + def events(self, *args, decode: Literal[True] = ..., **kwargs) -> CancellableStream[dict[str, Any]]: ... + def df(self) -> dict[str, Any]: ... + def info(self, *args, **kwargs) -> dict[str, Any]: ... + def login(self, *args, **kwargs) -> dict[str, Any]: ... + def ping(self, *args, **kwargs) -> bool: ... + def version(self, *args, **kwargs) -> dict[str, Any]: ... + def close(self) -> None: ... def __getattr__(self, name: str) -> NoReturn: ... from_env = DockerClient.from_env diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/models/containers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/models/containers.pyi index dddbb847c8..67097d2cc6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/models/containers.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/models/containers.pyi @@ -1,10 +1,13 @@ import datetime +from _io import _BufferedReaderStream from _typeshed import Incomplete -from collections.abc import Iterable, Mapping +from collections.abc import Iterable, Iterator, Mapping +from socket import SocketIO from typing import Literal, NamedTuple, TypedDict, overload, type_check_only from typing_extensions import NotRequired from docker._types import ContainerWeightDevice, WaitContainerResponse +from docker.transport.sshconn import SSHSocket from docker.types import EndpointConfig from docker.types.containers import DeviceRequest, LogConfig, Ulimit from docker.types.daemon import CancellableStream @@ -18,6 +21,11 @@ class _RestartPolicy(TypedDict): MaximumRetryCount: NotRequired[int] Name: NotRequired[Literal["always", "on-failure"]] +@type_check_only +class 
_TopResult(TypedDict): + Titles: list[str] + Processes: list[list[str]] + class Container(Model): @property def name(self) -> str | None: ... @@ -31,10 +39,12 @@ class Container(Model): def health(self) -> str: ... @property def ports(self) -> dict[Incomplete, Incomplete]: ... - def attach(self, **kwargs): ... - def attach_socket(self, **kwargs): ... - def commit(self, repository: str | None = None, tag: str | None = None, **kwargs): ... - def diff(self): ... + def attach( + self, **kwargs + ) -> str | tuple[str | None, str | None] | CancellableStream[str] | CancellableStream[tuple[str | None, str | None]]: ... + def attach_socket(self, **kwargs) -> SocketIO | _BufferedReaderStream | SSHSocket: ... + def commit(self, repository: str | None = None, tag: str | None = None, **kwargs) -> Image: ... + def diff(self) -> list[dict[str, Incomplete]]: ... def exec_run( self, cmd: str | list[str], @@ -47,15 +57,15 @@ class Container(Model): detach: bool = False, stream: bool = False, socket: bool = False, - environment=None, - workdir=None, + environment: dict[str, str] | list[str] | None = None, + workdir: str | None = None, demux: bool = False, ) -> ExecResult: ... def export(self, chunk_size: int | None = 2097152) -> str: ... def get_archive( self, path: str, chunk_size: int | None = 2097152, encode_stream: bool = False ) -> tuple[Incomplete, Incomplete]: ... - def kill(self, signal=None): ... + def kill(self, signal: str | int | None = None) -> None: ... @overload def logs( self, @@ -85,14 +95,14 @@ class Container(Model): def pause(self) -> None: ... def put_archive(self, path: str, data) -> bool: ... def remove(self, *, v: bool = False, link: bool = False, force: bool = False) -> None: ... - def rename(self, name: str): ... - def resize(self, height: int, width: int): ... - def restart(self, *, timeout: float | None = 10): ... + def rename(self, name: str) -> None: ... + def resize(self, height: int, width: int) -> None: ... 
+ def restart(self, *, timeout: float | None = 10) -> None: ... def start(self) -> None: ... - def stats(self, **kwargs): ... + def stats(self, **kwargs) -> Iterator[dict[str, Incomplete]] | dict[str, Incomplete]: ... def stop(self, *, timeout: float | None = None) -> None: ... - def top(self, *, ps_args: str | None = None) -> str: ... - def unpause(self): ... + def top(self, *, ps_args: str | None = None) -> _TopResult: ... + def unpause(self) -> None: ... def update( self, *, @@ -400,13 +410,13 @@ class ContainerCollection(Collection[Container]): self, all: bool = False, before: str | None = None, - filters=None, + filters: dict[str, Incomplete] | None = None, limit: int = -1, since: str | None = None, sparse: bool = False, ignore_removed: bool = False, - ): ... - def prune(self, filters=None): ... + ) -> list[Container]: ... + def prune(self, filters: dict[str, Incomplete] | None = None) -> dict[str, Incomplete]: ... RUN_CREATE_KWARGS: list[str] RUN_HOST_CONFIG_KWARGS: list[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/models/images.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/models/images.pyi index 7de317a325..cf1e54ae0c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/models/images.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/models/images.pyi @@ -26,7 +26,7 @@ class Image(Model): def tags(self) -> list[str]: ... def history(self) -> list[Any]: ... def remove(self, force: bool = False, noprune: bool = False) -> dict[str, Any]: ... - def save(self, chunk_size: int = 2097152, named: bool = False) -> Iterator[Any]: ... + def save(self, chunk_size: int = 2097152, named: str | bool = False) -> Iterator[Any]: ... def tag(self, repository: str, tag: str | None = None, **kwargs) -> bool: ... 
class RegistryData(Model): diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/models/resource.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/models/resource.pyi index 9356ef88f8..8f81f52301 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/models/resource.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docker/docker/models/resource.pyi @@ -1,17 +1,17 @@ from typing import Any, Generic, NoReturn, TypeVar from typing_extensions import Self -from docker import APIClient +from docker import DockerClient _T = TypeVar("_T", bound=Model) class Model: id_attribute: str - client: APIClient | None + client: DockerClient | None collection: Collection[Self] | None attrs: dict[str, Any] def __init__( - self, attrs: dict[str, Any] | None = None, client: APIClient | None = None, collection: Collection[Self] | None = None + self, attrs: dict[str, Any] | None = None, client: DockerClient | None = None, collection: Collection[Self] | None = None ) -> None: ... def __eq__(self, other) -> bool: ... def __hash__(self) -> int: ... @@ -23,8 +23,8 @@ class Model: class Collection(Generic[_T]): model: type[_T] - client: APIClient - def __init__(self, client: APIClient | None = None) -> None: ... + client: DockerClient + def __init__(self, client: DockerClient | None = None) -> None: ... def __call__(self, *args, **kwargs) -> NoReturn: ... def list(self) -> list[_T]: ... def get(self, key: str) -> _T: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/docutils/METADATA.toml index 4a5df21d5c..1a84d3665e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docutils/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/METADATA.toml @@ -1,2 +1,2 @@ -version = "0.22.2" +version = "0.22.3" upstream_repository = "https://sourceforge.net/p/docutils/code" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/body.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/body.pyi index d1fa231201..75f36f3608 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/body.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/body.pyi @@ -12,6 +12,16 @@ _DirectiveFn: TypeAlias = Callable[[str], str | list[str]] class BasePseudoSection(Directive): option_spec: ClassVar[dict[str, _DirectiveFn]] node_class: ClassVar[type[nodes.Node] | None] + invalid_parents: ClassVar[ + tuple[ + type[nodes.SubStructural], + type[nodes.Bibliographic], + type[nodes.Decorative], + type[nodes.Body], + type[nodes.Part], + type[nodes.topic], + ] + ] def run(self): ... 
class Topic(BasePseudoSection): diff --git a/packages/pyright-internal/typeshed-fallback/stubs/et_xmlfile/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/et_xmlfile/METADATA.toml new file mode 100644 index 0000000000..6307b2971b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/et_xmlfile/METADATA.toml @@ -0,0 +1,2 @@ +version = "2.0.*" +upstream_repository = "https://foss.heptapod.net/openpyxl/et_xmlfile" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/et_xmlfile/et_xmlfile/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/et_xmlfile/et_xmlfile/__init__.pyi new file mode 100644 index 0000000000..ddf9bb3b81 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/et_xmlfile/et_xmlfile/__init__.pyi @@ -0,0 +1,9 @@ +from typing import Final + +from .xmlfile import xmlfile as xmlfile + +__version__: Final[str] +__author__: Final[str] +__license__: Final[str] +__author_email__: Final[str] +__url__: Final[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/et_xmlfile/et_xmlfile/incremental_tree.pyi b/packages/pyright-internal/typeshed-fallback/stubs/et_xmlfile/et_xmlfile/incremental_tree.pyi new file mode 100644 index 0000000000..3d7690a66d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/et_xmlfile/et_xmlfile/incremental_tree.pyi @@ -0,0 +1,170 @@ +import xml.etree.ElementTree as ET +from _typeshed import Unused +from collections.abc import Callable +from typing import Any, Literal, overload + +def current_global_nsmap() -> dict[str, str]: ... 
+ +class IncrementalTree(ET.ElementTree): + def write( # type: ignore[override] + self, + file_or_filename: ET._FileWrite, + encoding: str | None = None, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + method: Literal["xml", "html", "text"] | None = None, # does not accept 'c14n', unlike parent method + *, + short_empty_elements: bool = True, + nsmap: dict[str, str] | None = None, + root_ns_only: bool = False, + minimal_ns_only: bool = False, + ) -> None: ... + +def process_attribs( + elem: ET.Element[Any], + is_nsmap_scope_changed: bool | None, + default_ns_attr_prefix: str | None, + nsmap_scope: dict[str, str], + global_nsmap: dict[str, str], + new_namespace_prefixes: set[str], + uri_to_prefix: dict[str, str], +) -> tuple[list[tuple[str, str]], str | None, dict[str, str]]: ... +def write_elem_start( + write: Callable[..., None], + elem: ET.Element[Any], + nsmap_scope: dict[str, str], + global_nsmap: dict[str, str], + short_empty_elements: bool | None, + is_html: bool | None, + is_root: bool = False, + uri_to_prefix: dict[str, str] | None = None, + default_ns_attr_prefix: str | None = None, + new_nsmap: dict[str, str] | None = None, + **kwargs: Unused, +) -> tuple[str | None, dict[str, str], str | None, dict[str, str] | None, bool]: ... +@overload +def tostring( + element: ET.Element[Any], + encoding: None = None, + method: Literal["xml", "html", "text"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + nsmap: dict[str, str] | None = None, + root_ns_only: bool = False, + minimal_ns_only: bool = False, + tree_cls: type[ET.ElementTree] = ..., +) -> bytes: ... 
+@overload +def tostring( + element: ET.Element[Any], + encoding: Literal["unicode"], + method: Literal["xml", "html", "text"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + nsmap: dict[str, str] | None = None, + root_ns_only: bool = False, + minimal_ns_only: bool = False, + tree_cls: type[ET.ElementTree] = ..., +) -> str: ... +@overload +def tostring( + element: ET.Element[Any], + encoding: str, + method: Literal["xml", "html", "text"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + nsmap: dict[str, str] | None = None, + root_ns_only: bool = False, + minimal_ns_only: bool = False, + tree_cls: type[ET.ElementTree] = ..., +) -> Any: ... +@overload +def tostringlist( + element: ET.Element[Any], + encoding: None = None, + method: Literal["xml", "html", "text"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + nsmap: dict[str, str] | None = None, + root_ns_only: bool = False, + minimal_ns_only: bool = False, + tree_cls: type[ET.ElementTree] = ..., +) -> list[bytes]: ... +@overload +def tostringlist( + element: ET.Element[Any], + encoding: Literal["unicode"], + method: Literal["xml", "html", "text"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + nsmap: dict[str, str] | None = None, + root_ns_only: bool = False, + minimal_ns_only: bool = False, + tree_cls: type[ET.ElementTree] = ..., +) -> list[str]: ... 
+@overload +def tostringlist( + element: ET.Element[Any], + encoding: str, + method: Literal["xml", "html", "text"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + nsmap: dict[str, str] | None = None, + root_ns_only: bool = False, + minimal_ns_only: bool = False, + tree_cls: type[ET.ElementTree] = ..., +) -> list[Any]: ... +@overload +def compat_tostring( + element: ET.Element[Any], + encoding: None = None, + method: Literal["xml", "html", "text"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + nsmap: dict[str, str] | None = None, + root_ns_only: bool = True, + minimal_ns_only: bool = False, + tree_cls: type[ET.ElementTree] = ..., +) -> bytes: ... +@overload +def compat_tostring( + element: ET.Element[Any], + encoding: Literal["unicode"], + method: Literal["xml", "html", "text"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + nsmap: dict[str, str] | None = None, + root_ns_only: bool = True, + minimal_ns_only: bool = False, + tree_cls: type[ET.ElementTree] = ..., +) -> str: ... +@overload +def compat_tostring( + element: ET.Element[Any], + encoding: str, + method: Literal["xml", "html", "text"] | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + nsmap: dict[str, str] | None = None, + root_ns_only: bool = True, + minimal_ns_only: bool = False, + tree_cls: type[ET.ElementTree] = ..., +) -> Any: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/et_xmlfile/et_xmlfile/xmlfile.pyi b/packages/pyright-internal/typeshed-fallback/stubs/et_xmlfile/et_xmlfile/xmlfile.pyi new file mode 100644 index 0000000000..9e1f55e256 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/et_xmlfile/et_xmlfile/xmlfile.pyi @@ -0,0 +1,37 @@ +import types +import xml.etree.ElementTree as ET +from _typeshed import Incomplete +from collections.abc import Generator +from contextlib import contextmanager +from typing import Any + +class LxmlSyntaxError(Exception): ... + +class _IncrementalFileWriter: + global_nsmap: dict[str, str] + is_html: bool + def __init__(self, output_file: ET._FileWrite) -> None: ... + @contextmanager + def element( + self, + tag: str | ET._ElementCallable, + attrib: dict[str, str] | None = None, + nsmap: dict[str, str] | None = None, + **_extra: str, + ) -> Generator[None]: ... + def write(self, arg: str | ET.Element[Any]) -> None: ... + def __enter__(self) -> None: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: types.TracebackType | None + ) -> None: ... + +class xmlfile: + encoding: str + writer_cm: Incomplete + def __init__( + self, output_file: ET._FileWrite, buffered: bool = False, encoding: str = "utf-8", close: bool = False + ) -> None: ... + def __enter__(self) -> _IncrementalFileWriter: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: types.TracebackType | None + ) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fanstatic/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/fanstatic/METADATA.toml index 563c927555..ece9065617 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/fanstatic/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/fanstatic/METADATA.toml @@ -1,3 +1,3 @@ -version = "1.5.*" +version = "1.6.*" upstream_repository = "https://github.com/zopefoundation/fanstatic" requires = ["types-setuptools", "types-WebOb"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-bugbear/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/flake8-bugbear/METADATA.toml index 13d13490ad..bf4cac0bcc 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/flake8-bugbear/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-bugbear/METADATA.toml @@ -1,2 +1,2 @@ -version = "24.12.12" +version = "25.11.29" upstream_repository = "https://github.com/PyCQA/flake8-bugbear" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-bugbear/bugbear.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-bugbear/bugbear.pyi index 3e70823959..2e92ede768 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/flake8-bugbear/bugbear.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-bugbear/bugbear.pyi @@ -2,10 +2,10 @@ import argparse import ast import sys from _typeshed import Incomplete -from collections.abc import Callable, Generator, Iterable, Sequence +from collections.abc import Generator, Iterable, Sequence from functools import partial from logging import Logger -from typing import Any, ClassVar, Final, Literal, NamedTuple, overload +from typing import Any, ClassVar, Final, Literal, NamedTuple, Protocol, overload __version__: Final[str] LOG: Logger @@ -28,7 +28,7 @@ class BugBearChecker: max_line_length: int visitor: ast.NodeVisitor options: argparse.Namespace | None - def 
run(self) -> Generator[error]: ... + def run(self) -> Iterable[tuple[int, int, str, type[BugBearChecker]]]: ... def gen_line_based_checks(self) -> Generator[error]: ... @classmethod def adapt_error(cls, e: error) -> tuple[int, int, str, type[BugBearChecker]]: ... @@ -67,7 +67,19 @@ class B041VariableKeyType: name: str def __init__(self, name: str) -> None: ... +class AstPositionNode(Protocol): + lineno: int + col_offset: int + class BugBearVisitor(ast.NodeVisitor): + filename: str + lines: Sequence[str] | None + b008_b039_extend_immutable_calls: set[str] + b902_classmethod_decorators: set[str] + node_window: list[ast.AST] + errors: list[error] + contexts: list[Context] + b040_caught_exception: B040CaughtException | None NODE_WINDOW_SIZE: ClassVar[int] = 4 in_trystar: str def __init__( @@ -82,6 +94,7 @@ class BugBearVisitor(ast.NodeVisitor): b040_caught_exception: B040CaughtException | None = None, in_trystar: str = "", ) -> None: ... + def add_error(self, code: str, node: AstPositionNode, *vars: object) -> None: ... @property def node_stack(self) -> list[Context]: ... def in_class_init(self) -> bool: ... @@ -137,7 +150,9 @@ class BugBearVisitor(ast.NodeVisitor): def check_for_b017(self, node: ast.With | ast.AsyncWith) -> None: ... def check_for_b019(self, node: ast.FunctionDef | ast.AsyncFunctionDef) -> None: ... def check_for_b020(self, node: ast.For | ast.AsyncFor | ast.comprehension) -> None: ... - def check_for_b023(self, loop_node: ast.For | ast.AsyncFor | ast.comprehension) -> None: ... + def check_for_b023( + self, loop_node: ast.For | ast.AsyncFor | ast.While | ast.GeneratorExp | ast.SetComp | ast.ListComp | ast.DictComp + ) -> None: ... def check_for_b024_and_b027(self, node: ast.ClassDef) -> None: ... def check_for_b026(self, call: ast.Call) -> None: ... def check_for_b031(self, loop_node: ast.For | ast.AsyncFor) -> None: ... @@ -163,12 +178,14 @@ class BugBearVisitor(ast.NodeVisitor): def check_for_b908(self, node: ast.With) -> None: ... 
def check_for_b025(self, node: ast.Try) -> None: ... def check_for_b905(self, node: ast.Call) -> None: ... + def check_for_b912(self, node: ast.Call) -> None: ... def check_for_b906(self, node: ast.FunctionDef) -> None: ... def check_for_b907(self, node: ast.JoinedStr) -> None: ... def check_for_b028(self, node: ast.Call) -> None: ... def check_for_b032(self, node: ast.AnnAssign) -> None: ... def check_for_b033(self, node: ast.Set | ast.List | ast.Tuple) -> None: ... def check_for_b034(self, node: ast.Call) -> None: ... + def check_for_b042(self, node: ast.ClassDef) -> None: ... def check_for_b909(self, node: ast.For) -> None: ... def check_for_b910(self, node: ast.Call) -> None: ... def check_for_b911(self, node: ast.Call) -> None: ... @@ -180,7 +197,7 @@ class B909Checker(ast.NodeVisitor): MUTATING_FUNCTIONS: ClassVar[tuple[str, ...]] name: str key: str - mutations: dict[int, list[ast.AST]] + mutations: dict[int, list[ast.Assign | ast.AugAssign | ast.Delete | ast.Call]] def __init__(self, name: str, key: str) -> None: ... def visit_Assign(self, node: ast.Assign) -> None: ... def visit_AugAssign(self, node: ast.AugAssign) -> None: ... @@ -222,75 +239,30 @@ class B020NameFinder(NameFinder): def visit_comprehension(self, node: ast.comprehension) -> None: ... def visit_Lambda(self, node: ast.Lambda) -> None: ... 
-class error(NamedTuple): - lineno: int - col: int - message: str - type: type[BugBearChecker] - vars: tuple[Incomplete] - -Error: Callable[..., partial[error]] -B001: partial[error] -B002: partial[error] -B003: partial[error] -B004: partial[error] -B005: partial[error] B005_METHODS: Final[set[str]] -B006: partial[error] B006_MUTABLE_LITERALS: Final[tuple[Literal["Dict"], Literal["List"], Literal["Set"]]] B006_MUTABLE_COMPREHENSIONS: Final[tuple[Literal["ListComp"], Literal["DictComp"], Literal["SetComp"]]] B006_MUTABLE_CALLS: Final[set[str]] -B007: partial[error] -B008: partial[error] B008_IMMUTABLE_CALLS: Final[set[str]] -B009: partial[error] -B010: partial[error] -B011: partial[error] -B012: partial[error] -B013: partial[error] -B014: partial[error] B014_REDUNDANT_EXCEPTIONS: Final[dict[Literal["OSError", "ValueError"], set[str]]] -B015: partial[error] -B016: partial[error] -B017: partial[error] -B018: partial[error] -B019: partial[error] B019_CACHES: Final[set[str]] -B020: partial[error] -B021: partial[error] -B022: partial[error] -B023: partial[error] -B024: partial[error] -B025: partial[error] -B026: partial[error] -B027: partial[error] -B028: partial[error] -B029: partial[error] -B030: partial[error] -B031: partial[error] -B032: partial[error] -B033: partial[error] -B034: partial[error] -B035: partial[error] -B036: partial[error] -B037: partial[error] -B039: partial[error] -B040: partial[error] -B041: partial[error] -B901: partial[error] -B902: partial[error] B902_IMPLICIT_CLASSMETHODS: Final[set[str]] B902_SELF: Final[list[str]] B902_CLS: Final[list[str]] B902_METACLS: Final[list[str]] -B903: partial[error] -B904: partial[error] -B905: partial[error] -B906: partial[error] -B907: partial[error] -B908: partial[error] -B909: partial[error] -B910: partial[error] -B911: partial[error] -B950: partial[error] + +class error(NamedTuple): + lineno: int + col: int + message: str + type: type[BugBearChecker] + # Arguments for formatting the message, i.e. 
message.format(*vars). + vars: tuple[object, ...] + +class Error: + message: str + def __init__(self, message: str) -> None: ... + def __call__(self, lineno: int, col: int, vars: tuple[object, ...] = ()) -> error: ... + +error_codes: Final[dict[str, Error]] disabled_by_default: Final[list[str]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-builtins/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/flake8-builtins/METADATA.toml index c50b847abe..3406949e4f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/flake8-builtins/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-builtins/METADATA.toml @@ -1,3 +1,3 @@ -version = "3.0.*" +version = "3.1.*" upstream_repository = "https://github.com/gforcada/flake8-builtins" requires = ["types-flake8"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/METADATA.toml index 5e130f0d97..55561890fd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/METADATA.toml @@ -1,2 +1,2 @@ -version = "0.22.*" +version = "0.30.*" upstream_repository = "https://github.com/MartinThoma/flake8-simplify" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/__init__.pyi index db32a3db41..6aaaeb0635 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/__init__.pyi @@ -6,6 +6,7 @@ from typing import Any, ClassVar logger: logging.Logger class Visitor(ast.NodeVisitor): + errors: list[tuple[int, int, str]] def __init__(self) -> None: ... 
def visit_Assign(self, node: ast.Assign) -> None: ... def visit_Call(self, node: ast.Call) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/constants.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/constants.pyi index 66c47b6ad8..58254db706 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/constants.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/constants.pyi @@ -1,3 +1,6 @@ -BOOL_CONST_TYPES: tuple[type, ...] -AST_CONST_TYPES: tuple[type, ...] -STR_TYPES: tuple[type, ...] +import ast +from typing import Final + +BOOL_CONST_TYPES: Final[tuple[type[ast.Constant]]] +AST_CONST_TYPES: Final[tuple[type[ast.Constant]]] +STR_TYPES: Final[tuple[type[ast.Constant]]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/utils.pyi index 3f4ea1c652..0610464020 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/utils.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/utils.pyi @@ -24,7 +24,6 @@ class Assign(ast.Assign): def __init__(self, orig: ast.Assign) -> None: ... def to_source(node: ast.expr | ast.Expr | ast.withitem | ast.slice | ast.Assign | None) -> str: ... -def strip_parenthesis(string: str) -> str: ... def strip_triple_quotes(string: str) -> str: ... def use_double_quotes(string: str) -> str: ... def is_body_same(body1: list[ast.stmt], body2: list[ast.stmt]) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/drawing.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/drawing.pyi index 1d0fbefb72..c393f1f7ea 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/drawing.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/drawing.pyi @@ -1,18 +1,36 @@ import decimal -from _typeshed import Incomplete +import sys +from _typeshed import Incomplete, SupportsWrite from collections import OrderedDict -from collections.abc import Callable, Generator, Sequence +from collections.abc import Callable, Generator, Iterable, Sequence from contextlib import contextmanager from re import Pattern -from typing import Any, ClassVar, Literal, NamedTuple, TypeVar, overload +from typing import Any, ClassVar, Literal, NamedTuple, Protocol, TypeVar, overload, type_check_only from typing_extensions import Self, TypeAlias +if sys.version_info >= (3, 10): + from types import EllipsisType +else: + # Rely on builtins.ellipsis + from builtins import ellipsis as EllipsisType + +from .enums import PathPaintRule from .syntax import Name, Raw __pdoc__: dict[str, bool] +_T = TypeVar("_T") _CallableT = TypeVar("_CallableT", bound=Callable[..., Any]) +@type_check_only +class _SupportsSerialize(Protocol): + def serialize(self) -> str: ... + +@type_check_only +class _SupportsEndPoint(Protocol): + @property + def end_point(self) -> Point: ... + def force_nodocument(item: _CallableT) -> _CallableT: ... def force_document(item: _CallableT) -> _CallableT: ... @@ -23,12 +41,24 @@ EOL_CHARS: frozenset[str] DELIMITERS: frozenset[str] STR_ESC: Pattern[str] STR_ESC_MAP: dict[str, str] +_Primitive: TypeAlias = ( + _SupportsSerialize + | Number + | str + | bytes + | bool + | Raw + | list[_Primitive] + | tuple[_Primitive, ...] + | dict[Name, _Primitive] + | None +) class GraphicsStateDictRegistry(OrderedDict[Raw, Name]): def register_style(self, style: GraphicsStyle) -> Name | None: ... 
-def number_to_str(number) -> str: ... -def render_pdf_primitive(primitive) -> Raw: ... +def number_to_str(number: Number) -> str: ... +def render_pdf_primitive(primitive: _Primitive) -> Raw: ... class _DeviceRGBBase(NamedTuple): r: Number @@ -72,8 +102,8 @@ class DeviceCMYK(_DeviceCMYKBase): def colors(self) -> tuple[Number, Number, Number, Number]: ... def serialize(self) -> str: ... -def rgb8(r, g, b, a=None) -> DeviceRGB: ... -def gray8(g, a=None) -> DeviceGray: ... +def rgb8(r: Number, g: Number, b: Number, a: Number | None = None) -> DeviceRGB: ... +def gray8(g: Number, a: Number | None = None) -> DeviceGray: ... @overload def convert_to_device_color(r: DeviceCMYK) -> DeviceCMYK: ... @overload @@ -87,24 +117,24 @@ def convert_to_device_color(r: int, g: Literal[-1] = -1, b: Literal[-1] = -1) -> @overload def convert_to_device_color(r: Sequence[int] | int, g: int, b: int) -> DeviceGray | DeviceRGB: ... def cmyk8(c, m, y, k, a=None) -> DeviceCMYK: ... -def color_from_hex_string(hexstr) -> DeviceRGB: ... -def color_from_rgb_string(rgbstr) -> DeviceRGB: ... +def color_from_hex_string(hexstr: str) -> DeviceRGB: ... +def color_from_rgb_string(rgbstr: str) -> DeviceRGB: ... class Point(NamedTuple): x: Number y: Number - def render(self): ... - def dot(self, other): ... - def angle(self, other): ... - def mag(self): ... - def __add__(self, other): ... - def __sub__(self, other): ... - def __neg__(self): ... - def __mul__(self, other): ... - def __rmul__(self, other): ... - def __truediv__(self, other): ... - def __floordiv__(self, other): ... - def __matmul__(self, other): ... + def render(self) -> str: ... + def dot(self, other: Point) -> Number: ... + def angle(self, other: Point) -> float: ... + def mag(self) -> Number: ... + def __add__(self, other: Point) -> Point: ... # type: ignore[override] + def __sub__(self, other: Point) -> Point: ... + def __neg__(self) -> Point: ... + def __mul__(self, other: Number) -> Point: ... 
# type: ignore[override] + def __rmul__(self, other: Number) -> Point: ... # type: ignore[override] + def __truediv__(self, other: Number) -> Point: ... + def __floordiv__(self, other: Number) -> Point: ... + def __matmul__(self, other: Transform) -> Point: ... class Transform(NamedTuple): a: Number @@ -114,35 +144,35 @@ class Transform(NamedTuple): e: Number f: Number @classmethod - def identity(cls): ... + def identity(cls) -> Self: ... @classmethod - def translation(cls, x, y): ... + def translation(cls, x: Number, y: Number) -> Self: ... @classmethod - def scaling(cls, x, y=None): ... + def scaling(cls, x: Number, y: Number | None = None) -> Self: ... @classmethod - def rotation(cls, theta): ... + def rotation(cls, theta: Number) -> Self: ... @classmethod - def rotation_d(cls, theta_d): ... + def rotation_d(cls, theta_d: Number) -> Self: ... @classmethod - def shearing(cls, x, y=None): ... - def translate(self, x, y): ... - def scale(self, x, y=None): ... - def rotate(self, theta): ... - def rotate_d(self, theta_d): ... - def shear(self, x, y=None): ... - def about(self, x, y): ... - def __mul__(self, other): ... - def __rmul__(self, other): ... - def __matmul__(self, other): ... - def render(self, last_item): ... + def shearing(cls, x: Number, y: Number | None = None) -> Self: ... + def translate(self, x: Number, y: Number) -> Self: ... + def scale(self, x: Number, y: Number | None = None) -> Self: ... + def rotate(self, theta: Number) -> Self: ... + def rotate_d(self, theta_d: Number) -> Self: ... + def shear(self, x: Number, y: Number | None = None) -> Self: ... + def about(self, x: Number, y: Number) -> Transform: ... + def __mul__(self, other: Number) -> Transform: ... # type: ignore[override] + def __rmul__(self, other: Number) -> Transform: ... # type: ignore[override] + def __matmul__(self, other: Transform) -> Self: ... + def render(self, last_item: _T) -> tuple[str, _T]: ... 
class GraphicsStyle: - INHERIT: ClassVar[Incomplete] + INHERIT: ClassVar[EllipsisType] MERGE_PROPERTIES: ClassVar[tuple[str, ...]] TRANSPARENCY_KEYS: ClassVar[tuple[Name, ...]] PDF_STYLE_KEYS: ClassVar[tuple[Name, ...]] @classmethod - def merge(cls, parent, child): ... + def merge(cls, parent, child) -> Self: ... def __init__(self) -> None: ... def __deepcopy__(self, memo) -> Self: ... @property @@ -150,13 +180,13 @@ class GraphicsStyle: @allow_transparency.setter def allow_transparency(self, new): ... @property - def paint_rule(self): ... + def paint_rule(self) -> PathPaintRule | EllipsisType: ... @paint_rule.setter - def paint_rule(self, new) -> None: ... + def paint_rule(self, new: PathPaintRule | str | EllipsisType | None) -> None: ... @property - def auto_close(self): ... + def auto_close(self) -> bool | EllipsisType: ... @auto_close.setter - def auto_close(self, new) -> None: ... + def auto_close(self, new: bool | EllipsisType) -> None: ... @property def intersection_rule(self): ... @intersection_rule.setter @@ -172,7 +202,7 @@ class GraphicsStyle: @property def stroke_color(self): ... @stroke_color.setter - def stroke_color(self, color) -> None: ... + def stroke_color(self, color: str | DeviceRGB | DeviceGray | DeviceCMYK | EllipsisType | None) -> None: ... @property def stroke_opacity(self): ... @stroke_opacity.setter @@ -184,7 +214,7 @@ class GraphicsStyle: @property def stroke_width(self): ... @stroke_width.setter - def stroke_width(self, width) -> None: ... + def stroke_width(self, width: Number | EllipsisType | None) -> None: ... @property def stroke_cap_style(self): ... @stroke_cap_style.setter @@ -196,36 +226,66 @@ class GraphicsStyle: @property def stroke_miter_limit(self): ... @stroke_miter_limit.setter - def stroke_miter_limit(self, value) -> None: ... + def stroke_miter_limit(self, value: Number | EllipsisType) -> None: ... @property def stroke_dash_pattern(self): ... @stroke_dash_pattern.setter - def stroke_dash_pattern(self, value) -> None: ... 
+ def stroke_dash_pattern(self, value: Number | Iterable[Number] | EllipsisType | None) -> None: ... @property def stroke_dash_phase(self): ... @stroke_dash_phase.setter - def stroke_dash_phase(self, value): ... + def stroke_dash_phase(self, value: Number | EllipsisType): ... def serialize(self) -> Raw | None: ... - def resolve_paint_rule(self): ... + def resolve_paint_rule(self) -> PathPaintRule: ... class Move(NamedTuple): pt: Point @property - def end_point(self): ... - def render(self, gsd_registry, style, last_item, initial_point): ... - def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + def end_point(self) -> Point: ... + def render( + self, gsd_registry: GraphicsStateDictRegistry, style: GraphicsStyle, last_item: _SupportsEndPoint, initial_point: Point + ) -> tuple[str, Self, Point]: ... + def render_debug( + self, + gsd_registry: GraphicsStateDictRegistry, + style: GraphicsStyle, + last_item: _SupportsEndPoint, + initial_point: Point, + debug_stream: SupportsWrite[str], + pfx: str, + ) -> tuple[str, Self, Point]: ... class RelativeMove(NamedTuple): pt: Point - def render(self, gsd_registry, style, last_item, initial_point): ... - def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + def render( + self, gsd_registry: GraphicsStateDictRegistry, style: GraphicsStyle, last_item: _SupportsEndPoint, initial_point: Point + ) -> tuple[str, Move, Point]: ... + def render_debug( + self, + gsd_registry: GraphicsStateDictRegistry, + style: GraphicsStyle, + last_item: _SupportsEndPoint, + initial_point: Point, + debug_stream: SupportsWrite[str], + pfx: str, + ) -> tuple[str, Move, Point]: ... class Line(NamedTuple): pt: Point @property - def end_point(self): ... - def render(self, gsd_registry, style, last_item, initial_point): ... - def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + def end_point(self) -> Point: ... 
+ def render( + self, gsd_registry: GraphicsStateDictRegistry, style: GraphicsStyle, last_item: _SupportsEndPoint, initial_point: Point + ) -> tuple[str, Self, Point]: ... + def render_debug( + self, + gsd_registry: GraphicsStateDictRegistry, + style: GraphicsStyle, + last_item: _SupportsEndPoint, + initial_point: Point, + debug_stream: SupportsWrite[str], + pfx: str, + ) -> tuple[str, Self, Point]: ... class RelativeLine(NamedTuple): pt: Point @@ -257,7 +317,7 @@ class BezierCurve(NamedTuple): c2: Point end: Point @property - def end_point(self): ... + def end_point(self) -> Point: ... def render(self, gsd_registry, style, last_item, initial_point): ... def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... @@ -272,7 +332,7 @@ class QuadraticBezierCurve(NamedTuple): ctrl: Point end: Point @property - def end_point(self): ... + def end_point(self) -> Point: ... def to_cubic_curve(self, start_point): ... def render(self, gsd_registry, style, last_item, initial_point): ... def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... @@ -290,7 +350,7 @@ class Arc(NamedTuple): sweep: bool end: Point @staticmethod - def subdivde_sweep(sweep_angle) -> Generator[Incomplete, None, None]: ... + def subdivde_sweep(sweep_angle: Number) -> Generator[tuple[Point, Point, Point]]: ... def render(self, gsd_registry, style, last_item, initial_point): ... def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... @@ -337,10 +397,10 @@ class DrawingContext: def render_debug(self, gsd_registry, first_point, scale, height, starting_style, debug_stream): ... class PaintedPath: - def __init__(self, x: int = 0, y: int = 0) -> None: ... + def __init__(self, x: Number = 0, y: Number = 0) -> None: ... def __deepcopy__(self, memo) -> Self: ... @property - def style(self): ... + def style(self) -> GraphicsStyle: ... @property def transform(self): ... 
@transform.setter @@ -361,30 +421,34 @@ class PaintedPath: def transform_group(self, transform) -> Generator[Self]: ... def add_path_element(self, item, _copy: bool = True) -> None: ... def remove_last_path_element(self) -> None: ... - def rectangle(self, x, y, w, h, rx: int = 0, ry: int = 0) -> Self: ... - def circle(self, cx, cy, r) -> Self: ... - def ellipse(self, cx, cy, rx, ry) -> Self: ... - def move_to(self, x, y) -> Self: ... - def move_relative(self, x, y) -> Self: ... - def line_to(self, x, y) -> Self: ... - def line_relative(self, dx, dy) -> Self: ... - def horizontal_line_to(self, x) -> Self: ... - def horizontal_line_relative(self, dx) -> Self: ... - def vertical_line_to(self, y) -> Self: ... - def vertical_line_relative(self, dy) -> Self: ... - def curve_to(self, x1, y1, x2, y2, x3, y3) -> Self: ... - def curve_relative(self, dx1, dy1, dx2, dy2, dx3, dy3) -> Self: ... - def quadratic_curve_to(self, x1, y1, x2, y2) -> Self: ... - def quadratic_curve_relative(self, dx1, dy1, dx2, dy2) -> Self: ... - def arc_to(self, rx, ry, rotation, large_arc, positive_sweep, x, y) -> Self: ... - def arc_relative(self, rx, ry, rotation, large_arc, positive_sweep, dx, dy) -> Self: ... + def rectangle(self, x: Number, y: Number, w: Number, h: Number, rx: Number = 0, ry: Number = 0) -> Self: ... + def circle(self, cx: Number, cy: Number, r: Number) -> Self: ... + def ellipse(self, cx: Number, cy: Number, rx: Number, ry: Number) -> Self: ... + def move_to(self, x: Number, y: Number) -> Self: ... + def move_relative(self, x: Number, y: Number) -> Self: ... + def line_to(self, x: Number, y: Number) -> Self: ... + def line_relative(self, dx: Number, dy: Number) -> Self: ... + def horizontal_line_to(self, x: Number) -> Self: ... + def horizontal_line_relative(self, dx: Number) -> Self: ... + def vertical_line_to(self, y: Number) -> Self: ... + def vertical_line_relative(self, dy: Number) -> Self: ... 
+ def curve_to(self, x1: Number, y1: Number, x2: Number, y2: Number, x3: Number, y3: Number) -> Self: ... + def curve_relative(self, dx1: Number, dy1: Number, dx2: Number, dy2: Number, dx3: Number, dy3: Number) -> Self: ... + def quadratic_curve_to(self, x1: Number, y1: Number, x2: Number, y2: Number) -> Self: ... + def quadratic_curve_relative(self, dx1: Number, dy1: Number, dx2: Number, dy2: Number) -> Self: ... + def arc_to( + self, rx: Number, ry: Number, rotation: Number, large_arc: bool, positive_sweep: bool, x: Number, y: Number + ) -> Self: ... + def arc_relative( + self, rx: Number, ry: Number, rotation: Number, large_arc: bool, positive_sweep: bool, dx: Number, dy: Number + ) -> Self: ... def close(self) -> None: ... def render(self, gsd_registry, style, last_item, initial_point, debug_stream=None, pfx=None): ... def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... class ClippingPath(PaintedPath): - paint_rule: Incomplete - def __init__(self, x: int = 0, y: int = 0) -> None: ... + paint_rule: PathPaintRule + def __init__(self, x: Number = 0, y: Number = 0) -> None: ... def render(self, gsd_registry, style, last_item, initial_point, debug_stream=None, pfx=None): ... def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/text_region.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/text_region.pyi index 481a173b16..e5b62cfa83 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/text_region.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/text_region.pyi @@ -14,7 +14,7 @@ class Extents(NamedTuple): class TextRegionMixin: def __init__(self, *args, **kwargs) -> None: ... def register_text_region(self, region) -> None: ... - def is_current_text_region(self, region): ... + def is_current_text_region(self, region) -> bool: ... 
def clear_text_region(self) -> None: ... class LineWrapper(NamedTuple): diff --git a/packages/pyright-internal/typeshed-fallback/stubs/geopandas/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/geopandas/METADATA.toml index 97cd71db2c..eabfde716d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/geopandas/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/geopandas/METADATA.toml @@ -1,4 +1,4 @@ -version = "1.1.1" +version = "1.1.2" # Requires a version of numpy with a `py.typed` file requires = ["numpy>=1.20", "pandas-stubs", "types-shapely", "pyproj"] upstream_repository = "https://github.com/geopandas/geopandas" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/geopandas/geopandas/array.pyi b/packages/pyright-internal/typeshed-fallback/stubs/geopandas/geopandas/array.pyi index 7572dd5b5c..ca9596b43c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/geopandas/geopandas/array.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/geopandas/geopandas/array.pyi @@ -1,12 +1,13 @@ import builtins from _typeshed import Incomplete, Unused -from collections.abc import Callable, Collection, Sequence +from collections.abc import Callable, Collection from typing import Any, ClassVar, Final, Literal, NoReturn, SupportsIndex, TypeVar, overload from typing_extensions import Self, TypeAlias, deprecated import numpy as np import pandas as pd from numpy.typing import ArrayLike, DTypeLike, NDArray +from pandas._typing import ScalarIndexer, SequenceIndexer, TakeIndexer from pandas.api.extensions import ExtensionArray, ExtensionDtype from pyproj import CRS, Transformer from shapely import Geometry @@ -68,15 +69,9 @@ class GeometryArray(ExtensionArray): def __len__(self) -> int: ... # np.integer[Any] because precision is not important @overload - def __getitem__(self, idx: int | np.integer[Any]) -> BaseGeometry: ... 
# Always 1-D, doesn't accept tuple + def __getitem__(self, idx: ScalarIndexer) -> BaseGeometry: ... # Always 1-D, doesn't accept tuple @overload - def __getitem__( - self, idx: slice | Sequence[SupportsIndex] | NDArray[np.bool_] | NDArray[np.integer[Any]] - ) -> GeometryArray: ... - @overload - def __getitem__( - self, idx: int | np.integer[Any] | slice | Sequence[int] | NDArray[np.bool_] | NDArray[np.integer[Any]] - ) -> BaseGeometry | GeometryArray: ... + def __getitem__(self, idx: SequenceIndexer) -> GeometryArray: ... def __setitem__( self, key, value: _ArrayOrGeom | pd.DataFrame | pd.Series[Any] # Cannot use pd.Series[BaseGeometry] ) -> None: ... @@ -222,17 +217,15 @@ class GeometryArray(ExtensionArray): @property def ndim(self) -> Literal[1]: ... def copy(self, *args: Unused, **kwargs: Unused) -> GeometryArray: ... - def take( - self, indices: Sequence[SupportsIndex] | NDArray[np.integer], allow_fill: bool = False, fill_value: Geometry | None = None - ) -> GeometryArray: ... - def fillna( + def take(self, indices: TakeIndexer, allow_fill: bool = False, fill_value: Geometry | None = None) -> GeometryArray: ... + def fillna( # type: ignore[override] self, value: Geometry | GeometryArray | None = None, method: Literal["backfill", "bfill", "pad", "ffill"] | None = None, limit: int | None = None, copy: bool = True, ) -> GeometryArray: ... - @overload + @overload # type: ignore[override] def astype(self, dtype: GeometryDtype, copy: bool = True) -> GeometryArray: ... @overload def astype(self, dtype: ExtensionDtype | Literal["string"], copy: bool = True) -> ExtensionArray: ... 
# type: ignore[overload-overlap] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/geopandas/geopandas/geodataframe.pyi b/packages/pyright-internal/typeshed-fallback/stubs/geopandas/geopandas/geodataframe.pyi index e59df53cec..ea6b8d1fdb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/geopandas/geopandas/geodataframe.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/geopandas/geopandas/geodataframe.pyi @@ -282,6 +282,7 @@ class GeoDataFrame(GeoPandasBase, pd.DataFrame): # type: ignore[misc] def to_crs(self, crs: _ConvertibleToCRS | None, epsg: int, inplace: Literal[True]) -> None: ... def estimate_utm_crs(self, datum_name: str = "WGS 84") -> CRS: ... # def __getitem__(self, key): ... + def __delitem__(self, key) -> None: ... # type: ignore[misc] # def __setitem__(self, key, value) -> None: ... def copy(self, deep: bool = True) -> Self: ... # type: ignore[misc] # def merge(self, *args, **kwargs) -> GeoDataFrame | pd.DataFrame: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/gevent/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/gevent/METADATA.toml index dce29acee5..58869c1ac9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/gevent/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/gevent/METADATA.toml @@ -1,6 +1,6 @@ -version = "25.4.*" +version = "25.9.*" upstream_repository = "https://github.com/gevent/gevent" -requires = ["types-greenlet", "types-psutil"] +requires = ["types-greenlet", "types-psutil>=7.2.0"] [tool.stubtest] # Run stubtest on all platforms, since there is some platform specific stuff diff --git a/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/events.pyi b/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/events.pyi index ffc491711f..237d84ba28 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/events.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/events.pyi 
@@ -1,4 +1,3 @@ -import sys from collections.abc import Callable, Mapping, Sequence from types import ModuleType from typing import Any, Protocol, TypeVar, type_check_only @@ -6,6 +5,7 @@ from typing_extensions import TypeAlias from gevent.hub import Hub from greenlet import greenlet as greenlet_t +from psutil._ntuples import pmem _T = TypeVar("_T") # FIXME: While it would be nice to import Interface from zope.interface here so the @@ -17,17 +17,6 @@ Interface: TypeAlias = Any def implementer(interface: Interface, /) -> Callable[[_T], _T]: ... -# this is copied from types-psutil, it would be nice if we could just import this -# but it doesn't seem like we can... -if sys.platform == "linux": - from psutil._pslinux import pmem -elif sys.platform == "darwin": - from psutil._psosx import pmem -elif sys.platform == "win32": - from psutil._pswindows import pmem -else: - class pmem(Any): ... - subscribers: list[Callable[[Any], object]] @type_check_only diff --git a/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/libuv/watcher.pyi b/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/libuv/watcher.pyi index af5b5e6331..0f008f490f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/libuv/watcher.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/libuv/watcher.pyi @@ -14,6 +14,7 @@ class io(_base.IoMixin, watcher): @events.setter def events(self, value: int) -> None: ... def multiplex(self, events: int) -> _IoWatcher: ... + def close_all(self) -> None: ... class fork(_base.ForkMixin, watcher): ... class child(_base.ChildMixin, watcher): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/os.pyi b/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/os.pyi index 279312a03b..ac7d0cf0b9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/os.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/os.pyi @@ -10,6 +10,7 @@ def tp_read(fd: FileDescriptor, n: int) -> bytes: ... def tp_write(fd: FileDescriptor, buf: ReadableBuffer) -> int: ... if sys.platform != "win32": + def close(fd: FileDescriptor) -> None: ... def make_nonblocking(fd: FileDescriptor) -> Literal[True] | None: ... def nb_read(fd: FileDescriptor, n: int) -> bytes: ... def nb_write(fd: FileDescriptor, buf: ReadableBuffer) -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/signal.pyi b/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/signal.pyi index 4c08f33ff5..66d277ac68 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/signal.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/gevent/gevent/signal.pyi @@ -8,5 +8,6 @@ from signal import _HANDLER, _SIGNUM if sys.platform != "win32": def getsignal(signalnum: _SIGNUM) -> _HANDLER: ... def signal(signalnum: _SIGNUM, handler: _HANDLER) -> _HANDLER: ... + def set_wakeup_fd(fd: int, /, *, warn_on_full_buffer: bool = True) -> int: ... 
- __all__ = ["signal", "getsignal"] + __all__ = ["signal", "getsignal", "set_wakeup_fd"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/METADATA.toml index a0eac2308f..5b272480a4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/METADATA.toml @@ -1,4 +1,4 @@ -version = "2.3.*" +version = "2.4.*" upstream_repository = "https://github.com/googleapis/python-ndb" partial_stub = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_cache.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_cache.pyi index e40a6ddb34..3f416ed306 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_cache.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_cache.pyi @@ -1,5 +1,4 @@ from _typeshed import Incomplete -from typing import Any from google.cloud.ndb import tasklets as tasklets @@ -13,11 +12,11 @@ class _GlobalCacheBatch: def make_call(self) -> None: ... def future_info(self, key) -> None: ... -global_get: Any +global_get: Incomplete class _GlobalCacheGetBatch(_GlobalCacheBatch): - todo: Any - keys: Any + todo: Incomplete + keys: Incomplete def __init__(self, ignore_options) -> None: ... def add(self, key): ... def done_callback(self, cache_call) -> None: ... @@ -27,7 +26,7 @@ class _GlobalCacheGetBatch(_GlobalCacheBatch): def global_set(key, value, expires: Incomplete | None = ..., read: bool = ...): ... class _GlobalCacheSetBatch(_GlobalCacheBatch): - expires: Any + expires: Incomplete todo: object futures: object def __init__(self, options) -> None: ... @@ -41,17 +40,17 @@ class _GlobalCacheSetIfNotExistsBatch(_GlobalCacheSetBatch): def make_call(self): ... 
def future_info(self, key, value): ... # type:ignore[override] -global_delete: Any +global_delete: Incomplete class _GlobalCacheDeleteBatch(_GlobalCacheBatch): - keys: Any - futures: Any + keys: Incomplete + futures: Incomplete def __init__(self, ignore_options) -> None: ... def add(self, key): ... def make_call(self): ... def future_info(self, key): ... -global_watch: Any +global_watch: Incomplete class _GlobalCacheWatchBatch(_GlobalCacheDeleteBatch): def make_call(self): ... @@ -63,7 +62,7 @@ class _GlobalCacheUnwatchBatch(_GlobalCacheDeleteBatch): def make_call(self): ... def future_info(self, key): ... -global_compare_and_swap: Any +global_compare_and_swap: Incomplete class _GlobalCacheCompareAndSwapBatch(_GlobalCacheSetBatch): def make_call(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_query.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_query.pyi index 597b8810e9..93454c9e1b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_query.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_query.pyi @@ -1,5 +1,4 @@ from _typeshed import Incomplete -from typing import Any class QueryIterator: def __iter__(self): ... @@ -14,7 +13,7 @@ class QueryIterator: class Cursor: @classmethod def from_websafe_string(cls, urlsafe): ... - cursor: Any + cursor: Incomplete def __init__(self, cursor: Incomplete | None = ..., urlsafe: Incomplete | None = ...) -> None: ... def to_websafe_string(self): ... def urlsafe(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_eventloop.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_eventloop.pyi index 2c80cd3751..47091d2d39 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_eventloop.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_eventloop.pyi @@ -1,18 +1,19 @@ -from typing import Any, NamedTuple +from _typeshed import Incomplete +from typing import NamedTuple class _Event(NamedTuple): - when: Any - callback: Any - args: Any - kwargs: Any + when: Incomplete + callback: Incomplete + args: Incomplete + kwargs: Incomplete class EventLoop: - current: Any - idlers: Any + current: Incomplete + idlers: Incomplete inactive: int - queue: Any - rpcs: Any - rpc_results: Any + queue: Incomplete + rpcs: Incomplete + rpc_results: Incomplete def __init__(self) -> None: ... def clear(self) -> None: ... def insort_event_right(self, event) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/blobstore.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/blobstore.pyi index acb002aadc..8db54d2a19 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/blobstore.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/blobstore.pyi @@ -1,8 +1,8 @@ -from typing import Any +from _typeshed import Incomplete from google.cloud.ndb import model -BlobKey: Any +BlobKey: Incomplete BLOB_INFO_KIND: str BLOB_MIGRATION_KIND: str BLOB_KEY_HEADER: str @@ -51,10 +51,10 @@ class Error: def fetch_data(*args, **kwargs) -> None: ... def fetch_data_async(*args, **kwargs) -> None: ... 
-get: Any -get_async: Any -get_multi: Any -get_multi_async: Any +get: Incomplete +get_async: Incomplete +get_multi: Incomplete +get_multi_async: Incomplete class InternalError: def __init__(self, *args, **kwargs) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/context.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/context.pyi index ec1ca4029d..b5234d052a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/context.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/context.pyi @@ -1,6 +1,6 @@ from _typeshed import Incomplete from collections.abc import Callable -from typing import Any, NamedTuple +from typing import NamedTuple from google.cloud.ndb import Key, exceptions as exceptions @@ -19,18 +19,18 @@ def get_context(raise_context_error: bool = ...): ... def get_toplevel_context(raise_context_error: bool = ...): ... class _ContextTuple(NamedTuple): - id: Any - client: Any - namespace: Any - eventloop: Any - batches: Any - commit_batches: Any - transaction: Any - cache: Any - global_cache: Any - on_commit_callbacks: Any - transaction_complete_callbacks: Any - legacy_data: Any + id: Incomplete + client: Incomplete + namespace: Incomplete + eventloop: Incomplete + batches: Incomplete + commit_batches: Incomplete + transaction: Incomplete + cache: Incomplete + global_cache: Incomplete + on_commit_callbacks: Incomplete + transaction_complete_callbacks: Incomplete + legacy_data: Incomplete class _Context(_ContextTuple): def __new__( @@ -67,19 +67,19 @@ class Context(_Context): def get_cache_policy(self): ... def get_datastore_policy(self) -> None: ... def get_global_cache_policy(self): ... - get_memcache_policy: Any + get_memcache_policy: Incomplete def get_global_cache_timeout_policy(self): ... 
- get_memcache_timeout_policy: Any - cache_policy: Any + get_memcache_timeout_policy: Incomplete + cache_policy: Incomplete def set_cache_policy(self, policy): ... - datastore_policy: Any + datastore_policy: Incomplete def set_datastore_policy(self, policy): ... - global_cache_policy: Any + global_cache_policy: Incomplete def set_global_cache_policy(self, policy): ... - set_memcache_policy: Any - global_cache_timeout_policy: Any + set_memcache_policy: Incomplete + global_cache_timeout_policy: Incomplete def set_global_cache_timeout_policy(self, policy): ... - set_memcache_timeout_policy: Any + set_memcache_timeout_policy: Incomplete def get_retry_state(self): ... def set_retry_state(self, state) -> None: ... def clear_retry_state(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/exceptions.pyi index ab5a3a0ae0..c575423699 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/exceptions.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/exceptions.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete class Error(Exception): ... @@ -12,7 +12,7 @@ class Rollback(Error): ... class BadQueryError(Error): ... class BadFilterError(Error): - filter: Any + filter: Incomplete def __init__(self, filter) -> None: ... 
class NoLongerImplementedError(NotImplementedError): diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/global_cache.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/global_cache.pyi index 3fca22af04..aa91d31411 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/global_cache.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/global_cache.pyi @@ -1,13 +1,12 @@ import abc from _typeshed import Incomplete -from typing import Any from typing_extensions import Self -ConnectionError: Any +ConnectionError: Incomplete class GlobalCache(metaclass=abc.ABCMeta): - __metaclass__: Any - transient_errors: Any + __metaclass__: Incomplete + transient_errors: Incomplete strict_read: bool strict_write: bool @abc.abstractmethod @@ -26,7 +25,7 @@ class GlobalCache(metaclass=abc.ABCMeta): def clear(self): ... class _InProcessGlobalCache(GlobalCache): - cache: Any + cache: Incomplete def __init__(self) -> None: ... def get(self, keys): ... def set(self, items, expires: Incomplete | None = ...) -> None: ... @@ -37,12 +36,12 @@ class _InProcessGlobalCache(GlobalCache): def clear(self) -> None: ... class RedisCache(GlobalCache): - transient_errors: Any + transient_errors: Incomplete @classmethod def from_environment(cls, strict_read: bool = ..., strict_write: bool = ...) -> Self: ... - redis: Any - strict_read: Any - strict_write: Any + redis: Incomplete + strict_read: Incomplete + strict_write: Incomplete def __init__(self, redis, strict_read: bool = ..., strict_write: bool = ...) -> None: ... @property def pipes(self): ... @@ -56,16 +55,16 @@ class RedisCache(GlobalCache): class MemcacheCache(GlobalCache): class KeyNotSet(Exception): - key: Any + key: Incomplete def __init__(self, key) -> None: ... def __eq__(self, other): ... 
- transient_errors: Any + transient_errors: Incomplete @classmethod def from_environment(cls, max_pool_size: int = ..., strict_read: bool = ..., strict_write: bool = ...) -> Self: ... - client: Any - strict_read: Any - strict_write: Any + client: Incomplete + strict_read: Incomplete + strict_write: Incomplete def __init__(self, client, strict_read: bool = ..., strict_write: bool = ...) -> None: ... @property def caskeys(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/key.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/key.pyi index 292e5dd9d0..fde8894a39 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/key.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/key.pyi @@ -1,7 +1,6 @@ from _typeshed import Incomplete -from typing import Any -UNDEFINED: Any +UNDEFINED: Incomplete class Key: def __new__(cls, *path_args, **kwargs): ... @@ -17,7 +16,7 @@ class Key: def root(self): ... def namespace(self): ... def project(self): ... - app: Any + app: Incomplete def database(self) -> str | None: ... def id(self): ... def string_id(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/metadata.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/metadata.pyi index 3f52576f44..4e22eee359 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/metadata.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/metadata.pyi @@ -1,5 +1,4 @@ from _typeshed import Incomplete -from typing import Any from google.cloud.ndb import model @@ -32,7 +31,7 @@ class Property(_BaseMetadata): def property_name(self): ... @property def kind_name(self): ... 
- property_representation: Any + property_representation: Incomplete @classmethod def key_for_kind(cls, kind): ... @classmethod diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/polymodel.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/polymodel.pyi index 43cabd2888..29d9aa8b5f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/polymodel.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/polymodel.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete from google.cloud.ndb import model @@ -6,4 +6,4 @@ class _ClassKeyProperty(model.StringProperty): def __init__(self, name=..., indexed: bool = ...) -> None: ... class PolyModel(model.Model): - class_: Any + class_: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/query.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/query.pyi index 07ff7c4136..939a79444b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/query.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/query.pyi @@ -1,18 +1,17 @@ from _typeshed import Incomplete -from typing import Any from google.cloud.ndb import _options class PropertyOrder: - name: Any - reverse: Any + name: Incomplete + reverse: Incomplete def __init__(self, name, reverse: bool = ...) -> None: ... def __neg__(self): ... class RepeatedStructuredPropertyPredicate: - name: Any - match_keys: Any - match_values: Any + name: Incomplete + match_keys: Incomplete + match_values: Incomplete def __init__(self, name, match_keys, entity_pb) -> None: ... def __call__(self, entity_pb): ... @@ -28,8 +27,8 @@ class Parameter(ParameterizedThing): def resolve(self, bindings, used): ... 
class ParameterizedFunction(ParameterizedThing): - func: Any - values: Any + func: Incomplete + values: Incomplete def __init__(self, func, values) -> None: ... def __eq__(self, other): ... def is_parameterized(self): ... @@ -65,9 +64,9 @@ class PostFilterNode(Node): def __eq__(self, other): ... class _BooleanClauses: - name: Any - combine_or: Any - or_parts: Any + name: Incomplete + combine_or: Incomplete + or_parts: Incomplete def __init__(self, name, combine_or) -> None: ... def add_node(self, node) -> None: ... @@ -89,24 +88,24 @@ AND = ConjunctionNode OR = DisjunctionNode class QueryOptions(_options.ReadOptions): - project: Any - namespace: Any + project: Incomplete + namespace: Incomplete database: str | None def __init__(self, config: Incomplete | None = ..., context: Incomplete | None = ..., **kwargs) -> None: ... class Query: - default_options: Any - kind: Any - ancestor: Any - filters: Any - order_by: Any - project: Any - namespace: Any - limit: Any - offset: Any - keys_only: Any - projection: Any - distinct_on: Any + default_options: Incomplete + kind: Incomplete + ancestor: Incomplete + filters: Incomplete + order_by: Incomplete + project: Incomplete + namespace: Incomplete + limit: Incomplete + offset: Incomplete + keys_only: Incomplete + projection: Incomplete + distinct_on: Incomplete database: str | None def __init__( self, @@ -136,7 +135,7 @@ class Query: def fetch_async(self, limit: Incomplete | None = ..., **kwargs): ... def run_to_queue(self, queue, conn, options: Incomplete | None = ..., dsquery: Incomplete | None = ...) -> None: ... def iter(self, **kwargs): ... - __iter__: Any + __iter__: Incomplete def map(self, callback, **kwargs): ... def map_async(self, callback, **kwargs) -> None: ... def get(self, **kwargs): ... @@ -146,4 +145,4 @@ class Query: def fetch_page(self, page_size, **kwargs): ... def fetch_page_async(self, page_size, **kwargs) -> None: ... -def gql(query_string: str, *args: Any, **kwds: Any) -> Query: ... 
+def gql(query_string: str, *args, **kwds) -> Query: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/stats.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/stats.pyi index 1ffa02ff04..7dfaaa5c70 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/stats.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/stats.pyi @@ -1,41 +1,41 @@ -from typing import Any +from _typeshed import Incomplete from google.cloud.ndb import model class BaseStatistic(model.Model): STORED_KIND_NAME: str - bytes: Any - count: Any - timestamp: Any + bytes: Incomplete + count: Incomplete + timestamp: Incomplete class BaseKindStatistic(BaseStatistic): STORED_KIND_NAME: str - kind_name: Any - entity_bytes: Any + kind_name: Incomplete + entity_bytes: Incomplete class GlobalStat(BaseStatistic): STORED_KIND_NAME: str - entity_bytes: Any - builtin_index_bytes: Any - builtin_index_count: Any - composite_index_bytes: Any - composite_index_count: Any + entity_bytes: Incomplete + builtin_index_bytes: Incomplete + builtin_index_count: Incomplete + composite_index_bytes: Incomplete + composite_index_count: Incomplete class NamespaceStat(BaseStatistic): STORED_KIND_NAME: str - subject_namespace: Any - entity_bytes: Any - builtin_index_bytes: Any - builtin_index_count: Any - composite_index_bytes: Any - composite_index_count: Any + subject_namespace: Incomplete + entity_bytes: Incomplete + builtin_index_bytes: Incomplete + builtin_index_count: Incomplete + composite_index_bytes: Incomplete + composite_index_count: Incomplete class KindStat(BaseKindStatistic): STORED_KIND_NAME: str - builtin_index_bytes: Any - builtin_index_count: Any - composite_index_bytes: Any - composite_index_count: Any + builtin_index_bytes: Incomplete + builtin_index_count: Incomplete + composite_index_bytes: Incomplete + composite_index_count: Incomplete 
class KindRootEntityStat(BaseKindStatistic): STORED_KIND_NAME: str @@ -45,34 +45,34 @@ class KindNonRootEntityStat(BaseKindStatistic): class PropertyTypeStat(BaseStatistic): STORED_KIND_NAME: str - property_type: Any - entity_bytes: Any - builtin_index_bytes: Any - builtin_index_count: Any + property_type: Incomplete + entity_bytes: Incomplete + builtin_index_bytes: Incomplete + builtin_index_count: Incomplete class KindPropertyTypeStat(BaseKindStatistic): STORED_KIND_NAME: str - property_type: Any - builtin_index_bytes: Any - builtin_index_count: Any + property_type: Incomplete + builtin_index_bytes: Incomplete + builtin_index_count: Incomplete class KindPropertyNameStat(BaseKindStatistic): STORED_KIND_NAME: str - property_name: Any - builtin_index_bytes: Any - builtin_index_count: Any + property_name: Incomplete + builtin_index_bytes: Incomplete + builtin_index_count: Incomplete class KindPropertyNamePropertyTypeStat(BaseKindStatistic): STORED_KIND_NAME: str - property_type: Any - property_name: Any - builtin_index_bytes: Any - builtin_index_count: Any + property_type: Incomplete + property_name: Incomplete + builtin_index_bytes: Incomplete + builtin_index_count: Incomplete class KindCompositeIndexStat(BaseStatistic): STORED_KIND_NAME: str - index_id: Any - kind_name: Any + index_id: Incomplete + kind_name: Incomplete class NamespaceGlobalStat(GlobalStat): STORED_KIND_NAME: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/tasklets.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/tasklets.pyi index 4d9f34d697..8788f40f6c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/tasklets.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/tasklets.pyi @@ -1,7 +1,7 @@ -from typing import Any +from _typeshed import Incomplete class Future: - info: Any + info: Incomplete def __init__(self, info: 
str = ...) -> None: ... def done(self): ... def running(self): ... @@ -10,9 +10,9 @@ class Future: def set_result(self, result) -> None: ... def set_exception(self, exception) -> None: ... def result(self): ... - get_result: Any + get_result: Incomplete def exception(self): ... - get_exception: Any + get_exception: Incomplete def get_traceback(self): ... def add_done_callback(self, callback) -> None: ... def cancel(self) -> None: ... @@ -23,9 +23,9 @@ class Future: def wait_all(futures): ... class _TaskletFuture(Future): - generator: Any - context: Any - waiting_on: Any + generator: Incomplete + context: Incomplete + waiting_on: Incomplete def __init__(self, generator, context, info: str = ...) -> None: ... def cancel(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/utils.pyi index 3ddb96071d..ccc5ca208d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/utils.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/utils.pyi @@ -1,11 +1,11 @@ import threading -from typing import Any +from _typeshed import Incomplete -TRUTHY_STRINGS: Any +TRUTHY_STRINGS: Incomplete def asbool(value): ... -DEBUG: Any +DEBUG: Incomplete def code_info(*args, **kwargs) -> None: ... def decorator(*args, **kwargs) -> None: ... @@ -16,7 +16,7 @@ def get_stack(*args, **kwargs) -> None: ... def logging_debug(log, message, *args, **kwargs) -> None: ... class keyword_only: - defaults: Any + defaults: Incomplete def __init__(self, **kwargs) -> None: ... def __call__(self, wrapped): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/greenlet/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/greenlet/METADATA.toml index 7b3f60b4c2..c1978835e4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/greenlet/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/greenlet/METADATA.toml @@ -1,2 +1,2 @@ -version = "3.2.*" +version = "3.3.*" upstream_repository = "https://github.com/python-greenlet/greenlet" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/gunicorn/gunicorn/arbiter.pyi b/packages/pyright-internal/typeshed-fallback/stubs/gunicorn/gunicorn/arbiter.pyi index 0f7ae45f5d..8d4f221aa8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/gunicorn/gunicorn/arbiter.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/gunicorn/gunicorn/arbiter.pyi @@ -1,10 +1,10 @@ -import socket from types import FrameType from typing import ClassVar from gunicorn.app.base import BaseApplication from gunicorn.config import Config from gunicorn.glogging import Logger as GLogger +from gunicorn.sock import BaseSocket from gunicorn.workers.base import Worker from ._types import _AddressType @@ -14,7 +14,7 @@ class Arbiter: WORKER_BOOT_ERROR: ClassVar[int] APP_LOAD_ERROR: ClassVar[int] START_CTX: ClassVar[dict[int | str, str | list[str]]] - LISTENERS: ClassVar[list[socket.socket]] + LISTENERS: ClassVar[list[BaseSocket]] WORKERS: ClassVar[dict[int, Worker]] PIPE: ClassVar[list[int]] SIG_QUEUE: ClassVar[list[int]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/gunicorn/gunicorn/sock.pyi b/packages/pyright-internal/typeshed-fallback/stubs/gunicorn/gunicorn/sock.pyi index 4ce1b77cf8..0c19078abe 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/gunicorn/gunicorn/sock.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/gunicorn/gunicorn/sock.pyi @@ -9,6 +9,8 @@ from gunicorn.glogging import Logger as GLogger from .config import Config class BaseSocket: + 
sock: socket.socket + def __init__(self, address: str, conf: Config, log: GLogger, fd: SupportsIndex | None = None) -> None: ... def __getattr__(self, name: str) -> Any: ... def set_options(self, sock: socket.socket, bound: bool = False) -> socket.socket: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/METADATA.toml index 23a4684130..3f4ba61f3d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/METADATA.toml @@ -1,5 +1,6 @@ version = "1.1.*" upstream_repository = "https://github.com/html5lib/html5lib-python" +requires = ["types-webencodings"] [tool.stubtest] extras = ["all"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_ihatexml.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_ihatexml.pyi index 44115cfbab..4f0844635a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_ihatexml.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_ihatexml.pyi @@ -1,6 +1,4 @@ import re -from _typeshed import Incomplete -from collections.abc import Iterable baseChar: str ideographic: str @@ -13,13 +11,13 @@ nameFirst: str reChar: re.Pattern[str] reCharRange: re.Pattern[str] -def charStringToList(chars: str) -> list[str]: ... -def normaliseCharList(charList: Iterable[str]) -> list[str]: ... +def charStringToList(chars: str) -> list[list[int]]: ... +def normaliseCharList(charList: list[list[int]]) -> list[list[int]]: ... max_unicode: int -def missingRanges(charList: Iterable[str]) -> list[str]: ... -def listToRegexpStr(charList): ... +def missingRanges(charList: list[list[int]]) -> list[list[int]]: ... +def listToRegexpStr(charList: list[list[int]]) -> str: ... def hexToInt(hex_str: str | bytes | bytearray) -> int: ... def escapeRegexp(string: str) -> str: ... 
@@ -29,13 +27,13 @@ nonPubidCharRegexp: re.Pattern[str] class InfosetFilter: replacementRegexp: re.Pattern[str] - dropXmlnsLocalName: Incomplete - dropXmlnsAttrNs: Incomplete - preventDoubleDashComments: Incomplete - preventDashAtCommentEnd: Incomplete - replaceFormFeedCharacters: Incomplete - preventSingleQuotePubid: Incomplete - replaceCache: Incomplete + dropXmlnsLocalName: bool + dropXmlnsAttrNs: bool + preventDoubleDashComments: bool + preventDashAtCommentEnd: bool + replaceFormFeedCharacters: bool + preventSingleQuotePubid: bool + replaceCache: dict[str, str] def __init__( self, dropXmlnsLocalName: bool = False, @@ -45,13 +43,13 @@ class InfosetFilter: replaceFormFeedCharacters: bool = True, preventSingleQuotePubid: bool = False, ) -> None: ... - def coerceAttribute(self, name, namespace=None): ... - def coerceElement(self, name): ... - def coerceComment(self, data): ... - def coerceCharacters(self, data): ... - def coercePubid(self, data): ... - def toXmlName(self, name): ... - def getReplacementCharacter(self, char): ... - def fromXmlName(self, name): ... - def escapeChar(self, char): ... - def unescapeChar(self, charcode): ... + def coerceAttribute(self, name: str, namespace: str | None = None) -> str | None: ... + def coerceElement(self, name: str) -> str: ... + def coerceComment(self, data: str) -> str: ... + def coerceCharacters(self, data: str) -> str: ... + def coercePubid(self, data: str) -> str: ... + def toXmlName(self, name: str) -> str: ... + def getReplacementCharacter(self, char: str) -> str: ... + def fromXmlName(self, name: str) -> str: ... + def escapeChar(self, char: str) -> str: ... + def unescapeChar(self, charcode: str | bytes | bytearray) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_inputstream.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_inputstream.pyi index 9f10e4dbf9..53a94876db 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_inputstream.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_inputstream.pyi @@ -1,37 +1,36 @@ -from _typeshed import Incomplete, SupportsRead -from codecs import CodecInfo -from typing import Protocol, overload, type_check_only -from typing_extensions import TypeAlias +import re +from _io import BytesIO, StringIO +from _typeshed import Incomplete, ReadableBuffer, SupportsRead +from collections.abc import Callable, Iterable +from typing import Any, AnyStr, Generic, Literal, TypeVar, overload +from typing_extensions import Self, TypeAlias -# Is actually webencodings.Encoding -@type_check_only -class _Encoding(Protocol): - name: str - codec_info: CodecInfo - def __init__(self, name: str, codec_info: CodecInfo) -> None: ... 
+from webencodings import Encoding _UnicodeInputStream: TypeAlias = str | SupportsRead[str] _BinaryInputStream: TypeAlias = bytes | SupportsRead[bytes] _InputStream: TypeAlias = _UnicodeInputStream | _BinaryInputStream # noqa: Y047 # used in other files +_SupportsReadT = TypeVar("_SupportsReadT", bound=SupportsRead[Any]) +_SupportsReadBytesT = TypeVar("_SupportsReadBytesT", bound=SupportsRead[bytes]) -spaceCharactersBytes: Incomplete -asciiLettersBytes: Incomplete -asciiUppercaseBytes: Incomplete -spacesAngleBrackets: Incomplete +spaceCharactersBytes: frozenset[bytes] +asciiLettersBytes: frozenset[bytes] +asciiUppercaseBytes: frozenset[bytes] +spacesAngleBrackets: frozenset[bytes] invalid_unicode_no_surrogate: str -invalid_unicode_re: Incomplete -non_bmp_invalid_codepoints: Incomplete -ascii_punctuation_re: Incomplete -charsUntilRegEx: Incomplete +invalid_unicode_re: re.Pattern[str] +non_bmp_invalid_codepoints: set[int] +ascii_punctuation_re: re.Pattern[str] +charsUntilRegEx: dict[tuple[Iterable[str | bytes | bytearray], bool], re.Pattern[str]] -class BufferedStream: - stream: Incomplete - buffer: Incomplete - position: Incomplete - def __init__(self, stream) -> None: ... - def tell(self): ... - def seek(self, pos) -> None: ... - def read(self, bytes): ... +class BufferedStream(Generic[AnyStr]): + stream: SupportsRead[AnyStr] + buffer: list[AnyStr] + position: list[int] + def __init__(self, stream: SupportsRead[AnyStr]) -> None: ... + def tell(self) -> int: ... + def seek(self, pos: int) -> None: ... + def read(self, bytes: int) -> AnyStr: ... @overload def HTMLInputStream(source: _UnicodeInputStream) -> HTMLUnicodeInputStream: ... @@ -48,9 +47,9 @@ def HTMLInputStream( ) -> HTMLBinaryInputStream: ... 
class HTMLUnicodeInputStream: - reportCharacterErrors: Incomplete - newLines: Incomplete - charEncoding: tuple[_Encoding, str] + reportCharacterErrors: Callable[[str], None] + newLines: list[int] + charEncoding: tuple[Encoding, str] dataStream: Incomplete def __init__(self, source: _UnicodeInputStream) -> None: ... chunk: str @@ -60,14 +59,17 @@ class HTMLUnicodeInputStream: prevNumLines: int prevNumCols: int def reset(self) -> None: ... - def openStream(self, source): ... + @overload + def openStream(self, source: _SupportsReadT) -> _SupportsReadT: ... + @overload + def openStream(self, source: str | None) -> StringIO: ... def position(self) -> tuple[int, int]: ... - def char(self): ... - def readChunk(self, chunkSize=None): ... - def characterErrorsUCS4(self, data) -> None: ... - def characterErrorsUCS2(self, data) -> None: ... - def charsUntil(self, characters, opposite: bool = False): ... - def unget(self, char) -> None: ... + def char(self) -> str | None: ... + def readChunk(self, chunkSize: int | None = None) -> bool: ... + def characterErrorsUCS4(self, data: str) -> None: ... + def characterErrorsUCS2(self, data: str) -> None: ... + def charsUntil(self, characters: Iterable[str | bytes | bytearray], opposite: bool = False) -> str: ... + def unget(self, char: str | None) -> None: ... class HTMLBinaryInputStream(HTMLUnicodeInputStream): rawStream: Incomplete @@ -77,8 +79,8 @@ class HTMLBinaryInputStream(HTMLUnicodeInputStream): transport_encoding: Incomplete same_origin_parent_encoding: Incomplete likely_encoding: Incomplete - default_encoding: Incomplete - charEncoding: tuple[_Encoding, str] + default_encoding: str + charEncoding: tuple[Encoding, str] def __init__( self, source: _BinaryInputStream, @@ -91,46 +93,52 @@ class HTMLBinaryInputStream(HTMLUnicodeInputStream): ) -> None: ... dataStream: Incomplete def reset(self) -> None: ... - def openStream(self, source): ... 
+ @overload # type: ignore[override] + def openStream(self, source: _SupportsReadBytesT) -> _SupportsReadBytesT: ... + @overload # type: ignore[override] + def openStream(self, source: ReadableBuffer) -> BytesIO: ... def determineEncoding(self, chardet: bool = True): ... def changeEncoding(self, newEncoding: str | bytes | None) -> None: ... - def detectBOM(self): ... - def detectEncodingMeta(self): ... + def detectBOM(self) -> Encoding | None: ... + def detectEncodingMeta(self) -> Encoding | None: ... class EncodingBytes(bytes): - def __new__(self, value): ... - def __init__(self, value) -> None: ... - def __iter__(self): ... - def __next__(self): ... - def next(self): ... - def previous(self): ... - def setPosition(self, position) -> None: ... - def getPosition(self): ... - position: Incomplete - def getCurrentByte(self): ... + def __new__(self, value: bytes) -> Self: ... + def __init__(self, value: bytes) -> None: ... + def __iter__(self) -> Self: ... # type: ignore[override] + def __next__(self) -> bytes: ... + def next(self) -> bytes: ... + def previous(self) -> bytes: ... + def setPosition(self, position: int) -> None: ... + def getPosition(self) -> int | None: ... @property - def currentByte(self): ... - def skip(self, chars=...): ... - def skipUntil(self, chars): ... - def matchBytes(self, bytes): ... - def jumpTo(self, bytes): ... + def position(self) -> int | None: ... + @position.setter + def position(self, position: int) -> None: ... + def getCurrentByte(self) -> bytes: ... + @property + def currentByte(self) -> bytes: ... + def skip(self, chars: bytes | bytearray | Iterable[bytes] = ...) -> bytes | None: ... + def skipUntil(self, chars: bytes | bytearray | Iterable[bytes]) -> bytes | None: ... + def matchBytes(self, bytes: bytes | bytearray) -> bool: ... + def jumpTo(self, bytes: bytes | bytearray) -> Literal[True]: ... class EncodingParser: - data: Incomplete - encoding: Incomplete - def __init__(self, data) -> None: ... - def getEncoding(self): ... 
- def handleComment(self): ... - def handleMeta(self): ... - def handlePossibleStartTag(self): ... - def handlePossibleEndTag(self): ... - def handlePossibleTag(self, endTag): ... - def handleOther(self): ... - def getAttribute(self): ... + data: EncodingBytes + encoding: Encoding | None + def __init__(self, data: bytes) -> None: ... + def getEncoding(self) -> Encoding | None: ... + def handleComment(self) -> bool: ... + def handleMeta(self) -> bool: ... + def handlePossibleStartTag(self) -> bool: ... + def handlePossibleEndTag(self) -> bool: ... + def handlePossibleTag(self, endTag: bool | None) -> bool: ... + def handleOther(self) -> bool: ... + def getAttribute(self) -> tuple[bytes, bytes] | None: ... class ContentAttrParser: - data: Incomplete - def __init__(self, data) -> None: ... - def parse(self): ... + data: EncodingBytes + def __init__(self, data: EncodingBytes) -> None: ... + def parse(self) -> bytes | None: ... -def lookupEncoding(encoding: str | bytes | None) -> str | None: ... +def lookupEncoding(encoding: str | bytes | None) -> Encoding | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_tokenizer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_tokenizer.pyi index 22316b1112..53c2e1bec6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_tokenizer.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_tokenizer.pyi @@ -1,42 +1,78 @@ from _typeshed import Incomplete +from collections import deque +from collections.abc import Callable, Iterator +from typing import TypedDict, overload, type_check_only -from ._inputstream import _InputStream +from ._inputstream import HTMLBinaryInputStream, HTMLUnicodeInputStream, _BinaryInputStream, _UnicodeInputStream +from ._trie import Trie -entitiesTrie: Incomplete +@type_check_only +class _DataVars(TypedDict, total=False): + data: str | None + charAsInt: int + +@type_check_only +class _Token(TypedDict, total=False): + type: int + data: str | list[str] + datavars: _DataVars + name: str + selfClosing: bool + selfClosingAcknowledged: bool + publicId: str | None + systemId: str | None + correct: bool + +entitiesTrie: Trie attributeMap = dict class HTMLTokenizer: - stream: Incomplete + # TODO: Use Protocol to allow subclasses to set `stream` that do not inherit from HTMLUnicodeInputStream + stream: HTMLUnicodeInputStream | HTMLBinaryInputStream parser: Incomplete escapeFlag: bool - lastFourChars: Incomplete - state: Incomplete + lastFourChars: list[Incomplete] + state: Callable[[], bool] escape: bool - currentToken: Incomplete - def __init__(self, stream: _InputStream, parser=None, **kwargs) -> None: ... - tokenQueue: Incomplete - def __iter__(self): ... - def consumeNumberEntity(self, isHex): ... - def consumeEntity(self, allowedChar=None, fromAttribute: bool = False) -> None: ... - def processEntityInAttribute(self, allowedChar) -> None: ... 
+ currentToken: _Token | None + @overload + def __init__(self, stream: _UnicodeInputStream, parser=None) -> None: ... + @overload + def __init__( + self, + stream: _BinaryInputStream, + parser=None, + *, + override_encoding: str | bytes | None = None, + transport_encoding: str | bytes | None = None, + same_origin_parent_encoding: str | bytes | None = None, + likely_encoding: str | bytes | None = None, + default_encoding: str = "windows-1252", + useChardet: bool = True, + ) -> None: ... + tokenQueue: deque[_Token] + def __iter__(self) -> Iterator[_Token]: ... + def consumeNumberEntity(self, isHex: bool | None) -> str: ... + def consumeEntity(self, allowedChar: str | None = None, fromAttribute: bool = False) -> None: ... + def processEntityInAttribute(self, allowedChar: str | None) -> None: ... def emitCurrentToken(self) -> None: ... - def dataState(self): ... - def entityDataState(self): ... - def rcdataState(self): ... - def characterReferenceInRcdata(self): ... - def rawtextState(self): ... - def scriptDataState(self): ... - def plaintextState(self): ... - def tagOpenState(self): ... - def closeTagOpenState(self): ... - def tagNameState(self): ... + def dataState(self) -> bool: ... + def entityDataState(self) -> bool: ... + def rcdataState(self) -> bool: ... + def characterReferenceInRcdata(self) -> bool: ... + def rawtextState(self) -> bool: ... + def scriptDataState(self) -> bool: ... + def plaintextState(self) -> bool: ... + def tagOpenState(self) -> bool: ... + def closeTagOpenState(self) -> bool: ... + def tagNameState(self) -> bool: ... temporaryBuffer: str - def rcdataLessThanSignState(self): ... - def rcdataEndTagOpenState(self): ... - def rcdataEndTagNameState(self): ... - def rawtextLessThanSignState(self): ... - def rawtextEndTagOpenState(self): ... - def rawtextEndTagNameState(self): ... + def rcdataLessThanSignState(self) -> bool: ... + def rcdataEndTagOpenState(self) -> bool: ... + def rcdataEndTagNameState(self) -> bool: ... 
+ def rawtextLessThanSignState(self) -> bool: ... + def rawtextEndTagOpenState(self) -> bool: ... + def rawtextEndTagNameState(self) -> bool: ... def scriptDataLessThanSignState(self) -> bool: ... def scriptDataEndTagOpenState(self) -> bool: ... def scriptDataEndTagNameState(self) -> bool: ... @@ -54,17 +90,17 @@ class HTMLTokenizer: def scriptDataDoubleEscapedDashDashState(self) -> bool: ... def scriptDataDoubleEscapedLessThanSignState(self) -> bool: ... def scriptDataDoubleEscapeEndState(self) -> bool: ... - def beforeAttributeNameState(self): ... - def attributeNameState(self): ... - def afterAttributeNameState(self): ... - def beforeAttributeValueState(self): ... - def attributeValueDoubleQuotedState(self): ... - def attributeValueSingleQuotedState(self): ... - def attributeValueUnQuotedState(self): ... - def afterAttributeValueState(self): ... - def selfClosingStartTagState(self): ... - def bogusCommentState(self): ... - def markupDeclarationOpenState(self): ... + def beforeAttributeNameState(self) -> bool: ... + def attributeNameState(self) -> bool: ... + def afterAttributeNameState(self) -> bool: ... + def beforeAttributeValueState(self) -> bool: ... + def attributeValueDoubleQuotedState(self) -> bool: ... + def attributeValueSingleQuotedState(self) -> bool: ... + def attributeValueUnQuotedState(self) -> bool: ... + def afterAttributeValueState(self) -> bool: ... + def selfClosingStartTagState(self) -> bool: ... + def bogusCommentState(self) -> bool: ... + def markupDeclarationOpenState(self) -> bool: ... def commentStartState(self) -> bool: ... def commentStartDashState(self) -> bool: ... def commentState(self) -> bool: ... @@ -76,15 +112,15 @@ class HTMLTokenizer: def doctypeNameState(self) -> bool: ... def afterDoctypeNameState(self) -> bool: ... def afterDoctypePublicKeywordState(self) -> bool: ... - def beforeDoctypePublicIdentifierState(self): ... - def doctypePublicIdentifierDoubleQuotedState(self): ... 
- def doctypePublicIdentifierSingleQuotedState(self): ... - def afterDoctypePublicIdentifierState(self): ... - def betweenDoctypePublicAndSystemIdentifiersState(self): ... - def afterDoctypeSystemKeywordState(self): ... - def beforeDoctypeSystemIdentifierState(self): ... - def doctypeSystemIdentifierDoubleQuotedState(self): ... - def doctypeSystemIdentifierSingleQuotedState(self): ... - def afterDoctypeSystemIdentifierState(self): ... - def bogusDoctypeState(self): ... - def cdataSectionState(self): ... + def beforeDoctypePublicIdentifierState(self) -> bool: ... + def doctypePublicIdentifierDoubleQuotedState(self) -> bool: ... + def doctypePublicIdentifierSingleQuotedState(self) -> bool: ... + def afterDoctypePublicIdentifierState(self) -> bool: ... + def betweenDoctypePublicAndSystemIdentifiersState(self) -> bool: ... + def afterDoctypeSystemKeywordState(self) -> bool: ... + def beforeDoctypeSystemIdentifierState(self) -> bool: ... + def doctypeSystemIdentifierDoubleQuotedState(self) -> bool: ... + def doctypeSystemIdentifierSingleQuotedState(self) -> bool: ... + def afterDoctypeSystemIdentifierState(self) -> bool: ... + def bogusDoctypeState(self) -> bool: ... + def cdataSectionState(self) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_utils.pyi index f41afba041..70abe9494a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_utils.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_utils.pyi @@ -1,6 +1,7 @@ import xml.etree.ElementTree as default_etree -from _typeshed import Incomplete -from collections.abc import Mapping +from _typeshed import Incomplete, Unused +from collections.abc import Iterable, Mapping, Sequence +from typing import Final, TypeVar, overload __all__ = [ "default_etree", @@ -11,13 +12,19 @@ __all__ = [ "supports_lone_surrogates", ] -supports_lone_surrogates: bool +supports_lone_surrogates: Final[bool] -class MethodDispatcher(dict[Incomplete, Incomplete]): - default: Incomplete - def __init__(self, items=()) -> None: ... - def __getitem__(self, key): ... - def __get__(self, instance, owner=None): ... +_K = TypeVar("_K") +_V = TypeVar("_V") + +class MethodDispatcher(dict[_K, _V]): + default: _V | None + @overload # to solve `reportInvalidTypeVarUse` + def __init__(self) -> None: ... + @overload + def __init__(self, items: Iterable[tuple[_K | Iterable[_K], _V]]) -> None: ... + def __getitem__(self, key: _K) -> _V | None: ... # type: ignore[override] + def __get__(self, instance, owner: Unused = None) -> BoundMethodDispatcher: ... class BoundMethodDispatcher(Mapping[Incomplete, Incomplete]): instance: Incomplete @@ -27,8 +34,8 @@ class BoundMethodDispatcher(Mapping[Incomplete, Incomplete]): def get(self, key, default): ... # type: ignore[override] def __iter__(self): ... def __len__(self) -> int: ... - def __contains__(self, key): ... + def __contains__(self, key) -> bool: ... -def isSurrogatePair(data): ... -def surrogatePairToCodepoint(data): ... +def isSurrogatePair(data: Sequence[str | bytes | bytearray]) -> bool: ... 
+def surrogatePairToCodepoint(data: Sequence[str | bytes | bytearray]) -> int: ... def moduleFactoryFactory(factory): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/constants.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/constants.pyi index a0e12fbff9..a5c4c5289a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/constants.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/constants.pyi @@ -1,6 +1,4 @@ -from _typeshed import Incomplete - -EOF: Incomplete +EOF: None E: dict[str, str] namespaces: dict[str, str] scopingElements: frozenset[tuple[str, str]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/alphabeticalattributes.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/alphabeticalattributes.pyi index 20b694d65e..955bae07d8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/alphabeticalattributes.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/alphabeticalattributes.pyi @@ -1,4 +1,5 @@ +from _typeshed import Incomplete + from . import base -class Filter(base.Filter): - def __iter__(self): ... +class Filter(base.Filter[dict[str, Incomplete]]): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/base.pyi index 82773fd912..ddbaf78f74 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/base.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/base.pyi @@ -1,7 +1,10 @@ -from _typeshed import Incomplete +from collections.abc import Iterable, Iterator +from typing import Any, Generic, TypeVar -class Filter: - source: Incomplete - def __init__(self, source) -> None: ... - def __iter__(self): ... 
- def __getattr__(self, name: str): ... +_T = TypeVar("_T", default=Any) + +class Filter(Generic[_T]): + source: Iterable[_T] + def __init__(self, source: Iterable[_T]) -> None: ... + def __iter__(self) -> Iterator[_T]: ... + def __getattr__(self, name: str) -> Any: ... # Depends on `source` diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/inject_meta_charset.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/inject_meta_charset.pyi index 030cd63753..d8bb75bfcf 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/inject_meta_charset.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/inject_meta_charset.pyi @@ -1,6 +1,8 @@ +from _typeshed import Incomplete +from collections.abc import Iterable + from . import base -class Filter(base.Filter): +class Filter(base.Filter[dict[str, Incomplete]]): encoding: str | None - def __init__(self, source, encoding: str | None) -> None: ... - def __iter__(self): ... + def __init__(self, source: Iterable[dict[str, Incomplete]], encoding: str | None) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/lint.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/lint.pyi index 39629c8696..37da9926b4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/lint.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/lint.pyi @@ -1,8 +1,10 @@ +from _typeshed import Incomplete +from collections.abc import Iterable + from . import base spaceCharacters: str -class Filter(base.Filter): +class Filter(base.Filter[dict[str, Incomplete]]): require_matching_tags: bool - def __init__(self, source, require_matching_tags: bool = True) -> None: ... - def __iter__(self): ... 
+ def __init__(self, source: Iterable[dict[str, Incomplete]], require_matching_tags: bool = True) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/optionaltags.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/optionaltags.pyi index 81045a0e06..9050d864e7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/optionaltags.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/optionaltags.pyi @@ -3,8 +3,7 @@ from collections.abc import Generator from . import base -class Filter(base.Filter): +class Filter(base.Filter[dict[str, Incomplete]]): def slider(self) -> Generator[tuple[Incomplete, Incomplete, Incomplete]]: ... - def __iter__(self): ... def is_optional_start(self, tagname: str, previous, next) -> bool: ... def is_optional_end(self, tagname: str, next) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/sanitizer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/sanitizer.pyi index 7761f2719a..3308d7fd60 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/sanitizer.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/sanitizer.pyi @@ -1,4 +1,5 @@ import re +from _typeshed import Incomplete from collections.abc import Iterable from typing_extensions import deprecated @@ -19,7 +20,7 @@ allowed_content_types: frozenset[str] data_content_type: re.Pattern[str] @deprecated("html5lib's sanitizer is deprecated; see https://github.com/html5lib/html5lib-python/issues/443") -class Filter(base.Filter): +class Filter(base.Filter[dict[str, Incomplete]]): allowed_elements: Iterable[tuple[str | None, str]] allowed_attributes: Iterable[tuple[str | None, str]] allowed_css_properties: Iterable[str] @@ -32,7 +33,7 @@ class Filter(base.Filter): svg_allow_local_href: 
Iterable[tuple[str | None, str]] def __init__( self, - source, + source: Iterable[dict[str, Incomplete]], allowed_elements: Iterable[tuple[str | None, str]] = ..., allowed_attributes: Iterable[tuple[str | None, str]] = ..., allowed_css_properties: Iterable[str] = ..., @@ -44,8 +45,7 @@ class Filter(base.Filter): svg_attr_val_allows_ref: Iterable[tuple[str | None, str]] = ..., svg_allow_local_href: Iterable[tuple[str | None, str]] = ..., ) -> None: ... - def __iter__(self): ... - def sanitize_token(self, token): ... - def allowed_token(self, token): ... - def disallowed_token(self, token): ... + def sanitize_token(self, token: dict[str, Incomplete]) -> dict[str, Incomplete] | None: ... + def allowed_token(self, token: dict[str, Incomplete]) -> dict[str, Incomplete]: ... + def disallowed_token(self, token: dict[str, Incomplete]) -> dict[str, Incomplete]: ... def sanitize_css(self, style: str) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/whitespace.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/whitespace.pyi index 39f818100c..89334b0709 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/whitespace.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/filters/whitespace.pyi @@ -1,12 +1,12 @@ import re +from _typeshed import Incomplete from . import base spaceCharacters: str SPACES_REGEX: re.Pattern[str] -class Filter(base.Filter): +class Filter(base.Filter[dict[str, Incomplete]]): spacePreserveElements: frozenset[str] - def __iter__(self): ... def collapse_spaces(text: str) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/html5parser.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/html5parser.pyi index ad3adaca4e..114ce03ca2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/html5parser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/html5parser.pyi @@ -54,9 +54,9 @@ class HTMLParser: def reparseTokenNormal(self, token: dict[str, Any]) -> None: ... def resetInsertionMode(self) -> None: ... originalPhase: Incomplete - def parseRCDataRawtext(self, token, contentType) -> None: ... + def parseRCDataRawtext(self, token, contentType: Literal["RAWTEXT", "RCDATA"]) -> None: ... -def getPhases(debug): ... +def getPhases(debug: bool | None) -> dict[str, type]: ... def adjust_attributes(token: dict[str, Any], replacements: dict[str, Any]) -> None: ... def impliedTagToken( name: str, type: str = "EndTag", attributes: dict[str, Any] | None = None, selfClosing: bool = False diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/serializer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/serializer.pyi index e7232e2e1d..e831607ff0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/serializer.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/serializer.pyi @@ -1,20 +1,53 @@ from _typeshed import Incomplete from collections.abc import Generator -from typing import overload +from typing import Literal, overload -k: str -v: str | int - -def htmlentityreplace_errors(exc: Exception) -> tuple[str | bytes, int]: ... -@overload -def serialize(input, tree: str = "etree", encoding: None = None, **serializer_opts) -> str: ... +def htmlentityreplace_errors(exc: UnicodeError) -> tuple[str | bytes, int]: ... @overload -def serialize(input, tree: str, encoding: str, **serializer_opts) -> bytes: ... 
+def serialize( + input, + tree: Literal["dom", "genshi", "lxml", "etree"] = "etree", + encoding: Literal[""] | None = None, + *, + quote_attr_values: Literal["legacy", "spec", "always"] = "legacy", + quote_char: str = '"', + use_best_quote_char: bool = ..., # default value depends on whether quote_char was passed + omit_optional_tags: bool = True, + minimize_boolean_attributes: bool = True, + use_trailing_solidus: bool = False, + space_before_trailing_solidus: bool = True, + escape_lt_in_attrs: bool = False, + escape_rcdata: bool = False, + resolve_entities: bool = True, + alphabetical_attributes: bool = False, + inject_meta_charset: bool = True, + strip_whitespace: bool = False, + sanitize: bool = False, +) -> str: ... @overload -def serialize(input, *, encoding: str, **serializer_opts) -> bytes: ... +def serialize( + input, + tree: Literal["dom", "genshi", "lxml", "etree"] = "etree", + encoding: str = ..., + *, + quote_attr_values: Literal["legacy", "spec", "always"] = "legacy", + quote_char: str = '"', + use_best_quote_char: bool = ..., # default value depends on whether quote_char was passed + omit_optional_tags: bool = True, + minimize_boolean_attributes: bool = True, + use_trailing_solidus: bool = False, + space_before_trailing_solidus: bool = True, + escape_lt_in_attrs: bool = False, + escape_rcdata: bool = False, + resolve_entities: bool = True, + alphabetical_attributes: bool = False, + inject_meta_charset: bool = True, + strip_whitespace: bool = False, + sanitize: bool = False, +) -> bytes: ... class HTMLSerializer: - quote_attr_values: str + quote_attr_values: Literal["legacy", "spec", "always"] quote_char: str use_best_quote_char: bool omit_optional_tags: bool @@ -28,15 +61,38 @@ class HTMLSerializer: inject_meta_charset: bool strip_whitespace: bool sanitize: bool - options: Incomplete - errors: Incomplete + options: tuple[str, ...] + errors: list[Incomplete] strict: bool - def __init__(self, **kwargs) -> None: ... - def encode(self, string): ... 
- def encodeStrict(self, string): ... - encoding: Incomplete - def serialize(self, treewalker, encoding=None) -> Generator[Incomplete]: ... - def render(self, treewalker, encoding=None): ... - def serializeError(self, data: str = "XXX ERROR MESSAGE NEEDED") -> None: ... + def __init__( + self, + *, + quote_attr_values: Literal["legacy", "spec", "always"] = "legacy", + quote_char: str = '"', + use_best_quote_char: bool = ..., # default value depends on whether quote_char was passed + omit_optional_tags: bool = True, + minimize_boolean_attributes: bool = True, + use_trailing_solidus: bool = False, + space_before_trailing_solidus: bool = True, + escape_lt_in_attrs: bool = False, + escape_rcdata: bool = False, + resolve_entities: bool = True, + alphabetical_attributes: bool = False, + inject_meta_charset: bool = True, + strip_whitespace: bool = False, + sanitize: bool = False, + ) -> None: ... + def encode(self, string: str) -> str | bytes: ... # result depends on self.encoding + def encodeStrict(self, string: str) -> str | bytes: ... # result depends on self.encoding + encoding: str | None + @overload + def serialize(self, treewalker, encoding: Literal[""] | None = None) -> Generator[str]: ... + @overload + def serialize(self, treewalker, encoding: str = ...) -> Generator[bytes]: ... + @overload + def render(self, treewalker, encoding: Literal[""] | None = None) -> str: ... + @overload + def render(self, treewalker, encoding: str = ...) -> bytes: ... + def serializeError(self, data="XXX ERROR MESSAGE NEEDED") -> None: ... class SerializeError(Exception): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/treeadapters/sax.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/treeadapters/sax.pyi index 3ac32ef193..22c93013ce 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/treeadapters/sax.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/treeadapters/sax.pyi @@ -1,6 +1,3 @@ -prefix: str | None -localName: str -namespace: str prefix_mapping: dict[str, str] def to_sax(walker, handler) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hvac/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/hvac/METADATA.toml index f16271580d..413110cd4b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/hvac/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/hvac/METADATA.toml @@ -1,3 +1,3 @@ -version = "2.3.*" +version = "2.4.*" upstream_repository = "https://github.com/hvac/hvac" requires = ["types-requests"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/__init__.pyi index 90323a99fb..7d3e80ff56 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/__init__.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from hvac.api.auth_methods.approle import AppRole as AppRole from hvac.api.auth_methods.aws import Aws as Aws from hvac.api.auth_methods.azure import Azure as Azure @@ -15,6 +13,7 @@ from hvac.api.auth_methods.okta import Okta as Okta from hvac.api.auth_methods.radius import Radius as Radius from hvac.api.auth_methods.token import Token as Token from hvac.api.auth_methods.userpass import Userpass as Userpass +from hvac.api.vault_api_base import VaultApiBase from 
hvac.api.vault_api_category import VaultApiCategory __all__ = ( @@ -37,5 +36,5 @@ __all__ = ( ) class AuthMethods(VaultApiCategory): - implemented_classes: Incomplete - unimplemented_classes: Incomplete + implemented_classes: list[type[VaultApiBase]] + unimplemented_classes: list[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/aws.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/aws.pyi index 79dba1a592..7298a79a1c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/aws.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/aws.pyi @@ -1,8 +1,8 @@ -from _typeshed import Incomplete +import logging from hvac.api.vault_api_base import VaultApiBase -logger: Incomplete +logger: logging.Logger class Aws(VaultApiBase): def configure( diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/azure.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/azure.pyi index 5198b3c2e7..ffa918c02e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/azure.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/azure.pyi @@ -1,9 +1,10 @@ -from _typeshed import Incomplete +import logging from hvac.api.vault_api_base import VaultApiBase DEFAULT_MOUNT_POINT: str -logger: Incomplete + +logger: logging.Logger class Azure(VaultApiBase): def configure(self, tenant_id, resource, environment=None, client_id=None, client_secret=None, mount_point="azure"): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/gcp.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/gcp.pyi index 8a38915c36..8ebb0e03fa 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/gcp.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/gcp.pyi @@ -1,9 +1,10 @@ -from _typeshed import Incomplete +import logging from hvac.api.vault_api_base import VaultApiBase DEFAULT_MOUNT_POINT: str -logger: Incomplete + +logger: logging.Logger class Gcp(VaultApiBase): def configure( diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/kubernetes.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/kubernetes.pyi index 490238b020..d351c9f5c7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/kubernetes.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/kubernetes.pyi @@ -26,6 +26,7 @@ class Kubernetes(VaultApiBase): token_type: str = "", mount_point="kubernetes", alias_name_source=None, + audience: str | None = None, ): ... def read_role(self, name, mount_point="kubernetes"): ... def list_roles(self, mount_point="kubernetes"): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/legacy_mfa.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/legacy_mfa.pyi index 42598cb7fd..68f56a9528 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/legacy_mfa.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/auth_methods/legacy_mfa.pyi @@ -1,9 +1,7 @@ -from _typeshed import Incomplete - from hvac.api.vault_api_base import VaultApiBase -SUPPORTED_MFA_TYPES: Incomplete -SUPPORTED_AUTH_METHODS: Incomplete +SUPPORTED_MFA_TYPES: list[str] +SUPPORTED_AUTH_METHODS: list[str] class LegacyMfa(VaultApiBase): def configure(self, mount_point, mfa_type: str = "duo", force: bool = False): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/secrets_engines/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/secrets_engines/__init__.pyi index 3831a24d9e..b74e0da336 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/secrets_engines/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/secrets_engines/__init__.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from hvac.api.secrets_engines.active_directory import ActiveDirectory as ActiveDirectory from hvac.api.secrets_engines.aws import Aws as Aws from hvac.api.secrets_engines.azure import Azure as Azure @@ -15,6 +13,7 @@ from hvac.api.secrets_engines.rabbitmq import RabbitMQ as RabbitMQ from hvac.api.secrets_engines.ssh import Ssh as Ssh from hvac.api.secrets_engines.transform import Transform as Transform from hvac.api.secrets_engines.transit import Transit as Transit +from hvac.api.vault_api_base import VaultApiBase from hvac.api.vault_api_category import VaultApiCategory __all__ = ( @@ -37,5 +36,5 @@ __all__ = ( ) class SecretsEngines(VaultApiCategory): - implemented_classes: Incomplete - unimplemented_classes: Incomplete 
+ implemented_classes: list[type[VaultApiBase]] + unimplemented_classes: list[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/secrets_engines/identity.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/secrets_engines/identity.pyi index acafad3d33..7adc589019 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/secrets_engines/identity.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/secrets_engines/identity.pyi @@ -1,8 +1,8 @@ -from _typeshed import Incomplete +import logging from hvac.api.vault_api_base import VaultApiBase -logger: Incomplete +logger: logging.Logger class Identity(VaultApiBase): def create_or_update_entity( diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/secrets_engines/kv.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/secrets_engines/kv.pyi index 1f4a3de383..00b60c9c34 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/secrets_engines/kv.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/secrets_engines/kv.pyi @@ -1,11 +1,11 @@ -from _typeshed import Incomplete +import logging from hvac.api.vault_api_base import VaultApiBase -logger: Incomplete +logger: logging.Logger class Kv(VaultApiBase): - allowed_kv_versions: Incomplete + allowed_kv_versions: list[str] def __init__(self, adapter, default_kv_version: str = "2") -> None: ... @property def v1(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/system_backend/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/system_backend/__init__.pyi index 23533ef389..c4066c28c7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/system_backend/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/system_backend/__init__.pyi @@ -1,5 +1,3 @@ -from _typeshed import Incomplete - from hvac.api.system_backend.audit import Audit as Audit from hvac.api.system_backend.auth import Auth as Auth from hvac.api.system_backend.capabilities import Capabilities as Capabilities @@ -17,6 +15,7 @@ from hvac.api.system_backend.raft import Raft as Raft from hvac.api.system_backend.seal import Seal as Seal from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin as SystemBackendMixin from hvac.api.system_backend.wrapping import Wrapping as Wrapping +from hvac.api.vault_api_base import VaultApiBase from hvac.api.vault_api_category import VaultApiCategory __all__ = ( @@ -59,5 +58,5 @@ class SystemBackend( Seal, Wrapping, ): - implemented_classes: Incomplete - unimplemented_classes: Incomplete + implemented_classes: list[type[VaultApiBase]] + unimplemented_classes: list[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/system_backend/system_backend_mixin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/system_backend/system_backend_mixin.pyi index 44090cd45a..5aab4a8129 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/system_backend/system_backend_mixin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/system_backend/system_backend_mixin.pyi @@ -1,8 +1,8 @@ -from _typeshed import Incomplete +import logging from abc import ABCMeta from hvac.api.vault_api_base import VaultApiBase -logger: Incomplete +logger: logging.Logger class SystemBackendMixin(VaultApiBase, 
metaclass=ABCMeta): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/system_backend/wrapping.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/system_backend/wrapping.pyi index 37a386f425..090f79e47a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/system_backend/wrapping.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/system_backend/wrapping.pyi @@ -1,7 +1,6 @@ -from _typeshed import Incomplete - from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin +from requests.models import _JSON class Wrapping(SystemBackendMixin): def unwrap(self, token=None): ... - def wrap(self, payload: dict[Incomplete, Incomplete] | None = None, ttl: int = 60): ... + def wrap(self, payload: _JSON | None = None, ttl: int = 60): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/vault_api_category.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/vault_api_category.pyi index 2162d9bb5f..2807505a3a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/vault_api_category.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/hvac/hvac/api/vault_api_category.pyi @@ -1,5 +1,4 @@ from abc import ABCMeta, abstractmethod -from collections.abc import Sequence from logging import Logger from typing import Any @@ -9,7 +8,7 @@ from hvac.api.vault_api_base import VaultApiBase logger: Logger class VaultApiCategory(VaultApiBase, metaclass=ABCMeta): - implemented_class_names: Sequence[str] + implemented_class_names: list[str] def __init__(self, adapter: Adapter[Any]) -> None: ... def __getattr__(self, item): ... @property @@ -20,6 +19,6 @@ class VaultApiCategory(VaultApiBase, metaclass=ABCMeta): @abstractmethod def implemented_classes(self): ... @property - def unimplemented_classes(self) -> None: ... + def unimplemented_classes(self) -> list[str]: ... 
@staticmethod def get_private_attr_name(class_name): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/icalendar/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/icalendar/METADATA.toml index e789388aa8..9fbe58c111 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/icalendar/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/icalendar/METADATA.toml @@ -1,4 +1,4 @@ -version = "6.3.1" +version = "6.3.2" upstream_repository = "https://github.com/collective/icalendar" requires = ["types-python-dateutil", "types-pytz"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/__init__.pyi index a95a2c94b8..bb992fea4a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/__init__.pyi @@ -114,7 +114,6 @@ __all__ = [ "RANGE", "vSkip", "RELATED", - "vSkip", "RELTYPE", "ROLE", "FeatureWillBeRemovedInFutureVersion", diff --git a/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/alarms.pyi b/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/alarms.pyi index d62f5641d0..975a413369 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/alarms.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/alarms.pyi @@ -21,7 +21,7 @@ class AlarmTime: acknowledged_until: datetime.datetime | None = None, snoozed_until: datetime.datetime | None = None, parent: Parent | None = None, - ): ... + ) -> None: ... @property def acknowledged(self) -> datetime.datetime | None: ... @property @@ -33,7 +33,7 @@ class AlarmTime: def trigger(self) -> datetime.date: ... class Alarms: - def __init__(self, component: Alarm | Event | Todo | None = None): ... 
+ def __init__(self, component: Alarm | Event | Todo | None = None) -> None: ... def add_component(self, component: Alarm | Parent) -> None: ... def set_parent(self, parent: Parent) -> None: ... def add_alarm(self, alarm: Alarm) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/cal.pyi b/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/cal.pyi index db452d1f59..84aef3cffa 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/cal.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/cal.pyi @@ -1,16 +1,19 @@ import datetime from _typeshed import Incomplete, SupportsItems -from collections.abc import Callable -from typing import Any, ClassVar, Final, Literal, NamedTuple, overload +from collections.abc import Callable, Iterable +from typing import Any, ClassVar, Final, Literal, NamedTuple, TypeVar, overload from typing_extensions import Self from .alarms import Alarms from .caselessdict import CaselessDict from .error import IncompleteComponent as IncompleteComponent from .parser import Contentline, Contentlines -from .prop import TypesFactory, vRecur +from .parser_tools import ICAL_TYPE +from .prop import TypesFactory, _vType, vRecur from .timezone.tzp import TZP +_D = TypeVar("_D") + __all__ = [ "Alarm", "Calendar", @@ -32,7 +35,8 @@ __all__ = [ def get_example(component_directory: str, example_name: str) -> bytes: ... class ComponentFactory(CaselessDict[Incomplete]): - def __init__(self, *args, **kwargs) -> None: ... + # Inherit complex __init__ from CaselessDict<-dict. + ... INLINE: CaselessDict[int] @@ -47,7 +51,7 @@ class Component(CaselessDict[Incomplete]): subcomponents: list[Incomplete] errors: list[str] - def __init__(self, *args, **kwargs) -> None: ... + # Inherit complex __init__ from CaselessDict<-dict. def __bool__(self) -> bool: ... __nonzero__ = __bool__ def is_empty(self) -> bool: ... 
@@ -59,9 +63,12 @@ class Component(CaselessDict[Incomplete]): def add( self, name: str, value: Any, parameters: SupportsItems[str, str | None] | None = None, encode: Literal[True] = True ) -> None: ... - def decoded(self, name, default=[]): ... - def get_inline(self, name, decode: bool = True): ... - def set_inline(self, name, values, encode: bool = True) -> None: ... + def decoded(self, name: str, default: _D = ...) -> Incomplete | _D: ... + def get_inline(self, name: str, decode: bool = True) -> list[Incomplete]: ... + @overload + def set_inline(self, name: str, values: Iterable[str], encode: Literal[False] = ...) -> None: ... + @overload + def set_inline(self, name: str, values: Iterable[Incomplete], encode: Literal[True] = True) -> None: ... def add_component(self, component: Component) -> None: ... def walk(self, name: str | None = None, select: Callable[[Component], bool] = ...) -> list[Component]: ... def property_items(self, recursive: bool = True, sorted: bool = True) -> list[tuple[str, object]]: ... @@ -71,7 +78,7 @@ class Component(CaselessDict[Incomplete]): @overload @classmethod def from_ical(cls, st: str, multiple: Literal[True]) -> list[Component]: ... # or any of its subclasses - def content_line(self, name: str, value, sorted: bool = True) -> Contentline: ... + def content_line(self, name: str, value: _vType | ICAL_TYPE, sorted: bool = True) -> Contentline: ... def content_lines(self, sorted: bool = True) -> Contentlines: ... def to_ical(self, sorted: bool = True) -> bytes: ... def __eq__(self, other: Component) -> bool: ... 
# type: ignore[override] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/parser.pyi b/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/parser.pyi index e1aaea27f9..c6924bb646 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/parser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/parser.pyi @@ -7,6 +7,7 @@ from typing_extensions import Self from .caselessdict import CaselessDict from .parser_tools import ICAL_TYPE +from .prop import _vType __all__ = [ "Contentline", @@ -83,7 +84,7 @@ class Contentline(str): strict: bool def __new__(cls, value: str | bytes, strict: bool = False, encoding: str = "utf-8") -> Self: ... @classmethod - def from_parts(cls, name: ICAL_TYPE, params: Parameters, values, sorted: bool = True) -> Self: ... + def from_parts(cls, name: ICAL_TYPE, params: Parameters, values: _vType | ICAL_TYPE, sorted: bool = True) -> Self: ... def parts(self) -> tuple[str, Parameters, str]: ... @classmethod def from_ical(cls, ical: str | bytes, strict: bool = False) -> Self: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/prop.pyi b/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/prop.pyi index a3e0c389f3..2675ebe82b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/prop.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/prop.pyi @@ -1,5 +1,5 @@ import datetime -from _typeshed import ConvertibleToFloat, ConvertibleToInt, SupportsKeysAndGetItem, Unused +from _typeshed import ConvertibleToFloat, ConvertibleToInt, Incomplete, SupportsKeysAndGetItem, Unused from collections.abc import Iterable, Iterator from enum import Enum from re import Pattern @@ -84,7 +84,7 @@ class vText(str): class vCalAddress(str): params: Parameters - def __new__(cls, value: ICAL_TYPE, encoding="utf-8", params: SupportsKeysAndGetItem[str, str] = {}) -> Self: ... + def __new__(cls, value: ICAL_TYPE, encoding: str = "utf-8", params: SupportsKeysAndGetItem[str, str] = {}) -> Self: ... def to_ical(self) -> bytes: ... @classmethod def from_ical(cls, ical: ICAL_TYPE) -> Self: ... @@ -127,7 +127,7 @@ class vDDDLists: def __init__(self, dt_list: Iterable[_AnyTimeType] | _AnyTimeType) -> None: ... def to_ical(self) -> bytes: ... @staticmethod - def from_ical(ical: str, timezone: str | datetime.timezone | None = None): ... + def from_ical(ical: str, timezone: str | datetime.timezone | None = None) -> list[Incomplete]: ... def __eq__(self, other: object) -> bool: ... 
class vCategory: diff --git a/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/timezone/zoneinfo.pyi b/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/timezone/zoneinfo.pyi index 2e6b6e938f..6891652c89 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/timezone/zoneinfo.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/icalendar/icalendar/timezone/zoneinfo.pyi @@ -2,7 +2,7 @@ import datetime from typing import Final, Literal from zoneinfo import ZoneInfo -from dateutil.rrule import rrule, rruleset +from dateutil.rrule import rrule from ..cal import Timezone from ..prop import vRecur @@ -22,7 +22,3 @@ class ZONEINFO(TZProvider): def create_timezone(self, tz: Timezone) -> datetime.tzinfo: ... # type: ignore[override] def uses_pytz(self) -> Literal[False]: ... def uses_zoneinfo(self) -> Literal[True]: ... - -def pickle_tzicalvtz(tzicalvtz): ... -def pickle_rruleset_with_cache(rs: rruleset): ... -def unpickle_rruleset_with_cache(rrule, rdate, exrule, exdate, cache): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/__init__.pyi index d5eeedd557..7cb7544c04 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/__init__.pyi @@ -1,4 +1,4 @@ -from typing import Any, Literal +from typing import Literal from .abstract.attrDef import AttrDef as AttrDef from .abstract.attribute import ( @@ -97,7 +97,7 @@ HASHED_SALTED_SHA384: Literal["SALTED_SHA384"] HASHED_SALTED_SHA512: Literal["SALTED_SHA512"] HASHED_SALTED_MD5: Literal["SALTED_MD5"] -NUMERIC_TYPES: tuple[type[Any], ...] -INTEGER_TYPES: tuple[type[Any], ...] -STRING_TYPES: tuple[type[Any], ...] -SEQUENCE_TYPES: tuple[type[Any], ...] +NUMERIC_TYPES: tuple[type, ...] +INTEGER_TYPES: tuple[type, ...] +STRING_TYPES: tuple[type, ...] 
+SEQUENCE_TYPES: tuple[type, ...] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/__init__.pyi index 5c2b1bd775..2e5fe21aca 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/__init__.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete STATUS_INIT: str STATUS_VIRTUAL: str @@ -11,5 +11,5 @@ STATUS_READY_FOR_DELETION: str STATUS_READY_FOR_MOVING: str STATUS_READY_FOR_RENAMING: str STATUS_DELETED: str -STATUSES: Any -INITIAL_STATUSES: Any +STATUSES: Incomplete +INITIAL_STATUSES: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/attrDef.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/attrDef.pyi index 60332975fc..b59e04f5df 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/attrDef.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/attrDef.pyi @@ -1,18 +1,18 @@ -from typing import Any +from _typeshed import Incomplete class AttrDef: - name: Any - key: Any - validate: Any - pre_query: Any - post_query: Any - default: Any - dereference_dn: Any - description: Any - mandatory: Any - single_value: Any - oid_info: Any - other_names: Any + name: Incomplete + key: Incomplete + validate: Incomplete + pre_query: Incomplete + post_query: Incomplete + default: Incomplete + dereference_dn: Incomplete + description: Incomplete + mandatory: Incomplete + single_value: Incomplete + oid_info: Incomplete + other_names: Incomplete def __init__( self, name, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/attribute.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/attribute.pyi index d22ff5d2ed..8040ae647a 100644 --- 
a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/attribute.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/attribute.pyi @@ -1,14 +1,14 @@ -from typing import Any +from _typeshed import Incomplete class Attribute: - key: Any - definition: Any - values: Any - raw_values: Any - response: Any - entry: Any - cursor: Any - other_names: Any + key: Incomplete + definition: Incomplete + values: Incomplete + raw_values: Incomplete + response: Incomplete + entry: Incomplete + cursor: Incomplete + other_names: Incomplete def __init__(self, attr_def, entry, cursor) -> None: ... def __len__(self) -> int: ... def __iter__(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/cursor.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/cursor.pyi index 73c1eb2db8..81ad20d99e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/cursor.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/cursor.pyi @@ -1,19 +1,20 @@ -from typing import Any, NamedTuple +from _typeshed import Incomplete +from typing import NamedTuple class Operation(NamedTuple): - request: Any - result: Any - response: Any + request: Incomplete + result: Incomplete + response: Incomplete class Cursor: - connection: Any - get_operational_attributes: Any - definition: Any - attributes: Any - controls: Any - execution_time: Any - entries: Any - schema: Any + connection: Incomplete + get_operational_attributes: Incomplete + definition: Incomplete + attributes: Incomplete + controls: Incomplete + execution_time: Incomplete + entries: Incomplete + schema: Incomplete def __init__( self, connection, @@ -38,14 +39,14 @@ class Cursor: def failed(self): ... 
class Reader(Cursor): - entry_class: Any - attribute_class: Any - entry_initial_status: Any - sub_tree: Any - base: Any - dereference_aliases: Any - validated_query: Any - query_filter: Any + entry_class: Incomplete + attribute_class: Incomplete + entry_initial_status: Incomplete + sub_tree: Incomplete + base: Incomplete + dereference_aliases: Incomplete + validated_query: Incomplete + query_filter: Incomplete def __init__( self, connection, @@ -68,8 +69,8 @@ class Reader(Cursor): @components_in_and.setter def components_in_and(self, value) -> None: ... def clear(self) -> None: ... - execution_time: Any - entries: Any + execution_time: Incomplete + entries: Incomplete def reset(self) -> None: ... def search(self, attributes=None): ... def search_object(self, entry_dn=None, attributes=None): ... @@ -78,14 +79,14 @@ class Reader(Cursor): def search_paged(self, paged_size, paged_criticality: bool = True, generator: bool = True, attributes=None): ... class Writer(Cursor): - entry_class: Any - attribute_class: Any - entry_initial_status: Any + entry_class: Incomplete + attribute_class: Incomplete + entry_initial_status: Incomplete @staticmethod def from_cursor(cursor, connection=None, object_def=None, custom_validator=None): ... @staticmethod def from_response(connection, object_def, response=None): ... - dereference_aliases: Any + dereference_aliases: Incomplete def __init__( self, connection, @@ -95,7 +96,7 @@ class Writer(Cursor): controls=None, auxiliary_class=None, ) -> None: ... - execution_time: Any + execution_time: Incomplete def commit(self, refresh: bool = True): ... def discard(self) -> None: ... def new(self, dn): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/entry.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/entry.pyi index 2c16d59fb8..1b2df740d1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/entry.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/entry.pyi @@ -1,16 +1,16 @@ -from typing import Any +from _typeshed import Incomplete class EntryState: - dn: Any - status: Any - attributes: Any - raw_attributes: Any - response: Any - cursor: Any - origin: Any - read_time: Any - changes: Any - definition: Any + dn: Incomplete + status: Incomplete + attributes: Incomplete + raw_attributes: Incomplete + response: Incomplete + cursor: Incomplete + origin: Incomplete + read_time: Incomplete + changes: Incomplete + definition: Incomplete def __init__(self, dn, cursor) -> None: ... def set_status(self, status) -> None: ... @property diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/exceptions.pyi index 518ad6b183..f0ce91bf5b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/exceptions.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/exceptions.pyi @@ -1,17 +1,17 @@ import socket -from typing import Any +from _typeshed import Incomplete from typing_extensions import Self class LDAPException(Exception): ... class LDAPOperationResult(LDAPException): def __new__(cls, result=None, description=None, dn=None, message=None, response_type=None, response=None) -> Self: ... 
- result: Any - description: Any - dn: Any - message: Any - type: Any - response: Any + result: Incomplete + description: Incomplete + dn: Incomplete + message: Incomplete + type: Incomplete + response: Incomplete def __init__(self, result=None, description=None, dn=None, message=None, response_type=None, response=None) -> None: ... class LDAPOperationsErrorResult(LDAPOperationResult): ... @@ -63,7 +63,7 @@ class LDAPAssertionFailedResult(LDAPOperationResult): ... class LDAPAuthorizationDeniedResult(LDAPOperationResult): ... class LDAPESyncRefreshRequiredResult(LDAPOperationResult): ... -exception_table: Any +exception_table: Incomplete class LDAPExceptionError(LDAPException): ... class LDAPConfigurationError(LDAPExceptionError): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/pooling.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/pooling.pyi index c35ddcad80..861a4d06af 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/pooling.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/pooling.pyi @@ -1,19 +1,19 @@ -from typing import Any +from _typeshed import Incomplete -POOLING_STRATEGIES: Any +POOLING_STRATEGIES: Incomplete class ServerState: - server: Any - last_checked_time: Any - available: Any + server: Incomplete + last_checked_time: Incomplete + available: Incomplete def __init__(self, server, last_checked_time, available) -> None: ... class ServerPoolState: - server_states: Any - strategy: Any - server_pool: Any + server_states: Incomplete + strategy: Incomplete + server_pool: Incomplete last_used_server: int - initialize_time: Any + initialize_time: Incomplete def __init__(self, server_pool) -> None: ... def refresh(self) -> None: ... def get_current_server(self): ... @@ -23,12 +23,12 @@ class ServerPoolState: def __len__(self) -> int: ... 
class ServerPool: - servers: Any - pool_states: Any - active: Any - exhaust: Any - single: Any - strategy: Any + servers: Incomplete + pool_states: Incomplete + active: Incomplete + exhaust: Incomplete + single: Incomplete + strategy: Incomplete def __init__( self, servers=None, pool_strategy="ROUND_ROBIN", active: bool = True, exhaust: bool = False, single_state: bool = True ) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/rdns.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/rdns.pyi index 32123eff5f..41205368d2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/rdns.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/rdns.pyi @@ -1,12 +1,12 @@ -from typing import Any +from _typeshed import Incomplete class ReverseDnsSetting: - OFF: Any - REQUIRE_RESOLVE_ALL_ADDRESSES: Any - REQUIRE_RESOLVE_IP_ADDRESSES_ONLY: Any - OPTIONAL_RESOLVE_ALL_ADDRESSES: Any - OPTIONAL_RESOLVE_IP_ADDRESSES_ONLY: Any - SUPPORTED_VALUES: Any + OFF: Incomplete + REQUIRE_RESOLVE_ALL_ADDRESSES: Incomplete + REQUIRE_RESOLVE_IP_ADDRESSES_ONLY: Incomplete + OPTIONAL_RESOLVE_ALL_ADDRESSES: Incomplete + OPTIONAL_RESOLVE_IP_ADDRESSES_ONLY: Incomplete + SUPPORTED_VALUES: Incomplete def get_hostname_by_addr(addr, success_required: bool = True): ... def is_ip_addr(addr): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/results.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/results.pyi index a2772bd148..67e1e0dcb5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/results.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/results.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete RESULT_SUCCESS: int RESULT_OPERATIONS_ERROR: int @@ -52,5 +52,5 @@ RESULT_CANNOT_CANCEL: int RESULT_ASSERTION_FAILED: int RESULT_AUTHORIZATION_DENIED: int RESULT_E_SYNC_REFRESH_REQUIRED: int -RESULT_CODES: Any -DO_NOT_RAISE_EXCEPTIONS: Any +RESULT_CODES: Incomplete +DO_NOT_RAISE_EXCEPTIONS: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/server.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/server.pyi index d86112148a..d31ad382f6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/server.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/server.pyi @@ -1,22 +1,23 @@ -from typing import Any, Literal +from _typeshed import Incomplete +from typing import Literal unix_socket_available: bool class Server: ipc: bool - host: Any - port: Any - allowed_referral_hosts: Any - ssl: Any - tls: Any - name: Any - get_info: Any - dit_lock: Any - custom_formatter: Any - custom_validator: Any - current_address: Any - connect_timeout: Any - mode: Any + host: Incomplete + port: Incomplete + allowed_referral_hosts: Incomplete + ssl: Incomplete + tls: Incomplete + name: Incomplete + get_info: Incomplete + dit_lock: Incomplete + custom_formatter: Incomplete + custom_validator: Incomplete + current_address: Incomplete + connect_timeout: Incomplete + mode: Incomplete def __init__( self, host: str, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/timezone.pyi 
b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/timezone.pyi index c6c52b37c0..f56ab0d9f4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/timezone.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/timezone.pyi @@ -1,9 +1,9 @@ +from _typeshed import Incomplete from datetime import tzinfo -from typing import Any class OffsetTzInfo(tzinfo): - offset: Any - name: Any + offset: Incomplete + name: Incomplete def __init__(self, offset, name) -> None: ... def utcoffset(self, dt): ... def tzname(self, dt): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/tls.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/tls.pyi index 87d423da05..3636fa700e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/tls.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/tls.pyi @@ -1,20 +1,20 @@ -from typing import Any +from _typeshed import Incomplete use_ssl_context: bool class Tls: - ssl_options: Any - validate: Any - ca_certs_file: Any - ca_certs_path: Any - ca_certs_data: Any - private_key_password: Any - version: Any - private_key_file: Any - certificate_file: Any - valid_names: Any - ciphers: Any - sni: Any + ssl_options: Incomplete + validate: Incomplete + ca_certs_file: Incomplete + ca_certs_path: Incomplete + ca_certs_data: Incomplete + private_key_password: Incomplete + version: Incomplete + private_key_file: Incomplete + certificate_file: Incomplete + valid_names: Incomplete + ciphers: Incomplete + sni: Incomplete def __init__( self, local_private_key_file=None, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/usage.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/usage.pyi index ff60063b95..19bcd126af 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/usage.pyi +++ 
b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/usage.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete class ConnectionUsage: open_sockets: int @@ -26,11 +26,11 @@ class ConnectionUsage: restartable_successes: int servers_from_pool: int def reset(self) -> None: ... - initial_connection_start_time: Any - open_socket_start_time: Any - connection_stop_time: Any - last_transmitted_time: Any - last_received_time: Any + initial_connection_start_time: Incomplete + open_socket_start_time: Incomplete + connection_stop_time: Incomplete + last_transmitted_time: Incomplete + last_received_time: Incomplete def __init__(self) -> None: ... def __iadd__(self, other): ... def update_transmitted_message(self, message, length) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/__init__.pyi index 90d3746d5a..db6afd88f4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/__init__.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete class ExtendedOperationContainer: def __init__(self, connection) -> None: ... @@ -91,7 +91,7 @@ class MicrosoftExtendedOperations(ExtendedOperationContainer): ): ... class ExtendedOperationsRoot(ExtendedOperationContainer): - standard: Any - novell: Any - microsoft: Any + standard: Incomplete + novell: Incomplete + microsoft: Incomplete def __init__(self, connection) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/dirSync.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/dirSync.pyi index 68acbfc5d2..80f947e958 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/dirSync.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/dirSync.pyi @@ -1,17 +1,17 @@ -from typing import Any +from _typeshed import Incomplete class DirSync: - connection: Any - base: Any - filter: Any - attributes: Any - cookie: Any - object_security: Any - ancestors_first: Any - public_data_only: Any - incremental_values: Any - max_length: Any - hex_guid: Any + connection: Incomplete + base: Incomplete + filter: Incomplete + attributes: Incomplete + cookie: Incomplete + object_security: Incomplete + ancestors_first: Incomplete + public_data_only: Incomplete + incremental_values: Incomplete + max_length: Incomplete + hex_guid: Incomplete more_results: bool def __init__( self, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/persistentSearch.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/persistentSearch.pyi index 2e8818c7e1..5c39dfc8b0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/persistentSearch.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/persistentSearch.pyi @@ -1,12 +1,12 @@ -from typing import Any +from _typeshed import Incomplete class ADPersistentSearch: - connection: Any - message_id: Any - base: Any - scope: Any - attributes: Any - controls: Any + connection: Incomplete + message_id: Incomplete + base: Incomplete + scope: Incomplete + attributes: Incomplete + controls: Incomplete filter: str def __init__(self, connection, search_base, search_scope, attributes, streaming, callback) -> None: ... def start(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/PersistentSearch.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/PersistentSearch.pyi index 4ed2853672..8b059d36c2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/PersistentSearch.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/PersistentSearch.pyi @@ -1,18 +1,18 @@ -from typing import Any +from _typeshed import Incomplete class PersistentSearch: - connection: Any - changes_only: Any - notifications: Any - message_id: Any - base: Any - filter: Any - scope: Any - dereference_aliases: Any - attributes: Any - size_limit: Any - time_limit: Any - controls: Any + connection: Incomplete + changes_only: Incomplete + notifications: Incomplete + message_id: Incomplete + base: Incomplete + filter: Incomplete + scope: Incomplete + dereference_aliases: Incomplete + attributes: Incomplete + size_limit: Incomplete + time_limit: Incomplete + controls: Incomplete def __init__( self, connection, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/modify.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/modify.pyi index 53ff356433..f21725f4b7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/modify.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/modify.pyi @@ -1,6 +1,6 @@ -from typing import Any +from _typeshed import Incomplete -change_table: Any +change_table: Incomplete def modify_operation(dn, changes, auto_encode, schema=None, validator=None, check_names: bool = False): ... def modify_request_to_dict(request): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/search.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/search.pyi index 24577fea09..34f7b29c92 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/search.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/search.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete ROOT: int AND: int @@ -17,10 +17,10 @@ SEARCH_MATCH_OR_CLOSE: int SEARCH_MATCH_OR_CONTROL: int class FilterNode: - tag: Any - parent: Any - assertion: Any - elements: Any + tag: Incomplete + parent: Incomplete + assertion: Incomplete + elements: Incomplete def __init__(self, tag=None, assertion=None) -> None: ... def __str__(self, pos: int = 0) -> str: ... def __repr__(self, pos: int = 0) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/formatters.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/formatters.pyi index 69c4e1aea1..62eb577c68 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/formatters.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/formatters.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete def format_unicode(raw_value): ... def format_integer(raw_value): ... @@ -8,7 +8,7 @@ def format_uuid_le(raw_value): ... def format_boolean(raw_value): ... def format_ad_timestamp(raw_value): ... -time_format: Any +time_format: Incomplete def format_time(raw_value): ... def format_ad_timedelta(raw_value): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/standard.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/standard.pyi index f85dd6485a..7fa4eb9274 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/standard.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/standard.pyi @@ -1,6 +1,6 @@ -from typing import Any +from _typeshed import Incomplete -standard_formatter: Any +standard_formatter: Incomplete def find_attribute_helpers(attr_type, name, custom_formatter): ... def format_attribute_values(schema, name, values, custom_formatter): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/oid.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/oid.pyi index 77efddad27..83886b0b56 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/oid.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/oid.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete OID_CONTROL: str OID_EXTENSION: str @@ -26,4 +26,4 @@ def decode_oids(sequence): ... def decode_syntax(syntax): ... def oid_to_string(oid): ... -Oids: Any +Oids: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc2849.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc2849.pyi index 03de801728..62420dde63 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc2849.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc2849.pyi @@ -1,6 +1,6 @@ -from typing import Any +from _typeshed import Incomplete -conf_ldif_line_length: Any +conf_ldif_line_length: Incomplete def safe_ldif_string(bytes_value): ... def add_controls(controls, all_base64): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc4512.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc4512.pyi index b3c3d10f42..8334585e40 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc4512.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc4512.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete def constant_to_class_kind(value): ... def constant_to_attribute_usage(value): ... @@ -9,7 +9,7 @@ def extension_to_tuple(extension_string): ... def list_to_string(list_object): ... class BaseServerInfo: - raw: Any + raw: Incomplete def __init__(self, raw_attributes) -> None: ... @classmethod def from_json(cls, json_definition, schema=None, custom_formatter=None): ... @@ -19,43 +19,43 @@ class BaseServerInfo: def to_json(self, indent: int = 4, sort: bool = True): ... class DsaInfo(BaseServerInfo): - alt_servers: Any - naming_contexts: Any - supported_controls: Any - supported_extensions: Any - supported_features: Any - supported_ldap_versions: Any - supported_sasl_mechanisms: Any - vendor_name: Any - vendor_version: Any - schema_entry: Any - other: Any + alt_servers: Incomplete + naming_contexts: Incomplete + supported_controls: Incomplete + supported_extensions: Incomplete + supported_features: Incomplete + supported_ldap_versions: Incomplete + supported_sasl_mechanisms: Incomplete + vendor_name: Incomplete + vendor_version: Incomplete + schema_entry: Incomplete + other: Incomplete def __init__(self, attributes, raw_attributes) -> None: ... 
class SchemaInfo(BaseServerInfo): - schema_entry: Any - create_time_stamp: Any - modify_time_stamp: Any - attribute_types: Any - object_classes: Any - matching_rules: Any - matching_rule_uses: Any - dit_content_rules: Any - dit_structure_rules: Any - name_forms: Any - ldap_syntaxes: Any - other: Any + schema_entry: Incomplete + create_time_stamp: Incomplete + modify_time_stamp: Incomplete + attribute_types: Incomplete + object_classes: Incomplete + matching_rules: Incomplete + matching_rule_uses: Incomplete + dit_content_rules: Incomplete + dit_structure_rules: Incomplete + name_forms: Incomplete + ldap_syntaxes: Incomplete + other: Incomplete def __init__(self, schema_entry, attributes, raw_attributes) -> None: ... def is_valid(self): ... class BaseObjectInfo: - oid: Any - name: Any - description: Any - obsolete: Any - extensions: Any - experimental: Any - raw_definition: Any + oid: Incomplete + name: Incomplete + description: Incomplete + obsolete: Incomplete + extensions: Incomplete + experimental: Incomplete + raw_definition: Incomplete def __init__( self, oid=None, name=None, description=None, obsolete: bool = False, extensions=None, experimental=None, definition=None ) -> None: ... @@ -65,7 +65,7 @@ class BaseObjectInfo: def from_definition(cls, definitions): ... class MatchingRuleInfo(BaseObjectInfo): - syntax: Any + syntax: Incomplete def __init__( self, oid=None, @@ -79,7 +79,7 @@ class MatchingRuleInfo(BaseObjectInfo): ) -> None: ... class MatchingRuleUseInfo(BaseObjectInfo): - apply_to: Any + apply_to: Incomplete def __init__( self, oid=None, @@ -93,10 +93,10 @@ class MatchingRuleUseInfo(BaseObjectInfo): ) -> None: ... class ObjectClassInfo(BaseObjectInfo): - superior: Any - kind: Any - must_contain: Any - may_contain: Any + superior: Incomplete + kind: Incomplete + must_contain: Incomplete + may_contain: Incomplete def __init__( self, oid=None, @@ -113,18 +113,18 @@ class ObjectClassInfo(BaseObjectInfo): ) -> None: ... 
class AttributeTypeInfo(BaseObjectInfo): - superior: Any - equality: Any - ordering: Any - substring: Any - syntax: Any - min_length: Any - single_value: Any - collective: Any - no_user_modification: Any - usage: Any - mandatory_in: Any - optional_in: Any + superior: Incomplete + equality: Incomplete + ordering: Incomplete + substring: Incomplete + syntax: Incomplete + min_length: Incomplete + single_value: Incomplete + collective: Incomplete + no_user_modification: Incomplete + usage: Incomplete + mandatory_in: Incomplete + optional_in: Incomplete def __init__( self, oid=None, @@ -150,10 +150,10 @@ class LdapSyntaxInfo(BaseObjectInfo): def __init__(self, oid=None, description=None, extensions=None, experimental=None, definition=None) -> None: ... class DitContentRuleInfo(BaseObjectInfo): - auxiliary_classes: Any - must_contain: Any - may_contain: Any - not_contains: Any + auxiliary_classes: Incomplete + must_contain: Incomplete + may_contain: Incomplete + not_contains: Incomplete def __init__( self, oid=None, @@ -170,8 +170,8 @@ class DitContentRuleInfo(BaseObjectInfo): ) -> None: ... class DitStructureRuleInfo(BaseObjectInfo): - superior: Any - name_form: Any + superior: Incomplete + name_form: Incomplete def __init__( self, oid=None, @@ -186,9 +186,9 @@ class DitStructureRuleInfo(BaseObjectInfo): ) -> None: ... 
class NameFormInfo(BaseObjectInfo): - object_class: Any - must_contain: Any - may_contain: Any + object_class: Incomplete + must_contain: Incomplete + may_contain: Incomplete def __init__( self, oid=None, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/asyncStream.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/asyncStream.pyi index b0ed1b4fd0..ece52ea4d1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/asyncStream.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/asyncStream.pyi @@ -1,17 +1,17 @@ -from typing import Any +from _typeshed import Incomplete from ..strategy.asynchronous import AsyncStrategy class AsyncStreamStrategy(AsyncStrategy): can_stream: bool - line_separator: Any + line_separator: Incomplete all_base64: bool - stream: Any - order: Any - persistent_search_message_id: Any + stream: Incomplete + order: Incomplete + persistent_search_message_id: Incomplete streaming: bool - callback: Any - events: Any + callback: Incomplete + events: Incomplete def __init__(self, ldap_connection) -> None: ... def accumulate_stream(self, message_id, change) -> None: ... def get_stream(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/asynchronous.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/asynchronous.pyi index 883c285af2..5e8be4de85 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/asynchronous.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/asynchronous.pyi @@ -1,12 +1,12 @@ +from _typeshed import Incomplete from threading import Thread -from typing import Any from ..strategy.base import BaseStrategy class AsyncStrategy(BaseStrategy): class ReceiverSocketThread(Thread): - connection: Any - socket_size: Any + connection: Incomplete + socket_size: Incomplete def __init__(self, ldap_connection) -> None: ... def run(self) -> None: ... @@ -14,9 +14,9 @@ class AsyncStrategy(BaseStrategy): no_real_dsa: bool pooled: bool can_stream: bool - receiver: Any - async_lock: Any - event_lock: Any + receiver: Incomplete + async_lock: Incomplete + event_lock: Incomplete def __init__(self, ldap_connection) -> None: ... def open(self, reset_usage: bool = True, read_server_info: bool = True) -> None: ... def close(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/base.pyi index 41fb3a05c1..1adc87f031 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/base.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/base.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete unix_socket_available: bool SESSION_TERMINATED_BY_SERVER: str @@ -6,12 +6,12 @@ TRANSACTION_ERROR: str RESPONSE_COMPLETE: str class BaseStrategy: - connection: Any - sync: Any - no_real_dsa: Any - pooled: Any - can_stream: Any - referral_cache: Any + connection: Incomplete + sync: Incomplete + no_real_dsa: Incomplete + pooled: Incomplete + can_stream: Incomplete + referral_cache: Incomplete thread_safe: bool def __init__(self, ldap_connection) -> None: ... def open(self, reset_usage: bool = True, read_server_info: bool = True) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/ldifProducer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/ldifProducer.pyi index 0c740e77cc..92524d3ad5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/ldifProducer.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/ldifProducer.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete from .base import BaseStrategy @@ -7,10 +7,10 @@ class LdifProducerStrategy(BaseStrategy): no_real_dsa: bool pooled: bool can_stream: bool - line_separator: Any + line_separator: Incomplete all_base64: bool - stream: Any - order: Any + stream: Incomplete + order: Incomplete def __init__(self, ldap_connection) -> None: ... def receiving(self) -> None: ... def send(self, message_type, request, controls=None): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockAsync.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockAsync.pyi index 619b8befa2..9ff38e9dd1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockAsync.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockAsync.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete from .asynchronous import AsyncStrategy from .mockBase import MockBaseStrategy @@ -6,6 +6,6 @@ from .mockBase import MockBaseStrategy class MockAsyncStrategy(MockBaseStrategy, AsyncStrategy): def __init__(self, ldap_connection) -> None: ... def post_send_search(self, payload): ... - bound: Any + bound: Incomplete def post_send_single_response(self, payload): ... def get_response(self, message_id, timeout=None, get_request: bool = False): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockBase.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockBase.pyi index 8f828dcfc1..879cc5efec 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockBase.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockBase.pyi @@ -1,25 +1,25 @@ -from typing import Any +from _typeshed import Incomplete -SEARCH_CONTROLS: Any +SEARCH_CONTROLS: Incomplete SERVER_ENCODING: str def random_cookie(): ... class PagedSearchSet: - size: Any - response: Any - cookie: Any + size: Incomplete + response: Incomplete + cookie: Incomplete sent: int done: bool def __init__(self, response, size, criticality) -> None: ... def next(self, size=None): ... 
class MockBaseStrategy: - entries: Any + entries: Incomplete no_real_dsa: bool - bound: Any - custom_validators: Any - operational_attributes: Any + bound: Incomplete + custom_validators: Incomplete + operational_attributes: Incomplete def __init__(self) -> None: ... def add_entry(self, dn, attributes, validate: bool = True): ... def remove_entry(self, dn): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockSync.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockSync.pyi index dcfd12d895..7d8b07ec7c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockSync.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockSync.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete from .mockBase import MockBaseStrategy from .sync import SyncStrategy @@ -6,5 +6,5 @@ from .sync import SyncStrategy class MockSyncStrategy(MockBaseStrategy, SyncStrategy): def __init__(self, ldap_connection) -> None: ... def post_send_search(self, payload): ... - bound: Any + bound: Incomplete def post_send_single_response(self, payload): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/restartable.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/restartable.pyi index 4e5c6dc377..bec37ed375 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/restartable.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/restartable.pyi @@ -1,4 +1,4 @@ -from typing import Any +from _typeshed import Incomplete from .sync import SyncStrategy @@ -7,9 +7,9 @@ class RestartableStrategy(SyncStrategy): no_real_dsa: bool pooled: bool can_stream: bool - restartable_sleep_time: Any - restartable_tries: Any - exception_history: Any + restartable_sleep_time: Incomplete + restartable_tries: Incomplete + exception_history: Incomplete def __init__(self, ldap_connection) -> None: ... def open(self, reset_usage: bool = False, read_server_info: bool = True) -> None: ... def send(self, message_type, request, controls=None): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/reusable.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/reusable.pyi index 4624d7058f..92b2f3ad7b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/reusable.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/reusable.pyi @@ -1,5 +1,5 @@ +from _typeshed import Incomplete from threading import Thread -from typing import Any from .base import BaseStrategy @@ -10,27 +10,27 @@ BOGUS_EXTENDED: int BOGUS_ABANDON: int class ReusableStrategy(BaseStrategy): - pools: Any + pools: Incomplete def receiving(self) -> None: ... def get_stream(self) -> None: ... def set_stream(self, value) -> None: ... class ConnectionPool: def __new__(cls, connection): ... 
- name: Any - master_connection: Any - workers: Any - pool_size: Any - lifetime: Any - keepalive: Any - request_queue: Any + name: Incomplete + master_connection: Incomplete + workers: Incomplete + pool_size: Incomplete + lifetime: Incomplete + keepalive: Incomplete + request_queue: Incomplete open_pool: bool bind_pool: bool tls_pool: bool counter: int - terminated_usage: Any + terminated_usage: Incomplete terminated: bool - pool_lock: Any + pool_lock: Incomplete started: bool def __init__(self, connection) -> None: ... def get_info_from_server(self) -> None: ... @@ -41,22 +41,22 @@ class ReusableStrategy(BaseStrategy): class PooledConnectionThread(Thread): daemon: bool - worker: Any - master_connection: Any + worker: Incomplete + master_connection: Incomplete def __init__(self, worker, master_connection) -> None: ... def run(self) -> None: ... class PooledConnectionWorker: - master_connection: Any - request_queue: Any + master_connection: Incomplete + request_queue: Incomplete running: bool busy: bool get_info_from_server: bool - connection: Any - creation_time: Any + connection: Incomplete + creation_time: Incomplete task_counter: int - thread: Any - worker_lock: Any + thread: Incomplete + worker_lock: Incomplete def __init__(self, connection, request_queue) -> None: ... def new_connection(self) -> None: ... @@ -64,7 +64,7 @@ class ReusableStrategy(BaseStrategy): no_real_dsa: bool pooled: bool can_stream: bool - pool: Any + pool: Incomplete def __init__(self, ldap_connection) -> None: ... def open(self, reset_usage: bool = True, read_server_info: bool = True) -> None: ... def terminate(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/sync.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/sync.pyi index fb77f59093..c270ecb6df 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/sync.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/sync.pyi @@ -1,15 +1,15 @@ -from typing import Any +from _typeshed import Incomplete from ..strategy.base import BaseStrategy -LDAP_MESSAGE_TEMPLATE: Any +LDAP_MESSAGE_TEMPLATE: Incomplete class SyncStrategy(BaseStrategy): sync: bool no_real_dsa: bool pooled: bool can_stream: bool - socket_size: Any + socket_size: Incomplete def __init__(self, ldap_connection) -> None: ... def open(self, reset_usage: bool = True, read_server_info: bool = True) -> None: ... def receiving(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/config.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/config.pyi index 80971d5fc1..e16df54889 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/config.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/config.pyi @@ -1,6 +1,6 @@ -from typing import Any +from _typeshed import Incomplete -PARAMETERS: Any +PARAMETERS: Incomplete def get_config_parameter(parameter): ... def set_config_parameter(parameter, value) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/hashed.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/hashed.pyi index c4aa9f8a3d..86c0f721e2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/hashed.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/hashed.pyi @@ -1,6 +1,6 @@ -from typing import Any +from _typeshed import Incomplete -algorithms_table: Any -salted_table: Any +algorithms_table: Incomplete +salted_table: Incomplete def hashed(algorithm, value, salt=None, raw: bool = False, encoding: str = "utf-8"): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/log.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/log.pyi index e7a4681b06..9ab59fda40 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/log.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/log.pyi @@ -1,5 +1,5 @@ +from _typeshed import Incomplete from logging import NullHandler as NullHandler -from typing import Any OFF: int ERROR: int @@ -7,7 +7,7 @@ BASIC: int PROTOCOL: int NETWORK: int EXTENDED: int -DETAIL_LEVELS: Any +DETAIL_LEVELS: Incomplete def get_detail_level_name(level_name): ... def log(detail, message, *args) -> None: ... @@ -22,4 +22,4 @@ def set_library_log_detail_level(detail) -> None: ... def get_library_log_detail_level(): ... def format_ldap_message(message, prefix): ... 
-logger: Any +logger: Incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/ntlm.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/ntlm.pyi index e03381cabb..37045dcbb0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/ntlm.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/ntlm.pyi @@ -1,6 +1,6 @@ -from typing import Any +from _typeshed import Incomplete -oem_encoding: Any +oem_encoding: Incomplete NTLM_SIGNATURE: bytes NTLM_MESSAGE_TYPE_NTLM_NEGOTIATE: int NTLM_MESSAGE_TYPE_NTLM_CHALLENGE: int @@ -27,7 +27,7 @@ FLAG_NEGOTIATE_SIGN: int FLAG_REQUEST_TARGET: int FLAG_NEGOTIATE_OEM: int FLAG_NEGOTIATE_UNICODE: int -FLAG_TYPES: Any +FLAG_TYPES: Incomplete AV_END_OF_LIST: int AV_NETBIOS_COMPUTER_NAME: int AV_NETBIOS_DOMAIN_NAME: int @@ -39,60 +39,60 @@ AV_TIMESTAMP: int AV_SINGLE_HOST_DATA: int AV_TARGET_NAME: int AV_CHANNEL_BINDINGS: int -AV_TYPES: Any +AV_TYPES: Incomplete AV_FLAG_CONSTRAINED: int AV_FLAG_INTEGRITY: int AV_FLAG_TARGET_SPN_UNTRUSTED: int -AV_FLAG_TYPES: Any +AV_FLAG_TYPES: Incomplete def pack_windows_version(debug: bool = False): ... def unpack_windows_version(version_message): ... 
class NtlmClient: client_config_flags: int - exported_session_key: Any - negotiated_flags: Any - user_name: Any - user_domain: Any - no_lm_response_ntlm_v1: Any + exported_session_key: Incomplete + negotiated_flags: Incomplete + user_name: Incomplete + user_domain: Incomplete + no_lm_response_ntlm_v1: Incomplete client_blocked: bool - client_block_exceptions: Any - client_require_128_bit_encryption: Any - max_life_time: Any - client_signing_key: Any - client_sealing_key: Any - sequence_number: Any - server_sealing_key: Any - server_signing_key: Any + client_block_exceptions: Incomplete + client_require_128_bit_encryption: Incomplete + max_life_time: Incomplete + client_signing_key: Incomplete + client_sealing_key: Incomplete + sequence_number: Incomplete + server_sealing_key: Incomplete + server_signing_key: Incomplete integrity: bool replay_detect: bool sequence_detect: bool confidentiality: bool datagram: bool identity: bool - client_supplied_target_name: Any - client_channel_binding_unhashed: Any - unverified_target_name: Any - server_challenge: Any - server_target_name: Any - server_target_info: Any - server_version: Any - server_av_netbios_computer_name: Any - server_av_netbios_domain_name: Any - server_av_dns_computer_name: Any - server_av_dns_domain_name: Any - server_av_dns_forest_name: Any - server_av_target_name: Any - server_av_flags: Any - server_av_timestamp: Any - server_av_single_host_data: Any - server_av_channel_bindings: Any - server_av_flag_constrained: Any - server_av_flag_integrity: Any - server_av_flag_target_spn_untrusted: Any - current_encoding: Any - client_challenge: Any - server_target_info_raw: Any + client_supplied_target_name: Incomplete + client_channel_binding_unhashed: Incomplete + unverified_target_name: Incomplete + server_challenge: Incomplete + server_target_name: Incomplete + server_target_info: Incomplete + server_version: Incomplete + server_av_netbios_computer_name: Incomplete + server_av_netbios_domain_name: Incomplete + 
server_av_dns_computer_name: Incomplete + server_av_dns_domain_name: Incomplete + server_av_dns_forest_name: Incomplete + server_av_target_name: Incomplete + server_av_flags: Incomplete + server_av_timestamp: Incomplete + server_av_single_host_data: Incomplete + server_av_channel_bindings: Incomplete + server_av_flag_constrained: Incomplete + server_av_flag_integrity: Incomplete + server_av_flag_target_spn_untrusted: Incomplete + current_encoding: Incomplete + client_challenge: Incomplete + server_target_info_raw: Incomplete def __init__(self, domain, user_name, password) -> None: ... def get_client_flag(self, flag): ... def get_negotiated_flag(self, flag): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/repr.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/repr.pyi index f2c58e20ed..41c269d198 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/repr.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/repr.pyi @@ -1,5 +1,5 @@ -from typing import Any +from _typeshed import Incomplete -repr_encoding: Any +repr_encoding: Incomplete def to_stdout_encoding(value): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/lupa/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/lupa/METADATA.toml index ce6fd0811d..b82a8453e4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/lupa/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/lupa/METADATA.toml @@ -1,2 +1,2 @@ -version = "2.5.*" +version = "2.6.*" upstream_repository = "https://github.com/scoder/lupa" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/netaddr/netaddr/ip/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/netaddr/netaddr/ip/__init__.pyi index b857e10436..9aa6a680bd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/netaddr/netaddr/ip/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/netaddr/netaddr/ip/__init__.pyi @@ -1,6 +1,7 @@ from _typeshed import ConvertibleToInt, Unused from abc import abstractmethod from collections.abc import Iterable, Iterator +from types import ModuleType from typing import Literal, SupportsIndex, SupportsInt, overload from typing_extensions import Self, TypeAlias @@ -95,7 +96,9 @@ class IPListMixin: def __contains__(self, other: BaseIP | _IPAddressAddr) -> bool: ... def __bool__(self) -> Literal[True]: ... -def parse_ip_network(module, addr: tuple[int, int] | str, flags: int = 0, *, expand_partial: bool = False) -> tuple[int, int]: ... +def parse_ip_network( + module: ModuleType, addr: tuple[int, int] | str, flags: int = 0, *, expand_partial: bool = False +) -> tuple[int, int]: ... 
class IPNetwork(BaseIP, IPListMixin): __slots__ = ("_prefixlen",) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/netaddr/netaddr/ip/iana.pyi b/packages/pyright-internal/typeshed-fallback/stubs/netaddr/netaddr/ip/iana.pyi index ac386eb781..a689f90cb5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/netaddr/netaddr/ip/iana.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/netaddr/netaddr/ip/iana.pyi @@ -1,9 +1,9 @@ -from _typeshed import Incomplete, SupportsWrite +from _typeshed import SupportsWrite from collections.abc import Callable, Mapping, MutableMapping from typing import Any from typing_extensions import TypeAlias -from xml.sax import handler -from xml.sax.xmlreader import AttributesImpl, XMLReader +from xml.sax import _Source, handler +from xml.sax.xmlreader import AttributesImpl, InputSource, XMLReader from netaddr.core import Publisher, Subscriber from netaddr.ip import IPAddress, IPNetwork, IPRange @@ -20,8 +20,8 @@ class SaxRecordParser(handler.ContentHandler): class XMLRecordParser(Publisher): xmlparser: XMLReader - fh: Incomplete - def __init__(self, fh, **kwargs: object) -> None: ... + fh: InputSource | _Source + def __init__(self, fh: InputSource | _Source, **kwargs: object) -> None: ... def process_record(self, rec: Mapping[str, object]) -> dict[str, str] | None: ... def consume_record(self, rec: object) -> None: ... def parse(self) -> None: ... @@ -41,11 +41,11 @@ class MulticastParser(XMLRecordParser): def normalise_addr(self, addr: str) -> str: ... class DictUpdater(Subscriber): - dct: MutableMapping[_IanaInfoKey, Incomplete] + dct: MutableMapping[_IanaInfoKey, dict[str, Any]] topic: str unique_key: str - def __init__(self, dct: MutableMapping[_IanaInfoKey, Incomplete], topic: str, unique_key: str) -> None: ... - def update(self, data) -> None: ... + def __init__(self, dct: MutableMapping[_IanaInfoKey, dict[str, Any]], topic: str, unique_key: str) -> None: ... 
+ def update(self, data: dict[str, Any]) -> None: ... def load_info() -> None: ... def pprint_info(fh: SupportsWrite[str] | None = None) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/networkx/METADATA.toml index 99c93ff415..9259f40c5f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/METADATA.toml @@ -1,4 +1,4 @@ -version = "3.5" +version = "3.6.1" upstream_repository = "https://github.com/networkx/networkx" # requires a version of numpy with a `py.typed` file requires = ["numpy>=1.20"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/__init__.pyi index 9a9c70d111..7dc8465ff4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/__init__.pyi @@ -102,6 +102,7 @@ from networkx.algorithms.mis import * from networkx.algorithms.moral import * from networkx.algorithms.non_randomness import * from networkx.algorithms.operators import * +from networkx.algorithms.perfect_graph import * from networkx.algorithms.planar_drawing import * from networkx.algorithms.planarity import * from networkx.algorithms.polynomials import * diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/approximation/steinertree.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/approximation/steinertree.pyi index 2b3bedd6fe..b4acded879 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/approximation/steinertree.pyi +++ 
b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/approximation/steinertree.pyi @@ -1,5 +1,6 @@ from _typeshed import Incomplete from collections.abc import Iterable +from typing_extensions import deprecated from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -7,6 +8,9 @@ from networkx.utils.backends import _dispatchable __all__ = ["metric_closure", "steiner_tree"] @_dispatchable +@deprecated( + "`metric_closure` is deprecated and will be removed in NetworkX 3.8. Use `networkx.all_pairs_shortest_path_length` instead." +) def metric_closure(G: Graph[_Node], weight="weight"): ... @_dispatchable def steiner_tree(G: Graph[_Node], terminal_nodes: Iterable[Incomplete], weight: str = "weight", method: str | None = None): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/approximation/treewidth.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/approximation/treewidth.pyi index 00f95e3eb0..6a9f9afe2d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/approximation/treewidth.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/approximation/treewidth.pyi @@ -1,4 +1,6 @@ from _typeshed import Incomplete +from collections.abc import Callable, Mapping +from typing import Generic from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -6,16 +8,18 @@ from networkx.utils.backends import _dispatchable __all__ = ["treewidth_min_degree", "treewidth_min_fill_in"] @_dispatchable -def treewidth_min_degree(G: Graph[_Node]): ... +def treewidth_min_degree(G: Graph[_Node]) -> tuple[int, Graph[frozenset[_Node]]]: ... @_dispatchable -def treewidth_min_fill_in(G: Graph[_Node]): ... +def treewidth_min_fill_in(G: Graph[_Node]) -> tuple[int, Graph[frozenset[_Node]]]: ... 
-class MinDegreeHeuristic: +class MinDegreeHeuristic(Generic[_Node]): count: Incomplete - def __init__(self, graph) -> None: ... - def best_node(self, graph): ... + def __init__(self, graph: Graph[_Node]) -> None: ... + def best_node(self, graph: Mapping[_Node, set[_Node]]) -> _Node | None: ... -def min_fill_in_heuristic(graph_dict) -> Incomplete | None: ... +def min_fill_in_heuristic(graph_dict: Mapping[_Node, set[_Node]]) -> _Node | None: ... @_dispatchable -def treewidth_decomp(G: Graph[_Node], heuristic=...) -> tuple[int, Graph[_Node]]: ... +def treewidth_decomp( + G: Graph[_Node], heuristic: Callable[[dict[_Node, set[_Node]]], _Node | None] = ... +) -> tuple[int, Graph[frozenset[_Node]]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/assortativity/mixing.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/assortativity/mixing.pyi index 84168115f9..1650588a9d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/assortativity/mixing.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/assortativity/mixing.pyi @@ -1,5 +1,5 @@ -from _typeshed import Incomplete, SupportsGetItem -from collections.abc import Iterable +from _typeshed import Incomplete +from collections.abc import Iterable, Mapping from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -15,7 +15,7 @@ def attribute_mixing_matrix( G: Graph[_Node], attribute: str, nodes: Iterable[Incomplete] | None = None, - mapping: SupportsGetItem[Incomplete, Incomplete] | None = None, + mapping: Mapping[Incomplete, Incomplete] | None = None, normalized: bool = True, ): ... 
@_dispatchable @@ -30,7 +30,7 @@ def degree_mixing_matrix( weight: str | None = None, nodes: Iterable[Incomplete] | None = None, normalized: bool = True, - mapping: SupportsGetItem[Incomplete, Incomplete] | None = None, + mapping: Mapping[Incomplete, Incomplete] | None = None, ): ... @_dispatchable def mixing_dict(xy, normalized: bool = False) -> dict[Incomplete, Incomplete]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/bipartite/matching.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/bipartite/matching.pyi index cdf9dddbbb..49c12489af 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/bipartite/matching.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/bipartite/matching.pyi @@ -1,5 +1,5 @@ -from _typeshed import Incomplete, SupportsGetItem -from collections.abc import Iterable +from _typeshed import Incomplete +from collections.abc import Iterable, Mapping from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -12,7 +12,7 @@ def hopcroft_karp_matching(G: Graph[_Node], top_nodes: Iterable[_Node] | None = def eppstein_matching(G: Graph[_Node], top_nodes: Iterable[Incomplete] | None = None) -> dict[Incomplete, Incomplete]: ... @_dispatchable def to_vertex_cover( - G: Graph[_Node], matching: SupportsGetItem[Incomplete, Incomplete], top_nodes: Iterable[Incomplete] | None = None + G: Graph[_Node], matching: Mapping[Incomplete, Incomplete], top_nodes: Iterable[Incomplete] | None = None ): ... 
maximum_matching = hopcroft_karp_matching diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/bipartite/matrix.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/bipartite/matrix.pyi index b33fed570b..1a94c13108 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/bipartite/matrix.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/bipartite/matrix.pyi @@ -16,4 +16,11 @@ def biadjacency_matrix( format="csr", ): ... # Return is a complex union of scipy classes depending on the format param @_dispatchable -def from_biadjacency_matrix(A, create_using: Graph[_Node] | None = None, edge_attribute: str = "weight"): ... +def from_biadjacency_matrix( + A, + create_using: Graph[_Node] | None = None, + edge_attribute: str = "weight", + *, + row_order: Iterable[Incomplete] | None = None, + column_order: Iterable[Incomplete] | None = None, +): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/closeness.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/closeness.pyi index 72201138a9..7719ca78e6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/closeness.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/closeness.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, SupportsGetItem +from _typeshed import Incomplete, SupportsKeysAndGetItem from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -13,7 +13,7 @@ def closeness_centrality( def incremental_closeness_centrality( G: Graph[_Node], edge: tuple[Incomplete], - prev_cc: SupportsGetItem[Incomplete, Incomplete] | None = None, + prev_cc: SupportsKeysAndGetItem[Incomplete, Incomplete] | None = None, insertion: bool | None = True, 
wf_improved: bool | None = True, ) -> dict[_Node, float]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness.pyi index c22f749306..cfee1601ec 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/current_flow_betweenness.pyi @@ -20,6 +20,8 @@ def approximate_current_flow_betweenness_centrality( epsilon: float = 0.5, kmax: int = 10000, seed: int | RandomState | None = None, + *, + sample_weight: float = 1, ) -> dict[Incomplete, float]: ... @_dispatchable def current_flow_betweenness_centrality( diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/eigenvector.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/eigenvector.pyi index d0e6f96b92..75ee56abc2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/eigenvector.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/eigenvector.pyi @@ -1,4 +1,5 @@ -from _typeshed import Incomplete, SupportsGetItem +from _typeshed import Incomplete +from collections.abc import Mapping from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -10,7 +11,7 @@ def eigenvector_centrality( G: Graph[_Node], max_iter: int | None = 100, tol: float | None = 1e-06, - nstart: SupportsGetItem[Incomplete, Incomplete] | None = None, + nstart: Mapping[Incomplete, Incomplete] | None = None, weight: str | None = None, ) -> dict[Incomplete, float]: ... 
@_dispatchable diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/katz.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/katz.pyi index ca867e0d62..b8b8cda7b7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/katz.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/katz.pyi @@ -1,4 +1,5 @@ -from _typeshed import Incomplete, SupportsGetItem +from _typeshed import ConvertibleToFloat, Incomplete, SupportsItemAccess +from collections.abc import Iterable, Mapping from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -9,10 +10,10 @@ __all__ = ["katz_centrality", "katz_centrality_numpy"] def katz_centrality( G: Graph[_Node], alpha: float | None = 0.1, - beta: float | SupportsGetItem[Incomplete, Incomplete] | None = 1.0, + beta: ConvertibleToFloat | Iterable[Incomplete] | None = 1.0, max_iter: int | None = 1000, tol: float | None = 1e-06, - nstart: SupportsGetItem[Incomplete, Incomplete] | None = None, + nstart: SupportsItemAccess[Incomplete, Incomplete] | None = None, normalized: bool | None = True, weight: str | None = None, ) -> dict[Incomplete, Incomplete]: ... @@ -20,7 +21,7 @@ def katz_centrality( def katz_centrality_numpy( G: Graph[_Node], alpha: float = 0.1, - beta: float | SupportsGetItem[Incomplete, Incomplete] | None = 1.0, + beta: float | Mapping[Incomplete, Incomplete] | None = 1.0, normalized: bool = True, weight: str | None = None, ) -> dict[Incomplete, Incomplete]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/percolation.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/percolation.pyi index cc5faab10f..becea462a0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/percolation.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/percolation.pyi @@ -1,4 +1,5 @@ -from _typeshed import Incomplete, SupportsGetItem +from _typeshed import Incomplete +from collections.abc import Mapping from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -9,6 +10,6 @@ __all__ = ["percolation_centrality"] def percolation_centrality( G: Graph[_Node], attribute: str | None = "percolation", - states: SupportsGetItem[Incomplete, Incomplete] | None = None, + states: Mapping[Incomplete, Incomplete] | None = None, weight: str | None = None, ) -> dict[Incomplete, float]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/reaching.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/reaching.pyi index d5a6e1479a..b491df5add 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/reaching.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/reaching.pyi @@ -1,4 +1,5 @@ -from _typeshed import Incomplete, SupportsGetItem +from _typeshed import Incomplete +from collections.abc import Mapping from networkx.classes.digraph import DiGraph from networkx.classes.graph import _Node @@ -12,7 +13,7 @@ def global_reaching_centrality(G: DiGraph[_Node], weight: str | None = None, nor def local_reaching_centrality( G: DiGraph[_Node], v: _Node, - paths: SupportsGetItem[Incomplete, Incomplete] | None = None, + paths: Mapping[Incomplete, Incomplete] | None = None, weight: str | None = None, normalized: bool | None = True, ) -> float: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/subgraph_alg.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/subgraph_alg.pyi index 21671ced7d..c6824639b2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/subgraph_alg.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/centrality/subgraph_alg.pyi @@ -6,9 +6,9 @@ from networkx.utils.backends import _dispatchable __all__ = ["subgraph_centrality_exp", "subgraph_centrality", "communicability_betweenness_centrality", "estrada_index"] @_dispatchable -def subgraph_centrality_exp(G: Graph[_Node]) -> dict[Incomplete, float]: ... +def subgraph_centrality_exp(G: Graph[_Node], *, normalized: bool = False) -> dict[Incomplete, float]: ... 
@_dispatchable -def subgraph_centrality(G: Graph[_Node]) -> dict[Incomplete, float]: ... +def subgraph_centrality(G: Graph[_Node], *, normalized: bool = False) -> dict[Incomplete, float]: ... @_dispatchable def communicability_betweenness_centrality(G: Graph[_Node]) -> dict[Incomplete, Incomplete]: ... @_dispatchable diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/cluster.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/cluster.pyi index 15a497ec87..d04b17d420 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/cluster.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/cluster.pyi @@ -1,14 +1,24 @@ from _typeshed import Incomplete -from collections.abc import Iterable +from collections.abc import Generator, Iterable -from networkx.classes.graph import Graph, _Node +from networkx.classes.graph import Graph, _NBunch, _Node from networkx.utils.backends import _dispatchable -__all__ = ["triangles", "average_clustering", "clustering", "transitivity", "square_clustering", "generalized_degree"] +__all__ = [ + "triangles", + "all_triangles", + "average_clustering", + "clustering", + "transitivity", + "square_clustering", + "generalized_degree", +] @_dispatchable def triangles(G: Graph[_Node], nodes=None) -> int | dict[Incomplete, int]: ... @_dispatchable +def all_triangles(G: Graph[_Node], nbunch: _NBunch[_Node] = None) -> Generator[tuple[Incomplete, Incomplete, Incomplete]]: ... +@_dispatchable def average_clustering( G: Graph[_Node], nodes: Iterable[_Node] | None = None, weight: str | None = None, count_zeros: bool = True ) -> float: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/community/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/community/__init__.pyi index 2ac869a36a..dc44dd7c34 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/community/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/community/__init__.pyi @@ -1,9 +1,9 @@ from networkx.algorithms.community.asyn_fluid import * +from networkx.algorithms.community.bipartitions import * from networkx.algorithms.community.centrality import * from networkx.algorithms.community.community_utils import * from networkx.algorithms.community.divisive import * from networkx.algorithms.community.kclique import * -from networkx.algorithms.community.kernighan_lin import * from networkx.algorithms.community.label_propagation import * from networkx.algorithms.community.leiden import * from networkx.algorithms.community.local import * diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/community/bipartitions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/community/bipartitions.pyi new file mode 100644 index 0000000000..991590f7da --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/community/bipartitions.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete +from collections.abc import Iterable + +from networkx.algorithms.shortest_paths.weighted import _WeightFunc +from networkx.classes.graph import Graph, _Node +from networkx.utils.backends import _dispatchable +from numpy.random import RandomState + +__all__ = ["kernighan_lin_bisection", "spectral_modularity_bipartition", "greedy_node_swap_bipartition"] + +@_dispatchable +def kernighan_lin_bisection( + G: Graph[_Node], + partition: tuple[Iterable[Incomplete], Iterable[Incomplete]] | None = None, + 
max_iter: int = 10, + weight: str | _WeightFunc[_Node] = "weight", + seed: int | RandomState | None = None, +) -> tuple[set[Incomplete], set[Incomplete]]: ... +def spectral_modularity_bipartition(G: Graph[_Node]) -> tuple[set[Incomplete], set[Incomplete]]: ... +def greedy_node_swap_bipartition( + G: Graph[_Node], *, init_split: tuple[set[Incomplete], set[Incomplete]] | None = None, max_iter: int = 10 +) -> tuple[set[Incomplete], set[Incomplete]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/community/kernighan_lin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/community/kernighan_lin.pyi deleted file mode 100644 index 00133f8313..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/community/kernighan_lin.pyi +++ /dev/null @@ -1,16 +0,0 @@ -from _typeshed import Incomplete - -from networkx.classes.graph import Graph, _Node -from networkx.utils.backends import _dispatchable -from numpy.random import RandomState - -__all__ = ["kernighan_lin_bisection"] - -@_dispatchable -def kernighan_lin_bisection( - G: Graph[_Node], - partition: tuple[Incomplete] | None = None, - max_iter: int = 10, - weight: str = "weight", - seed: int | RandomState | None = None, -): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/core.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/core.pyi index 386a3c65b0..f24963a9b3 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/core.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/core.pyi @@ -1,4 +1,5 @@ -from _typeshed import Incomplete, SupportsGetItem +from _typeshed import Incomplete +from collections.abc import Mapping from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -8,13 +9,13 @@ __all__ = ["core_number", "k_core", "k_shell", "k_crust", "k_corona", "k_truss", @_dispatchable def core_number(G: Graph[_Node]) -> dict[Incomplete, Incomplete]: ... @_dispatchable -def k_core(G: Graph[_Node], k: int | None = None, core_number: SupportsGetItem[Incomplete, Incomplete] | None = None): ... +def k_core(G: Graph[_Node], k: int | None = None, core_number: Mapping[Incomplete, Incomplete] | None = None): ... @_dispatchable -def k_shell(G: Graph[_Node], k: int | None = None, core_number: SupportsGetItem[Incomplete, Incomplete] | None = None): ... +def k_shell(G: Graph[_Node], k: int | None = None, core_number: Mapping[Incomplete, Incomplete] | None = None): ... @_dispatchable -def k_crust(G: Graph[_Node], k: int | None = None, core_number: SupportsGetItem[Incomplete, Incomplete] | None = None): ... +def k_crust(G: Graph[_Node], k: int | None = None, core_number: Mapping[Incomplete, Incomplete] | None = None): ... @_dispatchable -def k_corona(G: Graph[_Node], k: int, core_number: SupportsGetItem[Incomplete, Incomplete] | None = None): ... +def k_corona(G: Graph[_Node], k: int | None, core_number: Mapping[Incomplete, Incomplete] | None = None): ... @_dispatchable def k_truss(G: Graph[_Node], k: int): ... 
@_dispatchable diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/dag.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/dag.pyi index 3bb3eca9b9..95f72a2d35 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/dag.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/dag.pyi @@ -21,7 +21,6 @@ __all__ = [ "dag_longest_path", "dag_longest_path_length", "dag_to_branching", - "compute_v_structures", ] @_dispatchable @@ -61,5 +60,3 @@ def dag_longest_path( def dag_longest_path_length(G: DiGraph[_Node], weight: str | None = "weight", default_weight: int | None = 1) -> int: ... @_dispatchable def dag_to_branching(G: DiGraph[_Node]) -> DiGraph[_Node]: ... -@_dispatchable -def compute_v_structures(G: DiGraph[_Node]) -> Generator[tuple[Incomplete, Incomplete, Incomplete]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/distance_regular.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/distance_regular.pyi index 9061e2e596..0735afbc4e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/distance_regular.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/distance_regular.pyi @@ -9,8 +9,8 @@ __all__ = ["is_distance_regular", "is_strongly_regular", "intersection_array", " @_dispatchable def is_distance_regular(G: Graph[_Node]) -> bool: ... @_dispatchable -def global_parameters(b, c) -> Generator[tuple[Incomplete, Incomplete, Incomplete]]: ... +def global_parameters(b: list[Incomplete], c: list[Incomplete]) -> Generator[tuple[Incomplete, Incomplete, Incomplete]]: ... @_dispatchable -def intersection_array(G: Graph[_Node]): ... +def intersection_array(G: Graph[_Node]) -> tuple[list[Incomplete], list[Incomplete]]: ... 
@_dispatchable def is_strongly_regular(G: Graph[_Node]) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi index 76b00f5fd0..1deb5eedf2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/isomorphism/ismags.pyi @@ -1,18 +1,31 @@ from _typeshed import Incomplete -from collections.abc import Generator +from collections.abc import Callable, Generator, Hashable, Iterable +from typing import Any __all__ = ["ISMAGS"] +def are_all_equal(iterable: Iterable[Any]) -> bool: ... +def make_partition( + items: Iterable[Hashable], test: Callable[[Hashable, Hashable], bool], check: bool = True +) -> list[set[Incomplete]]: ... +def node_to_part_ID_dict(partition: Iterable[Iterable[Incomplete]]) -> dict[Incomplete, int]: ... +def color_degree_by_node(G, n_colors, e_colors): ... + +class EdgeLookup: + edge_dict: Incomplete + def __init__(self, edge_dict) -> None: ... + def __getitem__(self, edge): ... + def items(self): ... + class ISMAGS: graph: Incomplete subgraph: Incomplete - node_equality: Incomplete - edge_equality: Incomplete def __init__(self, graph, subgraph, node_match=None, edge_match=None, cache=None) -> None: ... + def create_aligned_partitions(self, thing_matcher, sg_things, g_things): ... def find_isomorphisms(self, symmetry: bool = True) -> Generator[Incomplete, Incomplete, Incomplete]: ... def largest_common_subgraph(self, symmetry: bool = True) -> Generator[Incomplete, Incomplete, None]: ... - def analyze_symmetry(self, graph, node_partitions, edge_colors): ... + def analyze_subgraph_symmetry(self) -> dict[Hashable, set[Hashable]]: ... def is_isomorphic(self, symmetry: bool = False) -> bool: ... 
def subgraph_is_isomorphic(self, symmetry: bool = False) -> bool: ... def isomorphisms_iter(self, symmetry: bool = True) -> Generator[Incomplete, Incomplete, None]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/non_randomness.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/non_randomness.pyi index b9b8f840aa..45ed928119 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/non_randomness.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/non_randomness.pyi @@ -4,4 +4,4 @@ from networkx.utils.backends import _dispatchable __all__ = ["non_randomness"] @_dispatchable -def non_randomness(G: Graph[_Node], k: int | None = None, weight: str | None = "weight"): ... +def non_randomness(G: Graph[_Node], k: int | None = None, weight: str | None = "weight") -> tuple[float, float]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/perfect_graph.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/perfect_graph.pyi new file mode 100644 index 0000000000..504389d87e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/perfect_graph.pyi @@ -0,0 +1,7 @@ +from networkx.classes.graph import Graph, _Node +from networkx.utils.backends import _dispatchable + +__all__ = ["is_perfect_graph"] + +@_dispatchable +def is_perfect_graph(G: Graph[_Node]) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/planarity.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/planarity.pyi index 45de3dec2b..0cfc0a3613 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/planarity.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/planarity.pyi @@ -1,8 +1,9 @@ -from _typeshed import Incomplete -from collections.abc import Generator, Mapping, MutableSet, Reversible +from _typeshed import Incomplete, Unused +from collections.abc import Generator, Iterable, Mapping, MutableSet, Reversible +from typing import NoReturn from networkx.classes.digraph import DiGraph -from networkx.classes.graph import Graph, _Node +from networkx.classes.graph import Graph, _EdgePlus, _Node from networkx.utils.backends import _dispatchable __all__ = ["check_planarity", "is_planar", "PlanarEmbedding"] @@ -87,6 +88,9 @@ class LRPlanarity: def sign(self, e): ... def sign_recursive(self, e): ... +# NOTE: Graph subclasses relationships are so complex +# we're only overriding methods that differ in signature from the base classes +# to use inheritance to our advantage and reduce complexity class PlanarEmbedding(DiGraph[_Node]): def get_data(self) -> dict[_Node, list[_Node]]: ... def set_data(self, data: Mapping[_Node, Reversible[_Node]]) -> None: ... @@ -101,4 +105,9 @@ class PlanarEmbedding(DiGraph[_Node]): def traverse_face( self, v: _Node, w: _Node, mark_half_edges: MutableSet[tuple[_Node, _Node]] | None = None ) -> list[_Node]: ... - def to_undirected(self, reciprocal: bool = False, as_view: bool = False) -> Graph[_Node]: ... # type: ignore[override] + # Overriden in __init__ to always raise + def add_edge(self, u_of_edge: _Node, v_of_edge: _Node, **attr: Unused) -> NoReturn: ... + def add_edges_from(self, ebunch_to_add: Iterable[_EdgePlus[_Node]], **attr: Unused) -> NoReturn: ... 
+ def add_weighted_edges_from( + self, ebunch_to_add: Iterable[tuple[_Node, _Node, float]], weight: str = "weight", **attr: Unused + ) -> NoReturn: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/similarity.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/similarity.pyi index e2fd83794a..9b67e69e6f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/similarity.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/similarity.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, SupportsGetItem +from _typeshed import Incomplete, SupportsItemAccess from collections.abc import Callable, Generator from networkx.classes.graph import Graph, _Node @@ -12,6 +12,7 @@ __all__ = [ "optimize_edit_paths", "simrank_similarity", "panther_similarity", + "panther_vector_similarity", "generate_random_paths", ] @@ -93,15 +94,30 @@ def panther_similarity( path_length: int = 5, c: float = 0.5, delta: float = 0.1, - eps=None, + eps: float | None = None, weight: str | None = "weight", + seed: int | RandomState | None = None, ) -> dict[bytes, bytes]: ... @_dispatchable +def panther_vector_similarity( + G: Graph[_Node], + source: _Node, + *, + D: int = 10, + k: int = 5, + path_length: int = 5, + c: float = 0.5, + delta: float = 0.1, + eps: float | None = None, + weight: str | None = "weight", + seed: int | RandomState | None = None, +) -> dict[Incomplete, float]: ... 
+@_dispatchable def generate_random_paths( G: Graph[_Node], sample_size: int, path_length: int = 5, - index_map: SupportsGetItem[Incomplete, Incomplete] | None = None, + index_map: SupportsItemAccess[Incomplete, Incomplete] | None = None, weight: str | None = "weight", seed: int | RandomState | None = None, *, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/threshold.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/threshold.pyi index 73471f8f85..b0edc50fa7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/threshold.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/threshold.pyi @@ -37,4 +37,3 @@ def eigenvalues(creation_sequence): ... def random_threshold_sequence(n, p, seed: int | RandomState | None = None): ... def right_d_threshold_sequence(n: int, m: int) -> list[str]: ... def left_d_threshold_sequence(n: int, m: int) -> list[str]: ... -def swap_d(cs, p_split=1.0, p_combine=1.0, seed: int | RandomState | None = None): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/tree/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/tree/__init__.pyi index b6f2ed45da..1a901de7bd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/tree/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/tree/__init__.pyi @@ -1,6 +1,7 @@ from .branchings import * from .coding import * from .decomposition import * +from .distance_measures import * from .mst import * from .operations import * from .recognition import * diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/tree/distance_measures.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/tree/distance_measures.pyi new file mode 100644 index 0000000000..eb523e2331 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/tree/distance_measures.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete + +from networkx.classes.graph import Graph, _Node +from networkx.utils.backends import _dispatchable + +__all__ = ["center", "centroid"] + +def center(G: Graph[_Node]) -> list[Incomplete]: ... +@_dispatchable +def centroid(G: Graph[_Node]) -> list[Incomplete]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/wiener.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/wiener.pyi index f8f33473d8..5673df94c0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/wiener.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/algorithms/wiener.pyi @@ -1,7 +1,7 @@ from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable -__all__ = ["wiener_index", "schultz_index", "gutman_index"] +__all__ = ["wiener_index", "schultz_index", "gutman_index", "hyper_wiener_index"] @_dispatchable def wiener_index(G: Graph[_Node], weight: str | None = None) -> float: ... @@ -9,3 +9,5 @@ def wiener_index(G: Graph[_Node], weight: str | None = None) -> float: ... def schultz_index(G: Graph[_Node], weight=None) -> float: ... @_dispatchable def gutman_index(G: Graph[_Node], weight=None) -> float: ... +@_dispatchable +def hyper_wiener_index(G: Graph[_Node], weight=None) -> float: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/digraph.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/digraph.pyi index 482c2c208c..281ee14157 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/digraph.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/digraph.pyi @@ -10,6 +10,7 @@ from networkx.classes.reportviews import ( InDegreeView, InEdgeView, InMultiDegreeView, + InMultiEdgeView, OutDegreeView, OutEdgeView, OutMultiDegreeView, @@ -17,6 +18,9 @@ from networkx.classes.reportviews import ( __all__ = ["DiGraph"] +# NOTE: Graph subclasses relationships are so complex +# we're only overriding methods that differ in signature from the base classes +# to use inheritance to our advantage and reduce complexity class DiGraph(Graph[_Node]): @cached_property def succ(self) -> AdjacencyView[_Node, _Node, dict[str, Any]]: ... @@ -30,17 +34,19 @@ class DiGraph(Graph[_Node]): def predecessors(self, n: _Node) -> Iterator[_Node]: ... @cached_property + def edges(self) -> OutEdgeView[_Node]: ... + @cached_property def out_edges(self) -> OutEdgeView[_Node]: ... @cached_property - def in_edges(self) -> InEdgeView[_Node]: ... + # Including subtypes' possible return types for LSP + def in_edges(self) -> InEdgeView[_Node] | InMultiEdgeView[_Node]: ... + @cached_property + def degree(self) -> DiDegreeView[_Node]: ... @cached_property + # Including subtypes' possible return types for LSP def in_degree(self) -> InDegreeView[_Node] | InMultiDegreeView[_Node]: ... @cached_property + # Including subtypes' possible return types for LSP def out_degree(self) -> OutDegreeView[_Node] | OutMultiDegreeView[_Node]: ... - def to_undirected(self, reciprocal: bool = False, as_view: bool = False) -> Graph[_Node]: ... # type: ignore[override] - # reciprocal : If True, only edges that appear in both directions ... will be kept in the undirected graph. 
+ def to_undirected(self, reciprocal: bool = False, as_view: bool = False) -> Graph[_Node]: ... # type: ignore[override] # Has an additional `reciprocal` keyword argument def reverse(self, copy: bool = True) -> Self: ... - @cached_property - def edges(self) -> OutEdgeView[_Node]: ... # type: ignore[override] # An OutEdgeView of the DiGraph as G.edges or G.edges(). - @cached_property - def degree(self) -> int | DiDegreeView[_Node]: ... # type: ignore[override] # Returns DiDegreeView or int diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/function.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/function.pyi index 2cca12ee26..9ee302b90b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/function.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/function.pyi @@ -1,5 +1,5 @@ from _typeshed import Incomplete, SupportsItems, SupportsKeysAndGetItem, Unused -from collections.abc import Generator, Hashable, Iterable, Iterator +from collections.abc import Callable, Generator, Hashable, Iterable, Iterator from typing import Literal, TypeVar, overload from networkx import _dispatchable @@ -48,6 +48,7 @@ __all__ = [ "number_of_selfloops", "path_weight", "is_path", + "describe", ] _U = TypeVar("_U") @@ -171,3 +172,4 @@ def selfloop_edges( def number_of_selfloops(G: Graph[Hashable]) -> int: ... def is_path(G: Graph[_Node], path: Iterable[Incomplete]) -> bool: ... def path_weight(G: Graph[_Node], path, weight) -> int: ... +def describe(G: Graph[_Node], describe_hook: Callable[[Graph[_Node]], dict[str, Incomplete]] | None = None) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/graph.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/graph.pyi index ffdab103ea..086c697e26 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/graph.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/graph.pyi @@ -6,7 +6,7 @@ from typing_extensions import Self, TypeAlias import numpy from networkx.classes.coreviews import AdjacencyView, AtlasView from networkx.classes.digraph import DiGraph -from networkx.classes.reportviews import DegreeView, EdgeView, NodeView +from networkx.classes.reportviews import DegreeView, DiDegreeView, EdgeView, NodeView, OutEdgeView _Node = TypeVar("_Node", bound=Hashable) _NodeWithData: TypeAlias = tuple[_Node, dict[str, Any]] @@ -41,6 +41,9 @@ class Graph(Collection[_Node]): def to_directed_class(self) -> type[DiGraph[_Node]]: ... def to_undirected_class(self) -> type[Graph[_Node]]: ... + # @_dispatchable adds `backend` argument, but this decorated is unsupported constructor type here + # and __init__() ignores this argument + def __new__(cls, *args, backend=None, **kwargs) -> Self: ... def __init__(self, incoming_graph_data: _Data[_Node] | None = None, **attr: Any) -> None: ... # attr: key=value pairs @cached_property def adj(self) -> AdjacencyView[_Node, _Node, dict[str, Any]]: ... @@ -49,10 +52,10 @@ class Graph(Collection[_Node]): def name(self) -> str: ... @name.setter def name(self, s: str) -> None: ... - def __getitem__(self, n: _Node) -> AtlasView[_Node, str, Any]: ... def __iter__(self) -> Iterator[_Node]: ... def __contains__(self, n: object) -> bool: ... def __len__(self) -> int: ... + def __getitem__(self, n: _Node) -> AtlasView[_Node, str, Any]: ... def add_node(self, node_for_adding: _Node, **attr: Any) -> None: ... 
# attr: Set or change node attributes using key=value def add_nodes_from(self, nodes_for_adding: Iterable[_NodePlus[_Node]], **attr: Any) -> None: ... # attr: key=value pairs def remove_node(self, n: _Node) -> None: ... @@ -62,7 +65,8 @@ class Graph(Collection[_Node]): def number_of_nodes(self) -> int: ... def order(self) -> int: ... def has_node(self, n: _Node) -> bool: ... - def add_edge(self, u_of_edge: _Node, v_of_edge: _Node, **attr: Any) -> None: ... + # Including subtypes' possible return types for LSP + def add_edge(self, u_of_edge: _Node, v_of_edge: _Node, **attr: Any) -> Hashable | None: ... # attr: Edge data (or labels or objects) can be assigned using keyword arguments def add_edges_from(self, ebunch_to_add: Iterable[_EdgePlus[_Node]], **attr: Any) -> None: ... # attr: Edge data (or labels or objects) can be assigned using keyword arguments @@ -81,12 +85,14 @@ class Graph(Collection[_Node]): def has_edge(self, u: _Node, v: _Node) -> bool: ... def neighbors(self, n: _Node) -> Iterator[_Node]: ... @cached_property - def edges(self) -> EdgeView[_Node]: ... + # Including subtypes' possible return types for LSP + def edges(self) -> EdgeView[_Node] | OutEdgeView[_Node]: ... def get_edge_data(self, u: _Node, v: _Node, default: Any = None) -> dict[str, Any]: ... # default: any Python object def adjacency(self) -> Iterator[tuple[_Node, dict[_Node, dict[str, Any]]]]: ... @cached_property - def degree(self) -> int | DegreeView[_Node]: ... + # Including subtypes' possible return types for LSP + def degree(self) -> DegreeView[_Node] | DiDegreeView[_Node]: ... def clear(self) -> None: ... def clear_edges(self) -> None: ... def is_multigraph(self) -> bool: ... @@ -94,7 +100,7 @@ class Graph(Collection[_Node]): def copy(self, as_view: bool = False) -> Self: ... def to_directed(self, as_view: bool = False) -> DiGraph[_Node]: ... def to_undirected(self, as_view: bool = False) -> Graph[_Node]: ... - def subgraph(self, nodes: Iterable[_Node]) -> Graph[_Node]: ... 
+ def subgraph(self, nodes: _NBunch[_Node]) -> Graph[_Node]: ... def edge_subgraph(self, edges: Iterable[_Edge[_Node]]) -> Graph[_Node]: ... @overload def size(self, weight: None = None) -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/multidigraph.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/multidigraph.pyi index 374b793d84..190d5c645c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/multidigraph.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/multidigraph.pyi @@ -3,11 +3,11 @@ from typing import Any from networkx.classes.coreviews import MultiAdjacencyView from networkx.classes.digraph import DiGraph -from networkx.classes.graph import _EdgeWithData, _Node +from networkx.classes.graph import _Node from networkx.classes.multigraph import MultiGraph from networkx.classes.reportviews import ( + DiMultiDegreeView, InMultiDegreeView, - InMultiEdgeDataView, InMultiEdgeView, OutMultiDegreeView, OutMultiEdgeView, @@ -15,23 +15,24 @@ from networkx.classes.reportviews import ( __all__ = ["MultiDiGraph"] +# NOTE: Graph subclasses relationships are so complex +# we're only overriding methods that differ in signature from the base classes +# to use inheritance to our advantage and reduce complexity class MultiDiGraph(MultiGraph[_Node], DiGraph[_Node]): @cached_property def succ(self) -> MultiAdjacencyView[_Node, _Node, dict[str, Any]]: ... @cached_property def pred(self) -> MultiAdjacencyView[_Node, _Node, dict[str, Any]]: ... @cached_property - def edges(self) -> OutMultiEdgeView[_Node]: ... # type: ignore[override] - # Returns: OutMultiEdgeView + def edges(self) -> OutMultiEdgeView[_Node]: ... @cached_property def out_edges(self) -> OutMultiEdgeView[_Node]: ... @cached_property - def in_edges(self) -> InMultiEdgeView[_Node] | InMultiEdgeDataView[_Node, _EdgeWithData[_Node]]: ... 
# type: ignore[override] - # Returns : InMultiEdgeView or InMultiEdgeDataView + def in_edges(self) -> InMultiEdgeView[_Node]: ... + @cached_property + def degree(self) -> DiMultiDegreeView[_Node]: ... @cached_property def in_degree(self) -> InMultiDegreeView[_Node]: ... @cached_property def out_degree(self) -> OutMultiDegreeView[_Node]: ... - def to_undirected(self, reciprocal: bool = False, as_view: bool = False) -> MultiGraph[_Node]: ... # type: ignore[override] - def reverse(self, copy: bool = True) -> MultiDiGraph[_Node]: ... - def copy(self, as_view: bool = False) -> MultiDiGraph[_Node]: ... + def to_undirected(self, reciprocal: bool = False, as_view: bool = False) -> MultiGraph[_Node]: ... # type: ignore[override] # Has an additional `reciprocal` keyword argument diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/multigraph.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/multigraph.pyi index d0bb1ef27c..ce68d24d34 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/multigraph.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/classes/multigraph.pyi @@ -1,12 +1,12 @@ from collections.abc import Hashable from functools import cached_property from typing import Any, ClassVar, overload -from typing_extensions import TypeAlias, TypeVar +from typing_extensions import Self, TypeAlias, TypeVar from networkx.classes.coreviews import MultiAdjacencyView from networkx.classes.graph import Graph, _MapFactory, _Node from networkx.classes.multidigraph import MultiDiGraph -from networkx.classes.reportviews import MultiEdgeView +from networkx.classes.reportviews import DiMultiDegreeView, MultiDegreeView, MultiEdgeView, OutMultiEdgeView _MultiEdge: TypeAlias = tuple[_Node, _Node, int] # noqa: Y047 @@ -15,36 +15,46 @@ _KeyT = TypeVar("_KeyT", bound=Hashable) __all__ = ["MultiGraph"] +# NOTE: Graph subclasses relationships are so complex +# 
we're only overriding methods that differ in signature from the base classes +# to use inheritance to our advantage and reduce complexity class MultiGraph(Graph[_Node]): edge_key_dict_factory: ClassVar[_MapFactory] + def to_directed_class(self) -> type[MultiDiGraph[_Node]]: ... + def to_undirected_class(self) -> type[MultiGraph[_Node]]: ... + # @_dispatchable adds `backend` argument, but this decorated is unsupported constructor type here + # and __init__() ignores this argument + def __new__(cls, *args, backend=None, **kwargs) -> Self: ... def __init__(self, incoming_graph_data=None, multigraph_input: bool | None = None, **attr: Any) -> None: ... @cached_property def adj(self) -> MultiAdjacencyView[_Node, _Node, dict[str, Any]]: ... # data can be any type def new_edge_key(self, u: _Node, v: _Node) -> int: ... - @overload # type: ignore[override] # Has an additional `key` keyword argument + # key : hashable identifier, optional (default=lowest unused integer) + @overload # type: ignore[override] # More complex overload def add_edge(self, u_for_edge: _Node, v_for_edge: _Node, key: int | None = None, **attr: Any) -> int: ... @overload def add_edge(self, u_for_edge: _Node, v_for_edge: _Node, key: _KeyT, **attr: Any) -> _KeyT: ... - # key : hashable identifier, optional (default=lowest unused integer) def remove_edge(self, u: _Node, v: _Node, key: Hashable | None = None) -> None: ... def has_edge(self, u: _Node, v: _Node, key: Hashable | None = None) -> bool: ... + @cached_property + # Including subtypes' possible return types for LSP + def edges(self) -> MultiEdgeView[_Node] | OutMultiEdgeView[_Node]: ... + # key : hashable identifier, optional (default=None). + # default : any Python object (default=None). Value to return if the specific edge (u, v, key) is not found. + # Returns: The edge attribute dictionary. 
@overload # type: ignore[override] def get_edge_data( self, u: _Node, v: _Node, key: Hashable, default: _DefaultT | None = None ) -> dict[str, Any] | _DefaultT: ... - # key : hashable identifier, optional (default=None). - # default : any Python object (default=None). Value to return if the specific edge (u, v, key) is not found. - # Returns: The edge attribute dictionary. + # default : any Python object (default=None). Value to return if there are no edges between u and v and no key is specified. + # Returns: A dictionary mapping edge keys to attribute dictionaries for each of those edges if no specific key is provided. @overload def get_edge_data( self, u: _Node, v: _Node, key: None = None, default: _DefaultT | None = None ) -> dict[Hashable, dict[str, Any] | _DefaultT]: ... - # default : any Python object (default=None). Value to return if there are no edges between u and v and no key is specified. - # Returns: A dictionary mapping edge keys to attribute dictionaries for each of those edges if no specific key is provided. def copy(self, as_view: bool = False) -> MultiGraph[_Node]: ... + @cached_property + # Including subtypes' possible return types for LSP + def degree(self) -> MultiDegreeView[_Node] | DiMultiDegreeView[_Node]: ... def to_directed(self, as_view: bool = False) -> MultiDiGraph[_Node]: ... def to_undirected(self, as_view: bool = False) -> MultiGraph[_Node]: ... - def number_of_edges(self, u: _Node | None = None, v: _Node | None = None) -> int: ... - @cached_property - def edges(self) -> MultiEdgeView[_Node]: ... 
# type: ignore[override] - # Returns: MultiEdgeView diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/drawing/layout.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/drawing/layout.pyi index 1d09971ac1..914763eb57 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/drawing/layout.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/drawing/layout.pyi @@ -141,7 +141,6 @@ def forceatlas2_layout( node_mass: Mapping[_Node, float] | None = None, node_size: Mapping[_Node, float] | None = None, weight: str | None = None, - dissuade_hubs: bool = False, linlog: bool = False, seed: Seed | None = None, dim: int = 2, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/drawing/nx_pylab.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/drawing/nx_pylab.pyi index 0d7275400d..160766f7d8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/drawing/nx_pylab.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/drawing/nx_pylab.pyi @@ -110,6 +110,26 @@ def apply_matplotlib_colors( vmax: float | None = None, nodes: bool = True, ) -> None: ... + +class CurvedArrowTextBase: + arrow: FancyArrowPatch + label_pos: float + labels_horizontal: bool + ax: Axes + x: Incomplete + y: Incomplete + angle: Incomplete + def __init__( + self, + arrow: FancyArrowPatch, + *args, + label_pos: float = 0.5, + labels_horizontal: bool = False, + ax: Axes | None = None, + **kwargs, + ) -> None: ... + def draw(self, renderer) -> None: ... 
+ def display( G: _G, canvas: Axes | None = None, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/generators/expanders.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/generators/expanders.pyi index 5786272dce..6a6eb131b9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/generators/expanders.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/generators/expanders.pyi @@ -1,3 +1,5 @@ +from typing_extensions import deprecated + from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable @@ -6,6 +8,7 @@ __all__ = [ "chordal_cycle_graph", "paley_graph", "maybe_regular_expander", + "maybe_regular_expander_graph", "is_regular_expander", "random_regular_expander_graph", ] @@ -17,7 +20,12 @@ def chordal_cycle_graph(p, create_using=None): ... @_dispatchable def paley_graph(p, create_using=None): ... @_dispatchable -def maybe_regular_expander(n, d, *, create_using=None, max_tries=100, seed=None): ... +def maybe_regular_expander_graph(n, d, *, create_using=None, max_tries: int = 100, seed=None): ... +@deprecated( + "`maybe_regular_expander` is a deprecated alias for `maybe_regular_expander_graph`. " + "Use `maybe_regular_expander_graph` instead." +) +def maybe_regular_expander(n, d, *, create_using=None, max_tries: int = 100, seed=None): ... @_dispatchable def is_regular_expander(G: Graph[_Node], *, epsilon: float = 0) -> bool: ... 
@_dispatchable diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/generators/random_graphs.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/generators/random_graphs.pyi index 10a3b070fe..3345d07afc 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/generators/random_graphs.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/generators/random_graphs.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete +from typing_extensions import deprecated from networkx.utils.backends import _dispatchable @@ -20,6 +21,7 @@ __all__ = [ "extended_barabasi_albert_graph", "powerlaw_cluster_graph", "random_lobster", + "random_lobster_graph", "random_shell_graph", "random_powerlaw_tree", "random_powerlaw_tree_sequence", @@ -27,40 +29,43 @@ __all__ = [ ] @_dispatchable -def fast_gnp_random_graph(n, p, seed=None, directed: bool = False): ... +def fast_gnp_random_graph(n, p, seed=None, directed: bool = False, *, create_using=None): ... @_dispatchable -def gnp_random_graph(n, p, seed=None, directed: bool = False): ... +def gnp_random_graph(n, p, seed=None, directed: bool = False, *, create_using=None): ... binomial_graph = gnp_random_graph erdos_renyi_graph = gnp_random_graph @_dispatchable -def dense_gnm_random_graph(n, m, seed=None): ... +def dense_gnm_random_graph(n, m, seed=None, *, create_using=None): ... @_dispatchable -def gnm_random_graph(n, m, seed=None, directed: bool = False): ... +def gnm_random_graph(n, m, seed=None, directed: bool = False, *, create_using=None): ... @_dispatchable -def newman_watts_strogatz_graph(n, k, p, seed=None): ... +def newman_watts_strogatz_graph(n, k, p, seed=None, *, create_using=None): ... @_dispatchable -def watts_strogatz_graph(n, k, p, seed=None): ... +def watts_strogatz_graph(n, k, p, seed=None, *, create_using=None): ... @_dispatchable -def connected_watts_strogatz_graph(n, k, p, tries: int = 100, seed=None): ... 
+def connected_watts_strogatz_graph(n, k, p, tries: int = 100, seed=None, *, create_using=None): ... @_dispatchable -def random_regular_graph(d, n, seed=None): ... +def random_regular_graph(d, n, seed=None, *, create_using=None): ... @_dispatchable -def barabasi_albert_graph(n, m, seed=None, initial_graph=None) -> Graph[Incomplete]: ... +def barabasi_albert_graph(n, m, seed=None, initial_graph=None, *, create_using=None) -> Graph[Incomplete]: ... @_dispatchable -def dual_barabasi_albert_graph(n, m1, m2, p, seed=None, initial_graph=None) -> Graph[Incomplete]: ... +def dual_barabasi_albert_graph(n, m1, m2, p, seed=None, initial_graph=None, *, create_using=None) -> Graph[Incomplete]: ... @_dispatchable -def extended_barabasi_albert_graph(n, m, p, q, seed=None) -> Graph[Incomplete]: ... +def extended_barabasi_albert_graph(n, m, p, q, seed=None, *, create_using=None) -> Graph[Incomplete]: ... @_dispatchable -def powerlaw_cluster_graph(n, m, p, seed=None): ... +def powerlaw_cluster_graph(n, m, p, seed=None, *, create_using=None): ... @_dispatchable -def random_lobster(n, p1, p2, seed=None): ... +def random_lobster_graph(n, p1, p2, seed=None, *, create_using=None): ... @_dispatchable -def random_shell_graph(constructor, seed=None): ... +@deprecated("`random_lobster` is a deprecated alias for `random_lobster_graph`. Use `random_lobster_graph` instead.") +def random_lobster(n, p1, p2, seed=None, *, create_using=None): ... @_dispatchable -def random_powerlaw_tree(n, gamma: float = 3, seed=None, tries: int = 100): ... +def random_shell_graph(constructor, seed=None, *, create_using=None): ... +@_dispatchable +def random_powerlaw_tree(n, gamma: float = 3, seed=None, tries: int = 100, *, create_using=None): ... @_dispatchable def random_powerlaw_tree_sequence(n, gamma: float = 3, seed=None, tries: int = 100): ... @_dispatchable -def random_kernel_graph(n, kernel_integral, kernel_root=None, seed=None): ... 
+def random_kernel_graph(n, kernel_integral, kernel_root=None, seed=None, *, create_using=None): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/generators/small.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/generators/small.pyi index 4d5f7dc952..d9a242eb19 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/generators/small.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/generators/small.pyi @@ -9,6 +9,7 @@ __all__ = [ "diamond_graph", "dodecahedral_graph", "frucht_graph", + "generalized_petersen_graph", "heawood_graph", "hoffman_singleton_graph", "house_graph", @@ -63,6 +64,8 @@ def pappus_graph(): ... @_dispatchable def petersen_graph(create_using=None): ... @_dispatchable +def generalized_petersen_graph(n: int, k: int, *, create_using=None): ... +@_dispatchable def sedgewick_maze_graph(create_using=None): ... @_dispatchable def tetrahedral_graph(create_using=None): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/readwrite/json_graph/node_link.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/readwrite/json_graph/node_link.pyi index 19cd0e1ca5..57f07ceddd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/readwrite/json_graph/node_link.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/readwrite/json_graph/node_link.pyi @@ -1,29 +1,8 @@ -from typing import overload -from typing_extensions import deprecated - from networkx.classes.graph import Graph, _Node from networkx.utils.backends import _dispatchable __all__ = ["node_link_data", "node_link_graph"] -@overload -@deprecated( - """\ -The `link` argument is deprecated and will be removed in version `3.6`. 
-Use the `edges` keyword instead.""" -) -def node_link_data( - G: Graph[_Node], - *, - source: str = "source", - target: str = "target", - name: str = "id", - key: str = "key", - edges: str | None = None, - nodes: str = "nodes", - link: str | None = None, -): ... -@overload def node_link_data( G: Graph[_Node], *, @@ -31,7 +10,7 @@ def node_link_data( target: str = "target", name: str = "id", key: str = "key", - edges: str | None = None, + edges: str = "edges", nodes: str = "nodes", ): ... @_dispatchable @@ -45,7 +24,6 @@ def node_link_graph( target: str = "target", name: str = "id", key: str = "key", - edges: str | None = None, + edges: str = "edges", nodes: str = "nodes", - link: str | None = None, ): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/utils/misc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/utils/misc.pyi index f28f54ac5e..f925c63d1b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/utils/misc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/utils/misc.pyi @@ -59,6 +59,6 @@ class PythonRandomInterface: def create_py_random_state(random_state: _RandomState = None): ... def nodes_equal(nodes1, nodes2) -> bool: ... -def edges_equal(edges1, edges2) -> bool: ... +def edges_equal(edges1, edges2, *, directed: bool = False) -> bool: ... def graphs_equal(graph1, graph2) -> bool: ... def _clear_cache(G: Graph[_Node]) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/utils/random_sequence.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/utils/random_sequence.pyi index e298501924..7bcca8b6d4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/utils/random_sequence.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/utils/random_sequence.pyi @@ -1,5 +1,9 @@ +from _typeshed import Incomplete +from collections.abc import Iterable + __all__ = [ "powerlaw_sequence", + "is_valid_tree_degree_sequence", "zipf_rv", "cumulative_distribution", "discrete_sequence", @@ -8,6 +12,7 @@ __all__ = [ ] def powerlaw_sequence(n, exponent: float = 2.0, seed=None): ... +def is_valid_tree_degree_sequence(degree_sequence: Iterable[Incomplete]) -> tuple[bool, str]: ... def zipf_rv(alpha, xmin: int = 1, seed=None) -> int: ... def cumulative_distribution(distribution): ... def discrete_sequence(n, distribution=None, cdistribution=None, seed=None): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/utils/union_find.pyi b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/utils/union_find.pyi index 97df662a39..1d73f4eb7b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/utils/union_find.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/networkx/networkx/utils/union_find.pyi @@ -1,11 +1,13 @@ -from _typeshed import Incomplete -from collections.abc import Generator, Iterator +from collections.abc import Generator, Iterable, Iterator, Mapping +from typing import Generic, TypeVar -class UnionFind: - parents: Incomplete - weights: Incomplete - def __init__(self, elements=None) -> None: ... - def __getitem__(self, object): ... - def __iter__(self) -> Iterator[Incomplete]: ... - def to_sets(self) -> Generator[Incomplete, Incomplete, None]: ... - def union(self, *objects): ... 
+_T = TypeVar("_T") + +class UnionFind(Generic[_T]): + parents: Mapping[_T, _T] + weights: Mapping[_T, int] + def __init__(self, elements: Iterable[_T] | None = None) -> None: ... + def __getitem__(self, object: _T) -> _T: ... + def __iter__(self) -> Iterator[_T]: ... + def to_sets(self) -> Generator[set[_T]]: ... + def union(self, *objects: _T) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/parsimonious/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/parsimonious/METADATA.toml index 698a927e83..8d2d5ca5fe 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/parsimonious/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/parsimonious/METADATA.toml @@ -1,2 +1,2 @@ -version = "0.10.*" +version = "0.11.*" upstream_repository = "https://github.com/erikrose/parsimonious" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/parsimonious/parsimonious/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/parsimonious/parsimonious/exceptions.pyi index 10f2ca5c7e..6b3eb1ac38 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/parsimonious/parsimonious/exceptions.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/parsimonious/parsimonious/exceptions.pyi @@ -3,7 +3,9 @@ from parsimonious.grammar import LazyReference from parsimonious.nodes import Node from parsimonious.utils import StrAndRepr -class ParseError(StrAndRepr, Exception): +class ParsimoniousError(Exception): ... + +class ParseError(StrAndRepr, ParsimoniousError): text: str pos: int expr: Expression | None @@ -14,11 +16,11 @@ class ParseError(StrAndRepr, Exception): class LeftRecursionError(ParseError): ... class IncompleteParseError(ParseError): ... -class VisitationError(Exception): +class VisitationError(ParsimoniousError): original_class: type[BaseException] def __init__(self, exc: BaseException, exc_class: type[BaseException], node: Node) -> None: ... 
-class BadGrammar(StrAndRepr, Exception): ... +class BadGrammar(StrAndRepr, ParsimoniousError): ... class UndefinedLabel(BadGrammar): label: LazyReference diff --git a/packages/pyright-internal/typeshed-fallback/stubs/peewee/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/peewee/METADATA.toml index 4f56e0596b..38405dda95 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/peewee/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/peewee/METADATA.toml @@ -1,4 +1,4 @@ -version = "3.18.2" +version = "3.18.3" upstream_repository = "https://github.com/coleifer/peewee" # We're not providing stubs for all playhouse modules right now # https://github.com/python/typeshed/pull/11731#issuecomment-2065729058 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/containers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/containers.pyi index bdcd5208b9..6fa55546e9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/containers.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/containers.pyi @@ -1,4 +1,4 @@ -from collections.abc import Callable, Iterable, Iterator, MutableMapping, Sequence +from collections.abc import Callable, Iterable, Iterator, MutableMapping, MutableSequence, Sequence from typing import Any, Protocol, SupportsIndex, TypeVar, overload, type_check_only from typing_extensions import Self @@ -25,12 +25,13 @@ class BaseContainer(Sequence[_T]): def __hash__(self) -> int: ... # Same as list.sort, the extra sort_function kwarg errors in Python 3 def sort(self, *, key: Callable[[_T], Any] | None = None, reverse: bool = False) -> None: ... + def reverse(self) -> None: ... @overload def __getitem__(self, key: SupportsIndex) -> _T: ... @overload def __getitem__(self, key: slice) -> list[_T]: ... 
-class RepeatedScalarFieldContainer(BaseContainer[_ScalarV]): +class RepeatedScalarFieldContainer(BaseContainer[_ScalarV], MutableSequence[_ScalarV]): __slots__ = ["_type_checker"] def __init__(self, message_listener: MessageListener, type_checker: _ValueChecker[_ScalarV]) -> None: ... def append(self, value: _ScalarV) -> None: ... @@ -46,7 +47,7 @@ class RepeatedScalarFieldContainer(BaseContainer[_ScalarV]): def __delitem__(self, key: int | slice) -> None: ... def __eq__(self, other: object) -> bool: ... -class RepeatedCompositeFieldContainer(BaseContainer[_MessageV]): +class RepeatedCompositeFieldContainer(BaseContainer[_MessageV], MutableSequence[_MessageV]): __slots__ = ["_message_descriptor"] def __init__(self, message_listener: MessageListener, message_descriptor: Descriptor) -> None: ... def add(self, **kwargs: Any) -> _MessageV: ... @@ -56,6 +57,10 @@ class RepeatedCompositeFieldContainer(BaseContainer[_MessageV]): def MergeFrom(self, other: Self | Iterable[_MessageV]) -> None: ... def remove(self, elem: _MessageV) -> None: ... def pop(self, key: int = -1) -> _MessageV: ... + @overload + def __setitem__(self, key: int, value: _MessageV) -> None: ... + @overload + def __setitem__(self, key: slice, value: Iterable[_MessageV]) -> None: ... def __delitem__(self, key: int | slice) -> None: ... def __eq__(self, other: object) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/well_known_types.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/well_known_types.pyi index 69649998db..6527d3f697 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/well_known_types.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/well_known_types.pyi @@ -67,7 +67,7 @@ _StructValue: TypeAlias = struct_pb2.Struct | struct_pb2.ListValue | str | float _StructValueArg: TypeAlias = _StructValue | Mapping[str, _StructValueArg] | Sequence[_StructValueArg] class Struct: - __slots__: tuple[()] = () + __slots__: tuple[str, ...] = () def __getitem__(self, key: str) -> _StructValue: ... def __setitem__(self, key: str, value: _StructValueArg) -> None: ... def __delitem__(self, key: str) -> None: ... @@ -81,7 +81,7 @@ class Struct: def update(self, dictionary: SupportsItems[str, _StructValueArg]) -> None: ... class ListValue: - __slots__: tuple[()] = () + __slots__: tuple[str, ...] = () def __len__(self) -> int: ... def append(self, value: _StructValue) -> None: ... def extend(self, elem_seq: Iterable[_StructValue]) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/message.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/message.pyi index 5145b5330a..bd2aff9b76 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/message.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/message.pyi @@ -12,7 +12,7 @@ class DecodeError(Error): ... class EncodeError(Error): ... class Message: - __slots__: tuple[()] = () + __slots__: tuple[str, ...] = () DESCRIPTOR: Descriptor | _upb_Descriptor def __deepcopy__(self, memo: Any = None) -> Self: ... def __eq__(self, other_msg): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/psutil/METADATA.toml index 85e2bcc2e4..c8c4468498 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/METADATA.toml @@ -1,4 +1,4 @@ -version = "7.0.*" +version = "7.2.1" upstream_repository = "https://github.com/giampaolo/psutil" [tool.stubtest] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/__init__.pyi index 817a93127d..d9238ebf66 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/__init__.pyi @@ -2,6 +2,7 @@ import sys from _typeshed import Incomplete from collections.abc import Callable, Iterable, Iterator from contextlib import AbstractContextManager +from types import TracebackType from typing import Any, Literal, Protocol, overload, type_check_only from typing_extensions import Self, TypeAlias, deprecated @@ -51,29 +52,10 @@ from psutil._common import ( NoSuchProcess as NoSuchProcess, TimeoutExpired as TimeoutExpired, ZombieProcess as ZombieProcess, - pconn, - pcputimes, - pctxsw, - pgids, - pionice, - popenfile, - pthread, - puids, - sconn, - scpufreq, - scpustats, - sdiskio, - sdiskpart, - sdiskusage, - sfan, - shwtemp, - snetio, - snicaddr, - snicstats, - sswap, - suser, ) +from . import _ntuples as _ntp + if sys.platform == "linux": from ._pslinux import ( IOPRIO_CLASS_BE as IOPRIO_CLASS_BE, @@ -81,8 +63,8 @@ if sys.platform == "linux": IOPRIO_CLASS_NONE as IOPRIO_CLASS_NONE, IOPRIO_CLASS_RT as IOPRIO_CLASS_RT, ) - def sensors_temperatures(fahrenheit: bool = ...) -> dict[str, list[shwtemp]]: ... - def sensors_fans() -> dict[str, list[sfan]]: ... 
+ def sensors_temperatures(fahrenheit: bool = False) -> dict[str, list[_ntp.shwtemp]]: ... + def sensors_fans() -> dict[str, list[_ntp.sfan]]: ... PROCFS_PATH: str RLIMIT_AS: int RLIMIT_CORE: int @@ -120,28 +102,19 @@ if sys.platform == "win32": win_service_iter as win_service_iter, ) +# Linux + glibc, Windows, macOS, FreeBSD, NetBSD: +def heap_info() -> _ntp.pheap: ... +def heap_trim() -> None: ... + if sys.platform == "linux": - from ._pslinux import pfullmem, pmem, scputimes, sensors_battery as sensors_battery, svmem + from ._pslinux import sensors_battery as sensors_battery elif sys.platform == "darwin": - from ._psosx import pfullmem, pmem, scputimes, sensors_battery as sensors_battery, svmem + from ._psosx import sensors_battery as sensors_battery elif sys.platform == "win32": - from ._pswindows import pfullmem, pmem, scputimes, sensors_battery as sensors_battery, svmem + from ._pswindows import sensors_battery as sensors_battery else: - scputimes = Incomplete - - class pmem(Any): ... - class pfullmem(Any): ... - class svmem(Any): ... - def sensors_battery(): ... -if sys.platform == "linux": - from ._pslinux import pio -elif sys.platform == "win32": - from ._pswindows import pio -else: - from ._common import pio - AF_LINK: int version_info: tuple[int, int, int] __version__: str @@ -172,11 +145,11 @@ class Process: @property def pid(self) -> int: ... # Only present if attrs argument is passed to process_iter - info: dict[str, Any] + info: dict[str, Incomplete] def oneshot(self) -> AbstractContextManager[None]: ... def as_dict( self, attrs: list[str] | tuple[str, ...] | set[str] | frozenset[str] | None = None, ad_value=None - ) -> dict[str, Any]: ... + ) -> dict[str, Incomplete]: ... def parent(self) -> Process | None: ... def parents(self) -> list[Process]: ... def is_running(self) -> bool: ... @@ -190,47 +163,52 @@ class Process: def cwd(self) -> str: ... def nice(self, value: int | None = None) -> int: ... 
if sys.platform != "win32": - def uids(self) -> puids: ... - def gids(self) -> pgids: ... + def uids(self) -> _ntp.puids: ... + def gids(self) -> _ntp.pgids: ... def terminal(self) -> str: ... def num_fds(self) -> int: ... if sys.platform != "darwin": - def io_counters(self) -> pio: ... - def ionice(self, ioclass: int | None = None, value: int | None = None) -> pionice: ... - def cpu_affinity(self, cpus: list[int] | None = None) -> list[int] | None: ... - def memory_maps(self, grouped: bool = True): ... + def io_counters(self) -> _ntp.pio: ... + def ionice(self, ioclass: int | None = None, value: int | None = None) -> _ntp.pionice: ... + @overload + def cpu_affinity(self, cpus: None = None) -> list[int]: ... + @overload + def cpu_affinity(self, cpus: list[int]) -> None: ... + def memory_maps(self, grouped: bool = True) -> list[Incomplete]: ... if sys.platform == "linux": - def rlimit(self, resource: int, limits: tuple[int, int] | None = ...) -> tuple[int, int]: ... + def rlimit(self, resource: int, limits: tuple[int, int] | None = None) -> tuple[int, int]: ... def cpu_num(self) -> int: ... def environ(self) -> dict[str, str]: ... if sys.platform == "win32": def num_handles(self) -> int: ... - def num_ctx_switches(self) -> pctxsw: ... + def num_ctx_switches(self) -> _ntp.pctxsw: ... def num_threads(self) -> int: ... - def threads(self) -> list[pthread]: ... + def threads(self) -> list[_ntp.pthread]: ... def children(self, recursive: bool = False) -> list[Process]: ... def cpu_percent(self, interval: float | None = None) -> float: ... - def cpu_times(self) -> pcputimes: ... - def memory_info(self) -> pmem: ... - def memory_full_info(self) -> pfullmem: ... + def cpu_times(self) -> _ntp.pcputimes: ... + def memory_info(self) -> _ntp.pmem: ... + def memory_full_info(self) -> _ntp.pfullmem: ... def memory_percent(self, memtype: str = "rss") -> float: ... - def open_files(self) -> list[popenfile]: ... + def open_files(self) -> list[_ntp.popenfile]: ... 
@deprecated('use "net_connections" method instead') - def connections(self, kind: str = "inet") -> list[pconn]: ... + def connections(self, kind: str = "inet") -> list[_ntp.pconn]: ... def send_signal(self, sig: int) -> None: ... def suspend(self) -> None: ... def resume(self) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... def wait(self, timeout: float | None = None) -> int: ... - def net_connections(self, kind: str = "inet") -> list[pconn]: ... + def net_connections(self, kind: str = "inet") -> list[_ntp.pconn]: ... class Popen(Process): def __init__(self, *args, **kwargs) -> None: ... def __enter__(self) -> Self: ... - def __exit__(self, *args: object, **kwargs: object) -> None: ... + def __exit__( + self, exc_type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... def __getattribute__(self, name: str) -> Any: ... def __dir__(self) -> list[str]: ... @@ -251,13 +229,13 @@ def wait_procs( ) -> tuple[list[Process], list[Process]]: ... def cpu_count(logical: bool = True) -> int | None: ... @overload -def cpu_freq(percpu: Literal[False] = False) -> scpufreq: ... +def cpu_freq(percpu: Literal[False] = False) -> _ntp.scpufreq: ... @overload -def cpu_freq(percpu: Literal[True]) -> list[scpufreq]: ... +def cpu_freq(percpu: Literal[True]) -> list[_ntp.scpufreq]: ... @overload -def cpu_times(percpu: Literal[False] = False) -> scputimes: ... +def cpu_times(percpu: Literal[False] = False) -> _ntp.scputimes: ... @overload -def cpu_times(percpu: Literal[True]) -> list[scputimes]: ... +def cpu_times(percpu: Literal[True]) -> list[_ntp.scputimes]: ... @overload def cpu_percent(interval: float | None = None, percpu: Literal[False] = False) -> float: ... @overload @@ -265,27 +243,29 @@ def cpu_percent(interval: float | None, percpu: Literal[True]) -> list[float]: . @overload def cpu_percent(*, percpu: Literal[True]) -> list[float]: ... 
@overload -def cpu_times_percent(interval: float | None = None, percpu: Literal[False] = False) -> scputimes: ... +def cpu_times_percent(interval: float | None = None, percpu: Literal[False] = False) -> _ntp.scputimes: ... @overload -def cpu_times_percent(interval: float | None, percpu: Literal[True]) -> list[scputimes]: ... +def cpu_times_percent(interval: float | None, percpu: Literal[True]) -> list[_ntp.scputimes]: ... @overload -def cpu_times_percent(*, percpu: Literal[True]) -> list[scputimes]: ... -def cpu_stats() -> scpustats: ... +def cpu_times_percent(*, percpu: Literal[True]) -> list[_ntp.scputimes]: ... +def cpu_stats() -> _ntp.scpustats: ... def getloadavg() -> tuple[float, float, float]: ... -def virtual_memory() -> svmem: ... -def swap_memory() -> sswap: ... -def disk_usage(path: str) -> sdiskusage: ... -def disk_partitions(all: bool = False) -> list[sdiskpart]: ... +def virtual_memory() -> _ntp.svmem: ... +def swap_memory() -> _ntp.sswap: ... +def disk_usage(path: str) -> _ntp.sdiskusage: ... +def disk_partitions(all: bool = False) -> list[_ntp.sdiskpart]: ... + +# TODO: Incorrect sdiskio for BSD systems: @overload -def disk_io_counters(perdisk: Literal[False] = False, nowrap: bool = True) -> sdiskio | None: ... +def disk_io_counters(perdisk: Literal[False] = False, nowrap: bool = True) -> _ntp.sdiskio | None: ... @overload -def disk_io_counters(perdisk: Literal[True], nowrap: bool = True) -> dict[str, sdiskio]: ... +def disk_io_counters(perdisk: Literal[True], nowrap: bool = True) -> dict[str, _ntp.sdiskio]: ... @overload -def net_io_counters(pernic: Literal[False] = False, nowrap: bool = True) -> snetio: ... +def net_io_counters(pernic: Literal[False] = False, nowrap: bool = True) -> _ntp.snetio: ... @overload -def net_io_counters(pernic: Literal[True], nowrap: bool = True) -> dict[str, snetio]: ... -def net_connections(kind: str = "inet") -> list[sconn]: ... -def net_if_addrs() -> dict[str, list[snicaddr]]: ... 
-def net_if_stats() -> dict[str, snicstats]: ... +def net_io_counters(pernic: Literal[True], nowrap: bool = True) -> dict[str, _ntp.snetio]: ... +def net_connections(kind: str = "inet") -> list[_ntp.sconn]: ... +def net_if_addrs() -> dict[str, list[_ntp.snicaddr]]: ... +def net_if_stats() -> dict[str, _ntp.snicstats]: ... def boot_time() -> float: ... -def users() -> list[suser]: ... +def users() -> list[_ntp.suser]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_common.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_common.pyi index 2c3813b79c..8f952d28bd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_common.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_common.pyi @@ -1,286 +1,179 @@ import enum -from _typeshed import StrOrBytesPath, SupportsWrite +import io +import sys +import threading +from _typeshed import ConvertibleToFloat, FileDescriptorOrPath, Incomplete, StrOrBytesPath, SupportsWrite +from collections import defaultdict from collections.abc import Callable from socket import AF_INET6 as AF_INET6, AddressFamily, SocketKind -from typing import Any, Literal, NamedTuple, TypeVar, overload - -POSIX: bool -WINDOWS: bool -LINUX: bool -MACOS: bool -OSX: bool -FREEBSD: bool -OPENBSD: bool -NETBSD: bool -BSD: bool -SUNOS: bool -AIX: bool - -STATUS_RUNNING: Literal["running"] -STATUS_SLEEPING: Literal["sleeping"] -STATUS_DISK_SLEEP: Literal["disk-sleep"] -STATUS_STOPPED: Literal["stopped"] -STATUS_TRACING_STOP: Literal["tracing-stop"] -STATUS_ZOMBIE: Literal["zombie"] -STATUS_DEAD: Literal["dead"] -STATUS_WAKE_KILL: Literal["wake-kill"] -STATUS_WAKING: Literal["waking"] -STATUS_IDLE: Literal["idle"] -STATUS_LOCKED: Literal["locked"] -STATUS_WAITING: Literal["waiting"] -STATUS_SUSPENDED: Literal["suspended"] -STATUS_PARKED: Literal["parked"] - -CONN_ESTABLISHED: str -CONN_SYN_SENT: str -CONN_SYN_RECV: str -CONN_FIN_WAIT1: str -CONN_FIN_WAIT2: str 
-CONN_TIME_WAIT: str -CONN_CLOSE: str -CONN_CLOSE_WAIT: str -CONN_LAST_ACK: str -CONN_LISTEN: str -CONN_CLOSING: str -CONN_NONE: str -NIC_DUPLEX_FULL: int -NIC_DUPLEX_HALF: int -NIC_DUPLEX_UNKNOWN: int +from typing import BinaryIO, Final, SupportsIndex, TypeVar, overload +from typing_extensions import ParamSpec + +from . import _ntuples as ntp + +POSIX: Final[bool] +WINDOWS: Final[bool] +LINUX: Final[bool] +MACOS: Final[bool] +OSX: Final[bool] +FREEBSD: Final[bool] +OPENBSD: Final[bool] +NETBSD: Final[bool] +BSD: Final[bool] +SUNOS: Final[bool] +AIX: Final[bool] + +STATUS_RUNNING: Final = "running" +STATUS_SLEEPING: Final = "sleeping" +STATUS_DISK_SLEEP: Final = "disk-sleep" +STATUS_STOPPED: Final = "stopped" +STATUS_TRACING_STOP: Final = "tracing-stop" +STATUS_ZOMBIE: Final = "zombie" +STATUS_DEAD: Final = "dead" +STATUS_WAKE_KILL: Final = "wake-kill" +STATUS_WAKING: Final = "waking" +STATUS_IDLE: Final = "idle" +STATUS_LOCKED: Final = "locked" +STATUS_WAITING: Final = "waiting" +STATUS_SUSPENDED: Final = "suspended" +STATUS_PARKED: Final = "parked" + +CONN_ESTABLISHED: Final = "ESTABLISHED" +CONN_SYN_SENT: Final = "SYN_SENT" +CONN_SYN_RECV: Final = "SYN_RECV" +CONN_FIN_WAIT1: Final = "FIN_WAIT1" +CONN_FIN_WAIT2: Final = "FIN_WAIT2" +CONN_TIME_WAIT: Final = "TIME_WAIT" +CONN_CLOSE: Final = "CLOSE" +CONN_CLOSE_WAIT: Final = "CLOSE_WAIT" +CONN_LAST_ACK: Final = "LAST_ACK" +CONN_LISTEN: Final = "LISTEN" +CONN_CLOSING: Final = "CLOSING" +CONN_NONE: Final = "NONE" class NicDuplex(enum.IntEnum): NIC_DUPLEX_FULL = 2 NIC_DUPLEX_HALF = 1 NIC_DUPLEX_UNKNOWN = 0 -POWER_TIME_UNKNOWN: int -POWER_TIME_UNLIMITED: int +NIC_DUPLEX_FULL: Final = NicDuplex.NIC_DUPLEX_FULL +NIC_DUPLEX_HALF: Final = NicDuplex.NIC_DUPLEX_HALF +NIC_DUPLEX_UNKNOWN: Final = NicDuplex.NIC_DUPLEX_UNKNOWN class BatteryTime(enum.IntEnum): POWER_TIME_UNKNOWN = -1 POWER_TIME_UNLIMITED = -2 -ENCODING: str -ENCODING_ERRS: str - -class sswap(NamedTuple): - total: int - used: int - free: int - percent: float - sin: 
int - sout: int - -class sdiskusage(NamedTuple): - total: int - used: int - free: int - percent: float - -class sdiskio(NamedTuple): - read_count: int - write_count: int - read_bytes: int - write_bytes: int - read_time: int - write_time: int - -class sdiskpart(NamedTuple): - device: str - mountpoint: str - fstype: str - opts: str - -class snetio(NamedTuple): - bytes_sent: int - bytes_recv: int - packets_sent: int - packets_recv: int - errin: int - errout: int - dropin: int - dropout: int - -class suser(NamedTuple): - name: str - terminal: str | None - host: str | None - started: float - pid: str - -class sconn(NamedTuple): - fd: int - family: AddressFamily - type: SocketKind - laddr: addr | tuple[()] - raddr: addr | tuple[()] - status: str - pid: int | None - -class snicaddr(NamedTuple): - family: AddressFamily - address: str - netmask: str | None - broadcast: str | None - ptp: str | None - -class snicstats(NamedTuple): - isup: bool - duplex: int - speed: int - mtu: int - flags: str - -class scpustats(NamedTuple): - ctx_switches: int - interrupts: int - soft_interrupts: int - syscalls: int - -class scpufreq(NamedTuple): - current: float - min: float - max: float - -class shwtemp(NamedTuple): - label: str - current: float - high: float | None - critical: float | None - -class sbattery(NamedTuple): - percent: int - secsleft: int - power_plugged: bool - -class sfan(NamedTuple): - label: str - current: int +POWER_TIME_UNKNOWN: Final = BatteryTime.POWER_TIME_UNKNOWN +POWER_TIME_UNLIMITED: Final = BatteryTime.POWER_TIME_UNLIMITED -class pcputimes(NamedTuple): - user: float - system: float - children_user: float - children_system: float - -class popenfile(NamedTuple): - path: str - fd: int - -class pthread(NamedTuple): - id: int - user_time: float - system_time: float - -class puids(NamedTuple): - real: int - effective: int - saved: int - -class pgids(NamedTuple): - real: int - effective: int - saved: int - -class pio(NamedTuple): - read_count: int - write_count: int - 
read_bytes: int - write_bytes: int - -class pionice(NamedTuple): - ioclass: int - value: int - -class pctxsw(NamedTuple): - voluntary: int - involuntary: int - -class pconn(NamedTuple): - fd: int - family: AddressFamily - type: SocketKind - laddr: addr - raddr: addr - status: str - -class addr(NamedTuple): - ip: str - port: int +ENCODING: Final[str] +ENCODING_ERRS: Final[str] conn_tmap: dict[str, tuple[list[AddressFamily], list[SocketKind]]] -class Error(Exception): - __module__: str - msg: Any - def __init__(self, msg: str = ...) -> None: ... +class Error(Exception): ... class NoSuchProcess(Error): - __module__: str - pid: Any - name: Any - msg: Any - def __init__(self, pid, name=None, msg=None) -> None: ... + pid: int + name: str | None + msg: str + def __init__(self, pid: int, name: str | None = None, msg: str | None = None) -> None: ... class ZombieProcess(NoSuchProcess): - __module__: str - pid: Any - ppid: Any - name: Any - msg: Any - def __init__(self, pid, name=None, ppid=None, msg=None) -> None: ... + ppid: int | None + def __init__(self, pid: int, name: str | None = None, ppid: int | None = None, msg: str | None = None) -> None: ... class AccessDenied(Error): - __module__: str - pid: Any - name: Any - msg: Any - def __init__(self, pid=None, name=None, msg=None) -> None: ... + pid: int | None + name: str | None + msg: str + def __init__(self, pid: int | None = None, name: str | None = None, msg: str | None = None) -> None: ... class TimeoutExpired(Error): - __module__: str - seconds: Any - pid: Any - name: Any - def __init__(self, seconds, pid=None, name=None) -> None: ... + seconds: float + pid: int | None + name: str | None + msg: str + def __init__(self, seconds: float, pid: int | None = None, name: str | None = None) -> None: ... + +_P = ParamSpec("_P") +_R = TypeVar("_R") +_T = TypeVar("_T") -_Func = TypeVar("_Func", bound=Callable[..., Any]) +def usage_percent(used: ConvertibleToFloat, total: float, round_: SupportsIndex | None = None) -> float: ... 
-def usage_percent(used, total, round_: int | None = None) -> float: ... -def memoize(fun: _Func) -> _Func: ... -def memoize_when_activated(fun: _Func) -> _Func: ... +# returned function has `cache_clear()` attribute: +def memoize(fun: Callable[_P, _R]) -> Callable[_P, _R]: ... + +# returned function has `cache_activate(proc)` and `cache_deactivate(proc)` attributes: +def memoize_when_activated(fun: Callable[_P, _R]) -> Callable[_P, _R]: ... def isfile_strict(path: StrOrBytesPath) -> bool: ... def path_exists_strict(path: StrOrBytesPath) -> bool: ... def supports_ipv6() -> bool: ... -def parse_environ_block(data): ... +def parse_environ_block(data: str) -> dict[str, str]: ... def sockfam_to_enum(num: int) -> AddressFamily: ... def socktype_to_enum(num: int) -> SocketKind: ... @overload -def conn_to_ntuple(fd: int, fam: int, type_: int, laddr, raddr, status: str, status_map, pid: int) -> sconn: ... +def conn_to_ntuple( + fd: int, + fam: int, + type_: int, + laddr: ntp.addr | tuple[str, int] | tuple[()], + raddr: ntp.addr | tuple[str, int] | tuple[()], + status: int | str, + status_map: dict[int, str] | dict[str, str], + pid: int, +) -> ntp.sconn: ... @overload -def conn_to_ntuple(fd: int, fam: int, type_: int, laddr, raddr, status: str, status_map, pid: None = None) -> pconn: ... -def deprecated_method(replacement: str) -> Callable[[_Func], _Func]: ... +def conn_to_ntuple( + fd: int, + fam: int, + type_: int, + laddr: ntp.addr | tuple[str, int] | tuple[()], + raddr: ntp.addr | tuple[str, int] | tuple[()], + status: int | str, + status_map: dict[int, str] | dict[str, str], + pid: None = None, +) -> ntp.pconn: ... +def deprecated_method(replacement: str) -> Callable[[Callable[_P, _R]], Callable[_P, _R]]: ... 
class _WrapNumbers: - lock: Any - cache: Any - reminders: Any - reminder_keys: Any + lock: threading.Lock + cache: dict[str, dict[str, tuple[int, ...]]] + reminders: dict[str, defaultdict[Incomplete, int]] + reminder_keys: dict[str, defaultdict[Incomplete, set[Incomplete]]] def __init__(self) -> None: ... - def run(self, input_dict, name): ... - def cache_clear(self, name=None) -> None: ... - def cache_info(self): ... - -def wrap_numbers(input_dict, name: str): ... -def open_binary(fname): ... -def open_text(fname): ... -def cat(fname, fallback=..., _open=...): ... -def bcat(fname, fallback=...): ... + def run(self, input_dict: dict[str, tuple[int, ...]], name: str) -> dict[str, tuple[int, ...]]: ... + def cache_clear(self, name: str | None = None) -> None: ... + def cache_info( + self, + ) -> tuple[ + dict[str, dict[str, tuple[int, ...]]], + dict[str, defaultdict[Incomplete, int]], + dict[str, defaultdict[Incomplete, set[Incomplete]]], + ]: ... + +def wrap_numbers(input_dict: dict[str, tuple[int, ...]], name: str) -> dict[str, tuple[int, ...]]: ... +def open_binary(fname: FileDescriptorOrPath) -> BinaryIO: ... +def open_text(fname: FileDescriptorOrPath) -> io.TextIOWrapper: ... +@overload +def cat(fname: FileDescriptorOrPath, _open: Callable[[FileDescriptorOrPath], io.TextIOWrapper] = ...) -> str: ... +@overload +def cat( + fname: FileDescriptorOrPath, fallback: _T = ..., _open: Callable[[FileDescriptorOrPath], io.TextIOWrapper] = ... +) -> str | _T: ... +@overload +def bcat(fname: FileDescriptorOrPath) -> str: ... +@overload +def bcat(fname: FileDescriptorOrPath, fallback: _T = ...) -> str | _T: ... def bytes2human(n: int, format: str = "%(value).1f%(symbol)s") -> str: ... def get_procfs_path() -> str: ... -def term_supports_colors(file: SupportsWrite[str] = ...) -> bool: ... +def decode(s: bytes) -> str: ... +def term_supports_colors(file: SupportsWrite[str] = sys.stdout) -> bool: ... def hilite(s: str, color: str | None = None, bold: bool = False) -> str: ... 
-def print_color(s: str, color: str | None = None, bold: bool = False, file: SupportsWrite[str] = ...) -> None: ... -def debug(msg) -> None: ... +def print_color(s: str, color: str | None = None, bold: bool = False, file: SupportsWrite[str] = sys.stdout) -> None: ... +def debug(msg: str | Exception) -> None: ... __all__ = [ # OS constants @@ -330,26 +223,6 @@ __all__ = [ "ENCODING", "ENCODING_ERRS", "AF_INET6", - # named tuples - "pconn", - "pcputimes", - "pctxsw", - "pgids", - "pio", - "pionice", - "popenfile", - "pthread", - "puids", - "sconn", - "scpustats", - "sdiskio", - "sdiskpart", - "sdiskusage", - "snetio", - "snicaddr", - "snicstats", - "sswap", - "suser", # utility functions "conn_tmap", "deprecated_method", diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_ntuples.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_ntuples.pyi new file mode 100644 index 0000000000..b61b6ac2db --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_ntuples.pyi @@ -0,0 +1,384 @@ +import sys +from _typeshed import Incomplete +from socket import AddressFamily, SocketKind +from typing import Any, NamedTuple + +# All named tuples are defined in this file, but due to the inability to detect some platforms, +# it was decided to store the correct named tuples inside platform-specific files. 
+ +class sswap(NamedTuple): + total: int + used: int + free: int + percent: float + sin: int + sout: int + +class sdiskusage(NamedTuple): + total: int + used: int + free: int + percent: float + +# redefine for linux: +if sys.platform != "linux": + class sdiskio(NamedTuple): + read_count: int + write_count: int + read_bytes: int + write_bytes: int + read_time: int + write_time: int + +class sdiskpart(NamedTuple): + device: str + mountpoint: str + fstype: str + opts: str + +class snetio(NamedTuple): + bytes_sent: int + bytes_recv: int + packets_sent: int + packets_recv: int + errin: int + errout: int + dropin: int + dropout: int + +class suser(NamedTuple): + name: str + terminal: str | None + host: str | None + started: float + pid: str + +class sconn(NamedTuple): + fd: int + family: AddressFamily + type: SocketKind + laddr: addr | tuple[()] + raddr: addr | tuple[()] + status: str + pid: int | None + +class snicaddr(NamedTuple): + family: AddressFamily + address: str + netmask: str | None + broadcast: str | None + ptp: str | None + +class snicstats(NamedTuple): + isup: bool + duplex: int + speed: int + mtu: int + flags: str + +class scpustats(NamedTuple): + ctx_switches: int + interrupts: int + soft_interrupts: int + syscalls: int + +class scpufreq(NamedTuple): + current: float + min: float + max: float + +class shwtemp(NamedTuple): + label: str + current: float + high: float | None + critical: float | None + +class sbattery(NamedTuple): + percent: int + secsleft: int + power_plugged: bool + +class sfan(NamedTuple): + label: str + current: int + +if sys.platform == "win32": + class pheap(NamedTuple): + heap_used: Incomplete + mmap_used: Incomplete + heap_count: Incomplete + +else: + # if LINUX or MACOS or BSD: + class pheap(NamedTuple): + heap_used: Incomplete + mmap_used: Incomplete + +# redefine for linux: +if sys.platform != "linux": + class pcputimes(NamedTuple): + user: float + system: float + children_user: float + children_system: float + + class 
popenfile(NamedTuple): + path: str + fd: int + +class pthread(NamedTuple): + id: int + user_time: float + system_time: float + +class puids(NamedTuple): + real: int + effective: int + saved: int + +class pgids(NamedTuple): + real: int + effective: int + saved: int + +# redefine for linux and windows: +if sys.platform != "linux" and sys.platform != "win32": + class pio(NamedTuple): + read_count: int + write_count: int + read_bytes: int + write_bytes: int + +class pionice(NamedTuple): + ioclass: int + value: int + +class pctxsw(NamedTuple): + voluntary: int + involuntary: int + +class pconn(NamedTuple): + fd: int + family: AddressFamily + type: SocketKind + laddr: addr + raddr: addr + status: str + +class addr(NamedTuple): + ip: str + port: int + +if sys.platform == "linux": + class scputimes(NamedTuple): + # Note: scputimes has different fields depending on exactly how Linux + # is setup, but we'll include the "complete" set of fields + user: float + nice: float + system: float + idle: float + iowait: float + irq: float + softirq: float + steal: float + guest: float + guest_nice: float + + class svmem(NamedTuple): + total: int + available: int + percent: float + used: int + free: int + active: int + inactive: int + buffers: int + cached: int + shared: int + slab: int + + class sdiskio(NamedTuple): + read_count: int + write_count: int + read_bytes: int + write_bytes: int + read_time: int + write_time: int + read_merged_count: int + write_merged_count: int + busy_time: int + + class popenfile(NamedTuple): + path: str + fd: int + position: int + mode: str + flags: int + + class pmem(NamedTuple): + rss: int + vms: int + shared: int + text: int + lib: int + data: int + dirty: int + + class pfullmem(NamedTuple): + rss: int + vms: int + shared: int + text: int + lib: int + data: int + dirty: int + uss: int + pss: int + swap: int + + class pmmap_grouped(NamedTuple): + path: Incomplete + rss: Incomplete + size: Incomplete + pss: Incomplete + shared_clean: Incomplete + 
shared_dirty: Incomplete + private_clean: Incomplete + private_dirty: Incomplete + referenced: Incomplete + anonymous: Incomplete + swap: Incomplete + + class pmmap_ext(NamedTuple): + addr: Incomplete + perms: Incomplete + path: Incomplete + rss: Incomplete + size: Incomplete + pss: Incomplete + shared_clean: Incomplete + shared_dirty: Incomplete + private_clean: Incomplete + private_dirty: Incomplete + referenced: Incomplete + anonymous: Incomplete + swap: Incomplete + + class pio(NamedTuple): + read_count: int + write_count: int + read_bytes: int + write_bytes: int + read_chars: int + write_chars: int + + class pcputimes(NamedTuple): + user: float + system: float + children_user: float + children_system: float + iowait: float + +elif sys.platform == "win32": + class scputimes(NamedTuple): + user: float + system: float + idle: float + interrupt: float + dpc: float + + class svmem(NamedTuple): + total: int + available: int + percent: float + used: int + free: int + + class pmem(NamedTuple): + rss: int + vms: int + num_page_faults: int + peak_wset: int + wset: int + peak_paged_pool: int + paged_pool: int + peak_nonpaged_pool: int + nonpaged_pool: int + pagefile: int + peak_pagefile: int + private: int + + class pfullmem(NamedTuple): + rss: int + vms: int + num_page_faults: int + peak_wset: int + wset: int + peak_paged_pool: int + paged_pool: int + peak_nonpaged_pool: int + nonpaged_pool: int + pagefile: int + peak_pagefile: int + private: int + uss: int + + class pmmap_grouped(NamedTuple): + path: Incomplete + rss: Incomplete + + class pmmap_ext(NamedTuple): + addr: Incomplete + perms: Incomplete + path: Incomplete + rss: Incomplete + + class pio(NamedTuple): + read_count: int + write_count: int + read_bytes: int + write_bytes: int + other_count: int + other_bytes: int + +elif sys.platform == "darwin": + class scputimes(NamedTuple): + user: float + nice: float + system: float + idle: float + + class svmem(NamedTuple): + total: int + available: int + percent: float + 
used: int + free: int + active: int + inactive: int + wired: int + + class pmem(NamedTuple): + rss: int + vms: int + pfaults: int + pageins: int + + class pfullmem(NamedTuple): + rss: int + vms: int + pfaults: int + pageins: int + uss: int + +else: + # See _psbsd.pyi, _pssunos.pyi or _psaix.pyi + # BSD: svmem, scputimes, pmem, pfullmem, pcputimes, pmmap_grouped, pmmap_ext, sdiskio + # SUNOS: scputimes, pcputimes, svmem, pmem, pfullmem, pmmap_grouped, pmmap_ext + # AIX: pmem, pfullmem, scputimes, svmem + + scputimes = Incomplete + + class pmem(Any): ... + class pfullmem(Any): ... + class svmem(Any): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psaix.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psaix.pyi index 7192f40df8..aab5c6eb32 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psaix.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psaix.pyi @@ -1,98 +1,112 @@ -from _typeshed import Incomplete -from typing import NamedTuple +import sys -from psutil._common import ( - NIC_DUPLEX_FULL as NIC_DUPLEX_FULL, - NIC_DUPLEX_HALF as NIC_DUPLEX_HALF, - NIC_DUPLEX_UNKNOWN as NIC_DUPLEX_UNKNOWN, - AccessDenied as AccessDenied, - NoSuchProcess as NoSuchProcess, - ZombieProcess as ZombieProcess, - conn_to_ntuple as conn_to_ntuple, - get_procfs_path as get_procfs_path, - memoize_when_activated as memoize_when_activated, - usage_percent as usage_percent, -) +# sys.platform.startswith("aix"): +if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin": + from collections.abc import Callable + from typing import Final, Literal, NamedTuple, TypeVar, overload + from typing_extensions import ParamSpec -__extra__all__: Incomplete -HAS_THREADS: Incomplete -HAS_NET_IO_COUNTERS: Incomplete -HAS_PROC_IO_COUNTERS: Incomplete -PAGE_SIZE: Incomplete -AF_LINK: Incomplete -PROC_STATUSES: Incomplete -TCP_STATUSES: Incomplete -proc_info_map: Incomplete + 
from psutil._common import ( + NIC_DUPLEX_FULL as NIC_DUPLEX_FULL, + NIC_DUPLEX_HALF as NIC_DUPLEX_HALF, + NIC_DUPLEX_UNKNOWN as NIC_DUPLEX_UNKNOWN, + AccessDenied as AccessDenied, + NoSuchProcess as NoSuchProcess, + ZombieProcess as ZombieProcess, + conn_to_ntuple as conn_to_ntuple, + get_procfs_path as get_procfs_path, + memoize_when_activated as memoize_when_activated, + usage_percent as usage_percent, + ) -class pmem(NamedTuple): - rss: Incomplete - vms: Incomplete + from . import _ntuples as ntp, _psposix, _psutil_aix -pfullmem = pmem + __extra__all__: Final[list[str]] + HAS_THREADS: Final[bool] + HAS_NET_IO_COUNTERS: Final[bool] + HAS_PROC_IO_COUNTERS: Final[bool] + PAGE_SIZE: Final[int] + AF_LINK: Final = 18 + PROC_STATUSES: Final[dict[int, str]] + TCP_STATUSES: Final[dict[int, str]] + proc_info_map: Final[dict[str, int]] -class scputimes(NamedTuple): - user: float - system: float - idle: float - iowait: float + class pmem(NamedTuple): + rss: int + vms: int -class svmem(NamedTuple): - total: Incomplete - available: Incomplete - percent: Incomplete - used: Incomplete - free: Incomplete + pfullmem = pmem -def virtual_memory(): ... -def swap_memory(): ... -def cpu_times(): ... -def per_cpu_times(): ... -def cpu_count_logical(): ... -def cpu_count_cores(): ... -def cpu_stats(): ... + class scputimes(NamedTuple): + user: float + system: float + idle: float + iowait: float -disk_io_counters: Incomplete -disk_usage: Incomplete + class svmem(NamedTuple): + total: int + available: int + percent: float + used: int + free: int -def disk_partitions(all: bool = ...): ... + _P = ParamSpec("_P") + _R = TypeVar("_R") -net_if_addrs: Incomplete -net_io_counters: Incomplete + def virtual_memory() -> svmem: ... + def swap_memory() -> ntp.sswap: ... + def cpu_times() -> scputimes: ... + def per_cpu_times() -> list[scputimes]: ... + def cpu_count_logical() -> int | None: ... + def cpu_count_cores() -> int | None: ... + def cpu_stats() -> ntp.scpustats: ... 
-def net_connections(kind, _pid: int = ...): ... -def net_if_stats(): ... -def boot_time(): ... -def users(): ... -def pids(): ... -def pid_exists(pid): ... -def wrap_exceptions(fun): ... + disk_io_counters = _psutil_aix.disk_io_counters + disk_usage = _psposix.disk_usage -class Process: - pid: Incomplete - def __init__(self, pid) -> None: ... - def oneshot_enter(self) -> None: ... - def oneshot_exit(self) -> None: ... - def name(self): ... - def exe(self): ... - def cmdline(self): ... - def environ(self): ... - def create_time(self): ... - def num_threads(self): ... - def threads(self): ... - def net_connections(self, kind: str = ...): ... - def nice_get(self): ... - def nice_set(self, value): ... - def ppid(self): ... - def uids(self): ... - def gids(self): ... - def cpu_times(self): ... - def terminal(self): ... - def cwd(self): ... - def memory_info(self): ... - memory_full_info: Incomplete - def status(self): ... - def open_files(self): ... - def num_fds(self): ... - def num_ctx_switches(self): ... - def wait(self, timeout: Incomplete | None = ...): ... - def io_counters(self): ... + def disk_partitions(all: bool = False) -> list[ntp.sdiskpart]: ... + + net_if_addrs = _psutil_aix.net_if_addrs + net_io_counters = _psutil_aix.net_io_counters + + @overload + def net_connections(kind: str, _pid: Literal[-1] = -1) -> list[ntp.sconn]: ... + @overload + def net_connections(kind: str, _pid: int = -1) -> list[ntp.pconn]: ... + def net_if_stats() -> dict[str, ntp.snicstats]: ... + def boot_time() -> float: ... + def users() -> list[ntp.suser]: ... + def pids() -> list[int]: ... + def pid_exists(pid: int | str) -> bool: ... + def wrap_exceptions(fun: Callable[_P, _R]) -> Callable[_P, _R]: ... + + class Process: + __slots__ = ["_cache", "_name", "_ppid", "_procfs_path", "pid"] + pid: int + def __init__(self, pid: int) -> None: ... + def oneshot_enter(self) -> None: ... + def oneshot_exit(self) -> None: ... + def name(self) -> str: ... + def exe(self) -> str: ... 
+ def cmdline(self) -> list[str]: ... + def environ(self) -> dict[str, str]: ... + def create_time(self) -> float: ... + def num_threads(self) -> int: ... + def threads(self) -> list[ntp.pthread]: ... + def net_connections(self, kind: str = "inet") -> list[ntp.pconn]: ... + def nice_get(self) -> int: ... + def nice_set(self, value: int) -> None: ... + def ppid(self) -> int: ... + def uids(self) -> ntp.puids: ... + def gids(self) -> ntp.puids: ... + def cpu_times(self) -> ntp.pcputimes: ... + def terminal(self) -> str | None: ... + def cwd(self) -> str: ... + def memory_info(self) -> pmem: ... + memory_full_info = memory_info + def status(self) -> str: ... + def open_files(self) -> list[ntp.popenfile]: ... + def num_fds(self) -> int: ... + def num_ctx_switches(self) -> ntp.pctxsw: ... + def wait(self, timeout: float | None = None) -> int | None: ... + def io_counters(self) -> ntp.pio: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psbsd.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psbsd.pyi index 4d17285a07..36297b70fc 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psbsd.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psbsd.pyi @@ -1,168 +1,213 @@ -from _typeshed import Incomplete -from contextlib import AbstractContextManager -from typing import Any, NamedTuple - -from psutil._common import ( - FREEBSD as FREEBSD, - NETBSD as NETBSD, - OPENBSD as OPENBSD, - AccessDenied as AccessDenied, - NoSuchProcess as NoSuchProcess, - ZombieProcess as ZombieProcess, - conn_tmap as conn_tmap, - conn_to_ntuple as conn_to_ntuple, - memoize as memoize, - pio, - usage_percent as usage_percent, -) - -__extra__all__: Any -PROC_STATUSES: Any -TCP_STATUSES: Any -PAGESIZE: Any -AF_LINK: Any -HAS_PER_CPU_TIMES: Any -HAS_PROC_NUM_THREADS: Any -HAS_PROC_OPEN_FILES: Any -HAS_PROC_NUM_FDS: Any -kinfo_proc_map: Any - -class svmem(NamedTuple): - total: int - available: int - 
percent: float - used: int - free: int - active: int - inactive: int - buffers: int - cached: int - shared: int - wired: int - -class scputimes(NamedTuple): - user: float - nice: float - system: float - idle: float - irq: float - -class pmem(NamedTuple): - rss: Any - vms: Any - text: Any - data: Any - stack: Any - -pfullmem = pmem - -class pcputimes(NamedTuple): - user: Any - system: Any - children_user: Any - children_system: Any - -class pmmap_grouped(NamedTuple): - path: Any - rss: Any - private: Any - ref_count: Any - shadow_count: Any - -class pmmap_ext(NamedTuple): - addr: Any - perms: Any - path: Any - rss: Any - private: Any - ref_count: Any - shadow_count: Any - -class sdiskio(NamedTuple): - read_count: Any - write_count: Any - read_bytes: Any - write_bytes: Any - read_time: Any - write_time: Any - busy_time: Any - -def virtual_memory() -> svmem: ... -def swap_memory(): ... -def cpu_times(): ... -def per_cpu_times(): ... -def cpu_count_logical(): ... -def cpu_count_cores() -> int | None: ... -def cpu_stats(): ... -def disk_partitions(all: bool = ...): ... - -disk_usage: Any -disk_io_counters: Any -net_io_counters: Any -net_if_addrs: Any - -def net_if_stats(): ... -def net_connections(kind): ... -def sensors_battery(): ... -def sensors_temperatures(): ... -def cpu_freq(): ... -def boot_time(): ... -def users(): ... -def pids(): ... -def pid_exists(pid): ... -def is_zombie(pid): ... -def wrap_exceptions(fun): ... -def wrap_exceptions_procfs(inst) -> AbstractContextManager[None]: ... - -class Process: - pid: Any - def __init__(self, pid) -> None: ... - def oneshot(self): ... - def oneshot_enter(self) -> None: ... - def oneshot_exit(self) -> None: ... - def name(self): ... - def exe(self): ... - def cmdline(self): ... - def environ(self): ... - def terminal(self): ... - def ppid(self): ... - def uids(self): ... - def gids(self): ... - def cpu_times(self): ... - def cpu_num(self): ... - def memory_info(self): ... 
- memory_full_info: Any - def create_time(self): ... - def num_threads(self): ... - def num_ctx_switches(self): ... - def threads(self): ... - def net_connections(self, kind: str = ...): ... - def wait(self, timeout: Incomplete | None = ...): ... - def nice_get(self): ... - def nice_set(self, value): ... - def status(self): ... - def io_counters(self) -> pio: ... - def cwd(self): ... - - class nt_mmap_grouped(NamedTuple): - path: Any - rss: Any - private: Any - ref_count: Any - shadow_count: Any - - class nt_mmap_ext(NamedTuple): - addr: Any - perms: Any - path: Any - rss: Any - private: Any - ref_count: Any - shadow_count: Any - - def open_files(self): ... - def num_fds(self): ... - def cpu_affinity_get(self): ... - def cpu_affinity_set(self, cpus) -> None: ... - def memory_maps(self): ... - def rlimit(self, resource, limits: Incomplete | None = ...): ... +import sys + +# sys.platform.startswith(("freebsd", "midnightbsd", "openbsd", "netbsd")): +if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin": + from _typeshed import Incomplete + from collections import defaultdict + from collections.abc import Callable + from contextlib import AbstractContextManager + from typing import Final, NamedTuple, TypeVar, overload + from typing_extensions import ParamSpec + + from psutil._common import ( + FREEBSD as FREEBSD, + NETBSD as NETBSD, + OPENBSD as OPENBSD, + AccessDenied as AccessDenied, + NoSuchProcess as NoSuchProcess, + ZombieProcess as ZombieProcess, + conn_tmap as conn_tmap, + conn_to_ntuple as conn_to_ntuple, + memoize as memoize, + usage_percent as usage_percent, + ) + + from . 
import _ntuples as ntp, _psposix, _psutil_bsd + + _P = ParamSpec("_P") + _R = TypeVar("_R") + + __extra__all__: Final[list[str]] + PROC_STATUSES: Final[dict[int, str]] + TCP_STATUSES: Final[dict[int, str]] + PAGESIZE: Final[int] + AF_LINK: Final = _psutil_bsd.AF_LINK + HAS_PROC_NUM_THREADS: Final[bool] + kinfo_proc_map: Final[dict[str, int]] + + class svmem(NamedTuple): + total: int + available: int + percent: float + used: int + free: int + active: int + inactive: int + buffers: int + cached: int + shared: int + wired: int + + class scputimes(NamedTuple): + user: float + nice: float + system: float + idle: float + irq: float + + class pmem(NamedTuple): + rss: int + vms: int + text: int + data: int + stack: int + + pfullmem = pmem + + class pcputimes(NamedTuple): + user: float + system: float + children_user: float + children_system: float + + class pmmap_grouped(NamedTuple): + path: Incomplete + rss: Incomplete + private: Incomplete + ref_count: Incomplete + shadow_count: Incomplete + + class pmmap_ext(NamedTuple): + addr: Incomplete + perms: Incomplete + path: Incomplete + rss: Incomplete + private: Incomplete + ref_count: Incomplete + shadow_count: Incomplete + + class sdiskio(NamedTuple): + read_count: Incomplete + write_count: Incomplete + read_bytes: Incomplete + write_bytes: Incomplete + read_time: Incomplete + write_time: Incomplete + busy_time: Incomplete + + def virtual_memory() -> svmem: ... + def swap_memory() -> ntp.sswap: ... + heap_info = _psutil_bsd.heap_info # only FreeBSD and NetBSD + heap_trim = _psutil_bsd.heap_trim # only FreeBSD and NetBSD + def cpu_times() -> scputimes: ... + def per_cpu_times() -> list[scputimes]: ... + def cpu_count_logical() -> int | None: ... + def cpu_count_cores() -> int | None: ... + def cpu_stats() -> ntp.scpustats: ... + def disk_partitions(all: bool = False) -> list[ntp.sdiskpart]: ... 
+ + disk_usage = _psposix.disk_usage + disk_io_counters = _psutil_bsd.disk_io_counters + net_io_counters = _psutil_bsd.net_io_counters + net_if_addrs = _psutil_bsd.net_if_addrs + + def net_if_stats() -> dict[str, ntp.snicstats]: ... + def net_connections(kind: str) -> list[ntp.sconn]: ... + def sensors_battery() -> ntp.sbattery | None: ... # only FreeBSD + def sensors_temperatures() -> defaultdict[str, list[ntp.shwtemp]]: ... # only FreeBSD + def cpu_freq() -> list[ntp.scpufreq]: ... # only FreeBSD and OpenBSD + def boot_time() -> float: ... + def users() -> list[ntp.suser]: ... + + INIT_BOOT_TIME: Final[float] # only NetBSD + + def adjust_proc_create_time(ctime: float) -> float: ... # only NetBSD + def pids() -> list[int]: ... + def pid_exists(pid: int) -> bool: ... + def wrap_exceptions(fun: Callable[_P, _R]) -> Callable[_P, _R]: ... + def wrap_exceptions_procfs(inst: Process) -> AbstractContextManager[None]: ... + + class Process: + __slots__ = ["_cache", "_name", "_ppid", "pid"] + pid: int + def __init__(self, pid: int) -> None: ... + def oneshot( + self, + ) -> tuple[ + int, + int, + int, + int, + int, + int, + int, + int, + int, + float, + int, + int, + int, + int, + float, + float, + float, + float, + int, + int, + int, + int, + int, + int, + str, + ]: ... + def oneshot_enter(self) -> None: ... + def oneshot_exit(self) -> None: ... + def name(self) -> str: ... + def exe(self) -> str: ... + def cmdline(self) -> list[str]: ... + def environ(self) -> dict[str, str]: ... + def terminal(self) -> str | None: ... + def ppid(self) -> int: ... + def uids(self) -> ntp.puids: ... + def gids(self) -> ntp.pgids: ... + def cpu_times(self) -> ntp.pcputimes: ... + def cpu_num(self) -> int: ... # only FreeBSD + def memory_info(self) -> pmem: ... + memory_full_info = memory_info + def create_time(self, monotonic: bool = False) -> float: ... + def num_threads(self) -> int: ... + def num_ctx_switches(self) -> ntp.pctxsw: ... + def threads(self) -> list[ntp.pthread]: ... 
+ def net_connections(self, kind: str = "inet") -> list[ntp.pconn]: ... + def wait(self, timeout: float | None = None) -> int | None: ... + def nice_get(self) -> int: ... + def nice_set(self, value: int) -> None: ... + def status(self) -> str: ... + def io_counters(self) -> ntp.pio: ... + def cwd(self) -> str: ... + + class nt_mmap_grouped(NamedTuple): + path: Incomplete + rss: Incomplete + private: Incomplete + ref_count: Incomplete + shadow_count: Incomplete + + class nt_mmap_ext(NamedTuple): + addr: Incomplete + perms: Incomplete + path: Incomplete + rss: Incomplete + private: Incomplete + ref_count: Incomplete + shadow_count: Incomplete + + def open_files(self) -> list[ntp.popenfile]: ... + def num_fds(self) -> int: ... + def cpu_affinity_get(self) -> list[int]: ... # only FreeBSD + def cpu_affinity_set(self, cpus: list[int]) -> None: ... # only FreeBSD + def memory_maps(self) -> list[tuple[str, str, str, int, int, int, int]]: ... # only FreeBSD + @overload + def rlimit(self, resource: int, limits: tuple[int, int]) -> None: ... # only FreeBSD + @overload + def rlimit(self, resource: int, limits: None = None) -> tuple[int, int]: ... 
# only FreeBSD diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_pslinux.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_pslinux.pyi index 1a947fc2b8..7d538282d8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_pslinux.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_pslinux.pyi @@ -1,237 +1,181 @@ -import enum -from _typeshed import Incomplete -from typing import Any, NamedTuple - -from psutil._common import ( - NIC_DUPLEX_FULL as NIC_DUPLEX_FULL, - NIC_DUPLEX_HALF as NIC_DUPLEX_HALF, - NIC_DUPLEX_UNKNOWN as NIC_DUPLEX_UNKNOWN, - AccessDenied as AccessDenied, - NoSuchProcess as NoSuchProcess, - ZombieProcess as ZombieProcess, - isfile_strict as isfile_strict, - parse_environ_block as parse_environ_block, - path_exists_strict as path_exists_strict, - supports_ipv6 as supports_ipv6, - usage_percent as usage_percent, -) - -__extra__all__: Any -POWER_SUPPLY_PATH: str -HAS_PROC_SMAPS: bool -HAS_PROC_SMAPS_ROLLUP: bool -HAS_PROC_IO_PRIORITY: Any -HAS_CPU_AFFINITY: Any -CLOCK_TICKS: Any -PAGESIZE: Any -BOOT_TIME: Any -LITTLE_ENDIAN: Any -DISK_SECTOR_SIZE: int -AF_LINK: Any -AddressFamily: Any -IOPRIO_CLASS_NONE: int -IOPRIO_CLASS_RT: int -IOPRIO_CLASS_BE: int -IOPRIO_CLASS_IDLE: int - -class IOPriority(enum.IntEnum): - IOPRIO_CLASS_NONE = 0 - IOPRIO_CLASS_RT = 1 - IOPRIO_CLASS_BE = 2 - IOPRIO_CLASS_IDLE = 3 - -PROC_STATUSES: Any -TCP_STATUSES: Any - -class svmem(NamedTuple): - total: int - available: int - percent: float - used: int - free: int - active: int - inactive: int - buffers: int - cached: int - shared: int - slab: int - -class sdiskio(NamedTuple): - read_count: Any - write_count: Any - read_bytes: Any - write_bytes: Any - read_time: Any - write_time: Any - read_merged_count: Any - write_merged_count: Any - busy_time: Any - -class popenfile(NamedTuple): - path: Any - fd: Any - position: Any - mode: Any - flags: Any - -class pmem(NamedTuple): - rss: Any - 
vms: Any - shared: Any - text: Any - lib: Any - data: Any - dirty: Any - -class pfullmem(NamedTuple): - rss: Incomplete - vms: Incomplete - shared: Incomplete - text: Incomplete - lib: Incomplete - data: Incomplete - dirty: Incomplete - uss: Incomplete - pss: Incomplete - swap: Incomplete - -class pmmap_grouped(NamedTuple): - path: Any - rss: Any - size: Any - pss: Any - shared_clean: Any - shared_dirty: Any - private_clean: Any - private_dirty: Any - referenced: Any - anonymous: Any - swap: Any - -pmmap_ext: Any - -class pio(NamedTuple): - read_count: Any - write_count: Any - read_bytes: Any - write_bytes: Any - read_chars: Any - write_chars: Any - -class pcputimes(NamedTuple): - user: float - system: float - children_user: float - children_system: float - iowait: float - -def readlink(path): ... -def file_flags_to_mode(flags): ... -def is_storage_device(name): ... -def set_scputimes_ntuple(procfs_path) -> None: ... - -class scputimes(NamedTuple): - # Note: scputimes has different fields depending on exactly how Linux - # is setup, but we'll include the "complete" set of fields - user: float - nice: float - system: float - idle: float - iowait: float - irq: float - softirq: float - steal: float - guest: float - guest_nice: float - -def calculate_avail_vmem(mems): ... -def virtual_memory() -> svmem: ... -def swap_memory(): ... -def cpu_times(): ... -def per_cpu_times(): ... -def cpu_count_logical(): ... -def cpu_count_cores() -> int | None: ... -def cpu_stats(): ... -def cpu_freq(): ... - -net_if_addrs: Any - -class _Ipv6UnsupportedError(Exception): ... - -class NetConnections: - tmap: Any - def __init__(self) -> None: ... - def get_proc_inodes(self, pid): ... - def get_all_inodes(self): ... - @staticmethod - def decode_address(addr, family): ... - @staticmethod - def process_inet(file, family, type_, inodes, filter_pid: Incomplete | None = ...) -> None: ... - @staticmethod - def process_unix(file, family, inodes, filter_pid: Incomplete | None = ...) -> None: ... 
- def retrieve(self, kind, pid: Incomplete | None = ...): ... - -def net_connections(kind: str = ...): ... -def net_io_counters(): ... -def net_if_stats(): ... - -disk_usage: Any - -def disk_io_counters(perdisk: bool = ...): ... - -class RootFsDeviceFinder: - major: Incomplete - minor: Incomplete - def __init__(self) -> None: ... - def ask_proc_partitions(self): ... - def ask_sys_dev_block(self): ... - def ask_sys_class_block(self): ... - def find(self): ... - -def disk_partitions(all: bool = ...): ... -def sensors_temperatures(): ... -def sensors_fans(): ... -def sensors_battery(): ... -def users(): ... -def boot_time(): ... -def pids(): ... -def pid_exists(pid): ... -def ppid_map(): ... -def wrap_exceptions(fun): ... - -class Process: - pid: Any - def __init__(self, pid) -> None: ... - def oneshot_enter(self) -> None: ... - def oneshot_exit(self) -> None: ... - def name(self): ... - def exe(self): ... - def cmdline(self): ... - def environ(self): ... - def terminal(self): ... - def io_counters(self) -> pio: ... - def cpu_times(self): ... - def cpu_num(self): ... - def wait(self, timeout: Incomplete | None = ...): ... - def create_time(self): ... - def memory_info(self): ... - def memory_full_info(self): ... - def memory_maps(self): ... - def cwd(self): ... - def num_ctx_switches(self, _ctxsw_re=...): ... - def num_threads(self, _num_threads_re=...): ... - def threads(self): ... - def nice_get(self): ... - def nice_set(self, value): ... - def cpu_affinity_get(self): ... - def cpu_affinity_set(self, cpus) -> None: ... - def ionice_get(self): ... - def ionice_set(self, ioclass, value): ... - def rlimit(self, resource_, limits: Incomplete | None = ...): ... - def status(self): ... - def open_files(self): ... - def net_connections(self, kind: str = ...): ... - def num_fds(self): ... - def ppid(self): ... - def uids(self, _uids_re=...): ... - def gids(self, _gids_re=...): ... 
+import sys + +if sys.platform == "linux": + import enum + import re + from _typeshed import FileDescriptorOrPath + from collections import defaultdict + from collections.abc import Callable, Generator, Sequence + from typing import Final, TypeVar, overload + from typing_extensions import ParamSpec + + from psutil._common import ( + ENCODING as ENCODING, + NIC_DUPLEX_FULL as NIC_DUPLEX_FULL, + NIC_DUPLEX_HALF as NIC_DUPLEX_HALF, + NIC_DUPLEX_UNKNOWN as NIC_DUPLEX_UNKNOWN, + AccessDenied as AccessDenied, + NoSuchProcess as NoSuchProcess, + ZombieProcess as ZombieProcess, + bcat as bcat, + cat as cat, + debug as debug, + decode as decode, + get_procfs_path as get_procfs_path, + isfile_strict as isfile_strict, + memoize as memoize, + memoize_when_activated as memoize_when_activated, + open_binary as open_binary, + open_text as open_text, + parse_environ_block as parse_environ_block, + path_exists_strict as path_exists_strict, + supports_ipv6 as supports_ipv6, + usage_percent as usage_percent, + ) + + from . 
import _ntuples as ntp, _psposix, _psutil_linux + + _P = ParamSpec("_P") + _R = TypeVar("_R") + + __extra__all__: Final[list[str]] + POWER_SUPPLY_PATH: Final = "/sys/class/power_supply" + HAS_PROC_SMAPS: Final[bool] + HAS_PROC_SMAPS_ROLLUP: Final[bool] + HAS_PROC_IO_PRIORITY: Final[bool] + HAS_CPU_AFFINITY: Final[bool] + CLOCK_TICKS: Final[int] + PAGESIZE: Final[int] + LITTLE_ENDIAN: Final[bool] + UNSET: object + DISK_SECTOR_SIZE: Final = 512 + + class AddressFamily(enum.IntEnum): + AF_LINK = 17 # = socket.AF_PACKET + + AF_LINK: Final = AddressFamily.AF_LINK + + class IOPriority(enum.IntEnum): + IOPRIO_CLASS_NONE = 0 + IOPRIO_CLASS_RT = 1 + IOPRIO_CLASS_BE = 2 + IOPRIO_CLASS_IDLE = 3 + + IOPRIO_CLASS_NONE: Final = IOPriority.IOPRIO_CLASS_NONE + IOPRIO_CLASS_RT: Final = IOPriority.IOPRIO_CLASS_RT + IOPRIO_CLASS_BE: Final = IOPriority.IOPRIO_CLASS_BE + IOPRIO_CLASS_IDLE: Final = IOPriority.IOPRIO_CLASS_IDLE + + PROC_STATUSES: Final[dict[str, str]] + TCP_STATUSES: Final[dict[str, str]] + + def readlink(path: str) -> str: ... + def file_flags_to_mode(flags: int) -> str: ... + def is_storage_device(name: str) -> bool: ... + def _scputimes_ntuple(procfs_path: str) -> type[ntp.scputimes]: ... + scputimes = ntp.scputimes + def calculate_avail_vmem(mems: dict[bytes, int]) -> int: ... + def virtual_memory() -> ntp.svmem: ... + def swap_memory() -> ntp.sswap: ... + heap_info = _psutil_linux.heap_info + heap_trim = _psutil_linux.heap_trim + def cpu_times() -> ntp.scputimes: ... + def per_cpu_times() -> list[ntp.scputimes]: ... + def cpu_count_logical() -> int | None: ... + def cpu_count_cores() -> int | None: ... + def cpu_stats() -> ntp.scpustats: ... + def cpu_freq() -> list[ntp.scpufreq]: ... + + net_if_addrs = _psutil_linux.net_if_addrs + + class _Ipv6UnsupportedError(Exception): ... + + class NetConnections: + tmap: dict[str, tuple[tuple[str, int, int | None], ...]] + def __init__(self) -> None: ... 
+ def get_proc_inodes(self, pid: int) -> defaultdict[str, list[tuple[int, int]]]: ... + def get_all_inodes(self) -> dict[str, list[tuple[int, int]]]: ... + @staticmethod + def decode_address(addr: str, family: int) -> ntp.addr | tuple[()]: ... + @staticmethod + def process_inet( + file: str, family: int, type_: int, inodes: dict[str, list[tuple[int, int]]], filter_pid: int | None = None + ) -> Generator[tuple[int, int, int, ntp.addr | tuple[()], ntp.addr | tuple[()], str, int | None]]: ... + @staticmethod + def process_unix( + file: FileDescriptorOrPath, family: int, inodes: dict[str, list[tuple[int, int]]], filter_pid: int | None = None + ) -> Generator[tuple[int, int, int, str, str, str, int | None]]: ... + @overload + def retrieve(self, kind: str, pid: int) -> list[ntp.pconn]: ... + @overload + def retrieve(self, kind: str, pid: None = None) -> list[ntp.sconn]: ... + + def net_connections(kind: str = "inet") -> list[ntp.sconn]: ... + def net_io_counters() -> dict[str, tuple[int, int, int, int, int, int, int, int]]: ... + def net_if_stats() -> dict[str, ntp.snicstats]: ... + + disk_usage = _psposix.disk_usage + + def disk_io_counters(perdisk: bool = False) -> dict[str, tuple[int, int, int, int, int, int, int, int]]: ... + + class RootFsDeviceFinder: + __slots__ = ["major", "minor"] + major: int + minor: int + def __init__(self) -> None: ... + def ask_proc_partitions(self) -> str | None: ... + def ask_sys_dev_block(self) -> str | None: ... + def ask_sys_class_block(self) -> str | None: ... + def find(self) -> str | None: ... + + def disk_partitions(all: bool = False) -> list[ntp.sdiskpart]: ... + def sensors_temperatures() -> dict[str, list[tuple[str, float, float | None, float | None]]]: ... + def sensors_fans() -> dict[str, list[ntp.sfan]]: ... + def sensors_battery() -> ntp.sbattery | None: ... + def users() -> list[ntp.suser]: ... + def boot_time() -> float: ... + def pids() -> list[int]: ... + def pid_exists(pid: int) -> bool: ... 
+ def ppid_map() -> dict[int, int]: ... + def wrap_exceptions(fun: Callable[_P, _R]) -> Callable[_P, _R]: ... + + class Process: + __slots__ = ["_cache", "_ctime", "_name", "_ppid", "_procfs_path", "pid"] + pid: int + def __init__(self, pid: int) -> None: ... + def oneshot_enter(self) -> None: ... + def oneshot_exit(self) -> None: ... + def name(self) -> str: ... + def exe(self) -> str: ... + def cmdline(self) -> list[str]: ... + def environ(self) -> dict[str, str]: ... + def terminal(self) -> str | None: ... + def io_counters(self) -> ntp.pio: ... + def cpu_times(self) -> ntp.pcputimes: ... + def cpu_num(self) -> int: ... + def wait(self, timeout: float | None = None) -> int | None: ... + def create_time(self, monotonic: bool = False) -> float: ... + def memory_info(self) -> ntp.pmem: ... + def memory_full_info(self) -> ntp.pfullmem: ... + def memory_maps(self) -> list[tuple[str, str, str, int, int, int, int, int, int, int, int, int, int]]: ... + def cwd(self) -> str: ... + def num_ctx_switches(self, _ctxsw_re: re.Pattern[bytes] = ...) -> ntp.pctxsw: ... + def num_threads(self, _num_threads_re: re.Pattern[bytes] = ...) -> int: ... + def threads(self) -> list[ntp.pthread]: ... + def nice_get(self) -> int: ... + def nice_set(self, value: int) -> None: ... + def cpu_affinity_get(self) -> list[int]: ... + def cpu_affinity_set(self, cpus: Sequence[int]) -> None: ... + def ionice_get(self) -> ntp.pionice: ... + def ionice_set(self, ioclass: int, value: int | None) -> None: ... + @overload + def rlimit(self, resource_: int, limits: tuple[int, int]) -> None: ... + @overload + def rlimit(self, resource_: int, limits: None = None) -> tuple[int, int]: ... + def status(self) -> str: ... + def open_files(self) -> list[ntp.popenfile]: ... + def net_connections(self, kind: str = "inet") -> list[ntp.pconn]: ... + def num_fds(self) -> int: ... + def ppid(self) -> int: ... + def uids(self, _uids_re: re.Pattern[bytes] = ...) -> ntp.puids: ... 
+ def gids(self, _gids_re: re.Pattern[bytes] = ...) -> ntp.pgids: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psosx.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psosx.pyi index dee1fd6ed2..037940bd49 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psosx.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psosx.pyi @@ -1,109 +1,90 @@ -from _typeshed import Incomplete -from typing import Any, NamedTuple +import sys -from psutil._common import ( - AccessDenied as AccessDenied, - NoSuchProcess as NoSuchProcess, - ZombieProcess as ZombieProcess, - conn_tmap as conn_tmap, - conn_to_ntuple as conn_to_ntuple, - isfile_strict as isfile_strict, - parse_environ_block as parse_environ_block, - usage_percent as usage_percent, -) +if sys.platform == "darwin": + from collections.abc import Callable + from typing import Final, TypeVar + from typing_extensions import ParamSpec -__extra__all__: Any -PAGESIZE: Any -AF_LINK: Any -TCP_STATUSES: Any -PROC_STATUSES: Any -kinfo_proc_map: Any -pidtaskinfo_map: Any + from psutil._common import ( + AccessDenied as AccessDenied, + NoSuchProcess as NoSuchProcess, + ZombieProcess as ZombieProcess, + conn_tmap as conn_tmap, + conn_to_ntuple as conn_to_ntuple, + debug as debug, + isfile_strict as isfile_strict, + memoize_when_activated as memoize_when_activated, + parse_environ_block as parse_environ_block, + usage_percent as usage_percent, + ) -class scputimes(NamedTuple): - user: float - nice: float - system: float - idle: float + from . 
import _ntuples as ntp, _psposix, _psutil_osx -class svmem(NamedTuple): - total: int - available: int - percent: float - used: int - free: int - active: int - inactive: int - wired: int + _P = ParamSpec("_P") + _R = TypeVar("_R") -class pmem(NamedTuple): - rss: Any - vms: Any - pfaults: Any - pageins: Any + __extra__all__: Final[list[str]] + PAGESIZE: Final[int] + AF_LINK: Final[int] + TCP_STATUSES: Final[dict[int, str]] + PROC_STATUSES: Final[dict[int, str]] + kinfo_proc_map: Final[dict[str, int]] + pidtaskinfo_map: Final[dict[str, int]] -class pfullmem(NamedTuple): - rss: Incomplete - vms: Incomplete - pfaults: Incomplete - pageins: Incomplete - uss: Incomplete + def virtual_memory() -> ntp.svmem: ... + def swap_memory() -> ntp.sswap: ... + heap_info = _psutil_osx.heap_info + heap_trim = _psutil_osx.heap_trim + def cpu_times() -> ntp.scputimes: ... + def per_cpu_times() -> list[ntp.scputimes]: ... + def cpu_count_logical() -> int | None: ... + def cpu_count_cores() -> int | None: ... + def cpu_stats() -> ntp.scpustats: ... + def cpu_freq() -> list[ntp.scpufreq]: ... -def virtual_memory() -> svmem: ... -def swap_memory(): ... -def cpu_times(): ... -def per_cpu_times(): ... -def cpu_count_logical(): ... -def cpu_count_cores() -> int | None: ... -def cpu_stats(): ... -def cpu_freq(): ... + disk_usage = _psposix.disk_usage + disk_io_counters = _psutil_osx.disk_io_counters + def disk_partitions(all: bool = False) -> list[ntp.sdiskpart]: ... + def sensors_battery() -> ntp.sbattery | None: ... -disk_usage: Any -disk_io_counters: Any + net_io_counters = _psutil_osx.net_io_counters + net_if_addrs = _psutil_osx.net_if_addrs + def net_connections(kind: str = "inet") -> list[ntp.sconn]: ... + def net_if_stats() -> dict[str, ntp.snicstats]: ... + def boot_time() -> float: ... + INIT_BOOT_TIME: float + def adjust_proc_create_time(ctime: float) -> float: ... + def users() -> list[ntp.suser]: ... + def pids() -> list[int]: ... 
+ pid_exists = _psposix.pid_exists + def wrap_exceptions(fun: Callable[_P, _R]) -> Callable[_P, _R]: ... -def disk_partitions(all: bool = False): ... -def sensors_battery(): ... - -net_io_counters: Any -net_if_addrs: Any - -def net_connections(kind: str = "inet"): ... -def net_if_stats(): ... -def boot_time(): ... -def users(): ... -def pids(): ... - -pid_exists: Any - -def is_zombie(pid): ... -def wrap_exceptions(fun): ... - -class Process: - __slots__ = ["_cache", "_name", "_ppid", "pid"] - pid: Any - def __init__(self, pid) -> None: ... - def oneshot_enter(self) -> None: ... - def oneshot_exit(self) -> None: ... - def name(self): ... - def exe(self): ... - def cmdline(self): ... - def environ(self): ... - def ppid(self): ... - def cwd(self): ... - def uids(self): ... - def gids(self): ... - def terminal(self): ... - def memory_info(self): ... - def memory_full_info(self): ... - def cpu_times(self): ... - def create_time(self): ... - def num_ctx_switches(self): ... - def num_threads(self): ... - def open_files(self): ... - def net_connections(self, kind: str = "inet"): ... - def num_fds(self): ... - def wait(self, timeout=None): ... - def nice_get(self): ... - def nice_set(self, value): ... - def status(self): ... - def threads(self): ... + class Process: + __slots__ = ["_cache", "_name", "_ppid", "pid"] + pid: int + def __init__(self, pid: int) -> None: ... + def oneshot_enter(self) -> None: ... + def oneshot_exit(self) -> None: ... + def name(self) -> str: ... + def exe(self) -> str: ... + def cmdline(self) -> list[str]: ... + def environ(self) -> dict[str, str]: ... + def ppid(self) -> int: ... + def cwd(self) -> str: ... + def uids(self) -> ntp.puids: ... + def gids(self) -> ntp.puids: ... + def terminal(self) -> str | None: ... + def memory_info(self) -> ntp.pmem: ... + def memory_full_info(self) -> ntp.pfullmem: ... + def cpu_times(self) -> ntp.pcputimes: ... + def create_time(self, monotonic: bool = False) -> float: ... 
+ def num_ctx_switches(self) -> ntp.pctxsw: ... + def num_threads(self) -> int: ... + def open_files(self) -> list[ntp.popenfile]: ... + def net_connections(self, kind: str = "inet") -> list[ntp.pconn]: ... + def num_fds(self) -> int: ... + def wait(self, timeout: float | None = None) -> int | None: ... + def nice_get(self) -> int: ... + def nice_set(self, value: int) -> None: ... + def status(self) -> str: ... + def threads(self) -> list[ntp.pthread]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psposix.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psposix.pyi index 46c218915a..8a9e73ca18 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psposix.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psposix.pyi @@ -1,6 +1,80 @@ -def pid_exists(pid): ... -def wait_pid(pid, timeout=None, proc_name=None, _waitpid=..., _timer=..., _min=..., _sleep=..., _pid_exists=...): ... -def disk_usage(path): ... -def get_terminal_map(): ... +import enum +import sys +from _typeshed import FileDescriptorOrPath, Incomplete, StrOrBytesPath, Unused +from collections.abc import Callable + +from . import _ntuples as ntp + +def pid_exists(pid: int) -> bool: ... 
+ +# Sync with `signal.Signals`, but with opposite values: +class Negsignal(enum.IntEnum): + SIGABRT = -6 + SIGFPE = -8 + SIGILL = -4 + SIGINT = -2 + SIGSEGV = -11 + SIGTERM = -15 + + if sys.platform == "win32": + SIGBREAK = -21 + CTRL_C_EVENT = 0 + CTRL_BREAK_EVENT = -1 + else: + SIGALRM = -14 + SIGBUS = -7 + SIGCHLD = -17 + SIGCONT = -18 + SIGHUP = -1 + SIGIO = -29 + SIGIOT = -6 + SIGKILL = -9 + SIGPIPE = -13 + SIGPROF = -27 + SIGQUIT = -3 + SIGSTOP = -19 + SIGSYS = -31 + SIGTRAP = -5 + SIGTSTP = -20 + SIGTTIN = -21 + SIGTTOU = -22 + SIGURG = -23 + SIGUSR1 = -10 + SIGUSR2 = -12 + SIGVTALRM = -26 + SIGWINCH = -28 + SIGXCPU = -24 + SIGXFSZ = -25 + if sys.platform != "linux": + SIGEMT = -7 + SIGINFO = -29 + if sys.platform != "darwin": + SIGCLD = -17 + SIGPOLL = -29 + SIGPWR = -30 + SIGRTMAX = -64 + SIGRTMIN = -34 + if sys.version_info >= (3, 11): + SIGSTKFLT = -16 + +def negsig_to_enum(num: int) -> int: ... +def wait_pid( + pid: int, + timeout: float | None = None, + proc_name: str | None = None, + _waitpid: Unused = ..., + _timer: Callable[[], float] = ..., + _min: Callable[..., Incomplete] = ..., + _sleep: Callable[[float], None] = ..., + _pid_exists: Callable[[int], bool] = ..., +) -> int | None: ... + +if sys.platform == "darwin": + def disk_usage(path: StrOrBytesPath) -> ntp.sdiskusage: ... + +else: + def disk_usage(path: FileDescriptorOrPath) -> ntp.sdiskusage: ... + +def get_terminal_map() -> dict[int, str]: ... 
__all__ = ["pid_exists", "wait_pid", "disk_usage", "get_terminal_map"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_pssunos.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_pssunos.pyi index 3080720cca..f21ab5e6ba 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_pssunos.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_pssunos.pyi @@ -1,129 +1,151 @@ -from _typeshed import Incomplete -from typing import NamedTuple - -from psutil._common import ( - AF_INET6 as AF_INET6, - AccessDenied as AccessDenied, - NoSuchProcess as NoSuchProcess, - ZombieProcess as ZombieProcess, - debug as debug, - get_procfs_path as get_procfs_path, - isfile_strict as isfile_strict, - memoize_when_activated as memoize_when_activated, - sockfam_to_enum as sockfam_to_enum, - socktype_to_enum as socktype_to_enum, - usage_percent as usage_percent, -) - -__extra__all__: Incomplete -PAGE_SIZE: Incomplete -AF_LINK: Incomplete -IS_64_BIT: Incomplete -CONN_IDLE: str -CONN_BOUND: str -PROC_STATUSES: Incomplete -TCP_STATUSES: Incomplete -proc_info_map: Incomplete - -class scputimes(NamedTuple): - user: Incomplete - system: Incomplete - idle: Incomplete - iowait: Incomplete - -class pcputimes(NamedTuple): - user: Incomplete - system: Incomplete - children_user: Incomplete - children_system: Incomplete - -class svmem(NamedTuple): - total: Incomplete - available: Incomplete - percent: Incomplete - used: Incomplete - free: Incomplete - -class pmem(NamedTuple): - rss: Incomplete - vms: Incomplete - -pfullmem = pmem - -class pmmap_grouped(NamedTuple): - path: Incomplete - rss: Incomplete - anonymous: Incomplete - locked: Incomplete - -pmmap_ext: Incomplete - -def virtual_memory(): ... -def swap_memory(): ... -def cpu_times(): ... -def per_cpu_times(): ... -def cpu_count_logical(): ... -def cpu_count_cores(): ... -def cpu_stats(): ... 
- -disk_io_counters: Incomplete -disk_usage: Incomplete - -def disk_partitions(all: bool = ...): ... - -net_io_counters: Incomplete -net_if_addrs: Incomplete - -def net_connections(kind, _pid: int = ...): ... -def net_if_stats(): ... -def boot_time(): ... -def users(): ... -def pids(): ... -def pid_exists(pid): ... -def wrap_exceptions(fun): ... - -class Process: - pid: Incomplete - def __init__(self, pid) -> None: ... - def oneshot_enter(self) -> None: ... - def oneshot_exit(self) -> None: ... - def name(self): ... - def exe(self): ... - def cmdline(self): ... - def environ(self): ... - def create_time(self): ... - def num_threads(self): ... - def nice_get(self): ... - def nice_set(self, value): ... - def ppid(self): ... - def uids(self): ... - def gids(self): ... - def cpu_times(self): ... - def cpu_num(self): ... - def terminal(self): ... - def cwd(self): ... - def memory_info(self): ... - memory_full_info: Incomplete - def status(self): ... - def threads(self): ... - def open_files(self): ... - def net_connections(self, kind: str = ...): ... - - class nt_mmap_grouped(NamedTuple): +import sys + +# sys.platform.startswith(("sunos", "solaris")): +if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin": + from _typeshed import Incomplete + from collections.abc import Callable + from typing import Final, Literal, NamedTuple, TypeVar, overload + from typing_extensions import ParamSpec + + from psutil._common import ( + AF_INET6 as AF_INET6, + ENCODING as ENCODING, + AccessDenied as AccessDenied, + NoSuchProcess as NoSuchProcess, + ZombieProcess as ZombieProcess, + debug as debug, + get_procfs_path as get_procfs_path, + isfile_strict as isfile_strict, + memoize_when_activated as memoize_when_activated, + sockfam_to_enum as sockfam_to_enum, + socktype_to_enum as socktype_to_enum, + usage_percent as usage_percent, + ) + + from . 
import _ntuples as ntp, _psposix, _psutil_sunos + + _P = ParamSpec("_P") + _R = TypeVar("_R") + + __extra__all__: Final[list[str]] + PAGE_SIZE: Final[int] + AF_LINK: Final[int] + IS_64_BIT: Final[bool] + CONN_IDLE: Final = "IDLE" + CONN_BOUND: Final = "BOUND" + PROC_STATUSES: Final[dict[int, str]] + TCP_STATUSES: Final[dict[int, str]] + proc_info_map: Final[dict[str, int]] + + class scputimes(NamedTuple): + user: float + system: float + idle: float + iowait: float + + class pcputimes(NamedTuple): + user: float + system: float + children_user: float + children_system: float + + class svmem(NamedTuple): + total: int + available: int + percent: float + used: int + free: int + + class pmem(NamedTuple): + rss: int + vms: int + + pfullmem = pmem + + class pmmap_grouped(NamedTuple): path: Incomplete rss: Incomplete - anon: Incomplete + anonymous: Incomplete locked: Incomplete - class nt_mmap_ext(NamedTuple): + class pmmap_ext(NamedTuple): addr: Incomplete perms: Incomplete path: Incomplete rss: Incomplete - anon: Incomplete + anonymous: Incomplete locked: Incomplete - def memory_maps(self): ... - def num_fds(self): ... - def num_ctx_switches(self): ... - def wait(self, timeout: Incomplete | None = ...): ... + def virtual_memory() -> svmem: ... + def swap_memory() -> ntp.sswap: ... + def cpu_times() -> scputimes: ... + def per_cpu_times() -> list[scputimes]: ... + def cpu_count_logical() -> int | None: ... + def cpu_count_cores() -> int | None: ... + def cpu_stats() -> ntp.scpustats: ... + + disk_io_counters = _psutil_sunos.disk_io_counters + disk_usage = _psposix.disk_usage + + def disk_partitions(all: bool = False) -> list[ntp.sdiskpart]: ... + + net_io_counters = _psutil_sunos.net_io_counters + net_if_addrs = _psutil_sunos.net_if_addrs + + @overload + def net_connections(kind: str, _pid: Literal[-1] = -1) -> list[ntp.sconn]: ... + @overload + def net_connections(kind: str, _pid: int = -1) -> list[ntp.pconn]: ... + def net_if_stats() -> dict[str, ntp.snicstats]: ... 
+ def boot_time() -> float: ... + def users() -> list[ntp.suser]: ... + def pids() -> list[int]: ... + def pid_exists(pid: int) -> bool: ... + def wrap_exceptions(fun: Callable[_P, _R]) -> Callable[_P, _R]: ... + + class Process: + __slots__ = ["_cache", "_name", "_ppid", "_procfs_path", "pid"] + pid: int + def __init__(self, pid: int) -> None: ... + def oneshot_enter(self) -> None: ... + def oneshot_exit(self) -> None: ... + def name(self) -> str: ... + def exe(self) -> str: ... + def cmdline(self) -> list[str] | None: ... + def environ(self) -> dict[str, str]: ... + def create_time(self) -> float: ... + def num_threads(self) -> int: ... + def nice_get(self) -> int: ... + def nice_set(self, value: int) -> None: ... + def ppid(self) -> int: ... + def uids(self) -> ntp.puids: ... + def gids(self) -> ntp.puids: ... + def cpu_times(self) -> ntp.pcputimes: ... + def cpu_num(self) -> int: ... + def terminal(self) -> str | None: ... + def cwd(self) -> str: ... + def memory_info(self) -> pmem: ... + memory_full_info = memory_info + def status(self) -> str: ... + def threads(self) -> list[ntp.pthread]: ... + def open_files(self) -> list[ntp.popenfile]: ... + def net_connections(self, kind: str = "inet") -> list[ntp.pconn]: ... + + class nt_mmap_grouped(NamedTuple): + path: Incomplete + rss: Incomplete + anon: Incomplete + locked: Incomplete + + class nt_mmap_ext(NamedTuple): + addr: Incomplete + perms: Incomplete + path: Incomplete + rss: Incomplete + anon: Incomplete + locked: Incomplete + + def memory_maps(self) -> list[tuple[str, str, str, int, int, int]]: ... + def num_fds(self) -> int: ... + def num_ctx_switches(self) -> ntp.pctxsw: ... + def wait(self, timeout: float | None = None) -> int | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_aix.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_aix.pyi new file mode 100644 index 0000000000..17977e3ead --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_aix.pyi @@ -0,0 +1,58 @@ +import sys + +# sys.platform.startswith("aix"): +if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin": + from typing import Final + + AF_LINK: Final = 18 + + def getpagesize() -> int: ... + def net_if_addrs() -> list[tuple[str, int, str, str | None, str | None, str | None]]: ... + def net_if_flags(nic_name: str, /) -> list[str]: ... + def net_if_is_running(nic_name: str, /) -> bool: ... + def net_if_mtu(nic_name: str, /) -> int: ... + def proc_priority_get(pid: int, /) -> int: ... + def proc_priority_set(pid: int, priority: int, /) -> None: ... + + version: Final[int] + SIDL: Final[int] + SZOMB: Final[int] + SACTIVE: Final[int] + SSWAP: Final[int] + SSTOP: Final[int] + TCPS_CLOSED: Final[int] + TCPS_CLOSING: Final[int] + TCPS_CLOSE_WAIT: Final[int] + TCPS_LISTEN: Final[int] + TCPS_ESTABLISHED: Final[int] + TCPS_SYN_SENT: Final[int] + TCPS_SYN_RCVD: Final[int] + TCPS_FIN_WAIT_1: Final[int] + TCPS_FIN_WAIT_2: Final[int] + TCPS_LAST_ACK: Final[int] + TCPS_TIME_WAIT: Final[int] + PSUTIL_CONN_NONE: Final = 128 + + def proc_args(pid: int, /) -> list[str]: ... + def proc_basic_info(pid: int, procfs_path: str, /) -> tuple[int, int, int, float, int, int, int, int]: ... + def proc_cpu_times(pid: int, procfs_path: str, /) -> tuple[float, float, float, float]: ... + def proc_cred(pid: int, procfs_path: str, /) -> tuple[int, int, int, int, int, int]: ... + def proc_environ(pid: int, /) -> dict[str, str]: ... + def proc_name(pid: int, procfs_path: str, /) -> str: ... + def proc_threads(pid: int, /) -> list[tuple[int, float, float]]: ... + def proc_io_counters(pid: int, /) -> tuple[int, int, int, int]: ... 
+ def proc_num_ctx_switches(requested_pid: int, /) -> tuple[int, int]: ... + def boot_time() -> float: ... + def disk_io_counters() -> dict[str, tuple[int, int, int, int, int, int]]: ... + def disk_partitions() -> list[tuple[str, str, str, str]]: ... + def per_cpu_times() -> list[tuple[float, float, float, float]]: ... + def swap_mem() -> tuple[int, int, int, int]: ... + def virtual_mem() -> tuple[int, int, int, int, int]: ... + def net_io_counters() -> dict[str, tuple[int, int, int, int, int, int, int, int]]: ... + def cpu_stats() -> tuple[int, int, int, int]: ... + def net_connections( + requested_pid: int, / + ) -> list[tuple[int, int, int, str | tuple[str, int], str | tuple[str, int] | tuple[()], int, int]]: ... + def net_if_stats(nic_name: str, /) -> tuple[bool, int]: ... # It's actually list of 2 elements + def check_pid_range(pid: int, /) -> None: ... + def set_debug(value: bool, /) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_bsd.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_bsd.pyi new file mode 100644 index 0000000000..2d876cd2cf --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_bsd.pyi @@ -0,0 +1,141 @@ +import sys + +# sys.platform.startswith(("freebsd", "midnightbsd", "openbsd", "netbsd")): +if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin": + from collections.abc import Sequence + from socket import AddressFamily, SocketKind + from typing import Final, overload + + AF_LINK: Final[int] + RLIMIT_AS: Final[int] # only FreeBSD + RLIMIT_CORE: Final[int] # only FreeBSD + RLIMIT_CPU: Final[int] # only FreeBSD + RLIMIT_DATA: Final[int] # only FreeBSD + RLIMIT_FSIZE: Final[int] # only FreeBSD + RLIMIT_MEMLOCK: Final[int] # only FreeBSD + RLIMIT_NOFILE: Final[int] # only FreeBSD + RLIMIT_NPROC: Final[int] # only FreeBSD + RLIMIT_RSS: Final[int] # only FreeBSD + RLIMIT_STACK: Final[int] # only FreeBSD + 
RLIMIT_SWAP: Final[int] # only FreeBSD + RLIMIT_SBSIZE: Final[int] # only FreeBSD + RLIMIT_NPTS: Final[int] # only FreeBSD + RLIM_INFINITY: Final[int] # only FreeBSD + + def getpagesize() -> int: ... + def net_if_addrs() -> list[tuple[str, int, str, str | None, str | None, str | None]]: ... + def net_if_flags(nic_name: str, /) -> list[str]: ... + def net_if_is_running(nic_name: str, /) -> bool: ... + def net_if_mtu(nic_name: str, /) -> int: ... + def proc_priority_get(pid: int, /) -> int: ... + def proc_priority_set(pid: int, priority: int, /) -> None: ... + def net_if_duplex_speed(nic_name: str, /) -> tuple[int, int]: ... # It's actually list of 2 elements + def proc_is_zombie(pid: int, /) -> bool: ... + + version: Final[int] + SIDL: Final[int] + SRUN: Final[int] + SSLEEP: Final[int] + SSTOP: Final[int] + SZOMB: Final[int] + SWAIT: Final[int] # only FreeBSD + SLOCK: Final[int] # only FreeBSD + SDEAD: Final[int] # only OpenBSD and NetBSD + SONPROC: Final[int] # only OpenBSD and NetBSD + SSUSPENDED: Final[int] # only NetBSD + TCPS_CLOSED: Final[int] + TCPS_CLOSING: Final[int] + TCPS_CLOSE_WAIT: Final[int] + TCPS_LISTEN: Final[int] + TCPS_ESTABLISHED: Final[int] + TCPS_SYN_SENT: Final[int] + TCPS_SYN_RECEIVED: Final[int] + TCPS_FIN_WAIT_1: Final[int] + TCPS_FIN_WAIT_2: Final[int] + TCPS_LAST_ACK: Final[int] + TCPS_TIME_WAIT: Final[int] + PSUTIL_CONN_NONE: Final = 128 + + def proc_cmdline(pid: int, /) -> list[str]: ... + def proc_cwd(pid: int, /) -> str: ... + def proc_environ(pid: int, /) -> dict[str, str]: ... + def proc_name(pid: int, /) -> str: ... + def proc_num_fds(pid: int, /) -> int: ... + def proc_oneshot_info( + pid: int, / + ) -> tuple[ + int, + int, + int, + int, + int, + int, + int, + int, + int, + float, + int, + int, + int, + int, + float, + float, + float, + float, + int, + int, + int, + int, + int, + int, + str, + ]: ... + def proc_open_files(pid: int, /) -> list[tuple[str, int]]: ... 
+ def proc_threads(pid: int, /) -> list[tuple[int, float, float]]: ... + def proc_num_threads(pid: int, /) -> int: ... # only FreeBSD and NetBSD + def proc_cpu_affinity_get(pid: int, /) -> list[int]: ... # only FreeBSD + def proc_cpu_affinity_set(pid: int, cpu_set: Sequence[int], /) -> None: ... # only FreeBSD + def proc_exe(pid: int, /) -> str: ... # only FreeBSD + def proc_getrlimit(pid: int, resource: int, /) -> tuple[int, int]: ... # only FreeBSD + def proc_memory_maps(pid: int, /) -> list[tuple[str, str, str, int, int, int, int]]: ... # only FreeBSD + def proc_net_connections( # only FreeBSD + pid: int, af_filter: Sequence[AddressFamily | int | None], type_filter: Sequence[SocketKind | int | None], / + ) -> list[tuple[int, int, int, tuple[str, int], tuple[str, int] | tuple[()], int] | tuple[int, int, int, str, str, int]]: ... + def proc_setrlimit(pid: int, resource: int, soft: int, hard: int, /) -> None: ... # only FreeBSD + def boot_time() -> float: ... + def cpu_count_logical() -> int | None: ... + def cpu_stats() -> tuple[int, ...]: ... # tuple's length depends on OS + def cpu_times() -> tuple[float, float, float, float, float]: ... + def disk_io_counters() -> dict[str, tuple[int, ...]]: ... # tuple's length depends on OS + def disk_partitions() -> list[tuple[str, str, str, str]]: ... + @overload # for FreeBSD + def net_connections( + af_filter: Sequence[AddressFamily | int | None], type_filter: Sequence[SocketKind | int | None], / + ) -> list[ + tuple[int, int, int, tuple[str, int], tuple[str, int] | tuple[()], int, int] | tuple[int, int, int, str, str, int, int] + ]: ... + @overload # for OpenBSD + def net_connections( + pid: int, af_filter: Sequence[AddressFamily | int | None], type_filter: Sequence[SocketKind | int | None], / + ) -> list[ + tuple[int, int, int, tuple[str, int], tuple[str, int] | tuple[()], int, int] | tuple[int, int, int, str, str, int, int] + ]: ... 
+ @overload # for NetBSD + def net_connections(pid: int, kind: str, /) -> list[tuple[int, int, int, str, str, int, int]]: ... + def net_io_counters() -> dict[str, tuple[int, int, int, int, int, int, int, int]]: ... + def per_cpu_times() -> list[tuple[float, float, float, float, float]]: ... + def pids() -> list[int]: ... + def swap_mem() -> tuple[int, int, int, int, int]: ... + def heap_info() -> tuple[int, int]: ... # only FreeBSD and NetBSD + def heap_trim() -> None: ... # only FreeBSD and NetBSD + def users() -> list[tuple[str, str, str, float, int | None]]: ... # returns None only in OpenBSD + def virtual_mem() -> tuple[int, ...]: ... # tuple's length depends on OS + @overload + def cpu_freq() -> int: ... # only OpenBSD + @overload + def cpu_freq(core: int, /) -> tuple[int, str]: ... # only FreeBSD + def cpu_topology() -> str | None: ... # only FreeBSD + def sensors_battery() -> tuple[int, int, int]: ... # only FreeBSD + def sensors_cpu_temperature(core: int, /) -> tuple[int, int]: ... # only FreeBSD + def check_pid_range(pid: int, /) -> None: ... + def set_debug(value: bool, /) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_linux.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_linux.pyi index 877715d040..d242799613 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_linux.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_linux.pyi @@ -1,17 +1,49 @@ -from typing import Any +import sys -DUPLEX_FULL: int -DUPLEX_HALF: int -DUPLEX_UNKNOWN: int -version: int +if sys.platform == "linux": + from collections.abc import Sequence + from typing import Final -def check_pid_range(pid: int, /) -> None: ... -def disk_partitions(*args, **kwargs) -> Any: ... -def linux_sysinfo(*args, **kwargs) -> Any: ... -def net_if_duplex_speed(*args, **kwargs) -> Any: ... -def proc_cpu_affinity_get(*args, **kwargs) -> Any: ... 
-def proc_cpu_affinity_set(*args, **kwargs) -> Any: ... -def proc_ioprio_get(*args, **kwargs) -> Any: ... -def proc_ioprio_set(*args, **kwargs) -> Any: ... -def set_debug(*args, **kwargs) -> Any: ... -def users(*args, **kwargs) -> Any: ... + RLIMIT_AS: Final[int] + RLIMIT_CORE: Final[int] + RLIMIT_CPU: Final[int] + RLIMIT_DATA: Final[int] + RLIMIT_FSIZE: Final[int] + RLIMIT_MEMLOCK: Final[int] + RLIMIT_NOFILE: Final[int] + RLIMIT_NPROC: Final[int] + RLIMIT_RSS: Final[int] + RLIMIT_STACK: Final[int] + RLIMIT_LOCKS: Final[int] + RLIMIT_MSGQUEUE: Final[int] + RLIMIT_NICE: Final[int] + RLIMIT_RTPRIO: Final[int] + RLIMIT_RTTIME: Final[int] + RLIMIT_SIGPENDING: Final[int] + RLIM_INFINITY: Final[int] + + def getpagesize() -> int: ... + def net_if_addrs() -> list[tuple[str, int, str, str | None, str | None, str | None]]: ... + def net_if_flags(nic_name: str, /) -> list[str]: ... + def net_if_is_running(nic_name: str, /) -> bool: ... + def net_if_mtu(nic_name: str, /) -> int: ... + def proc_priority_get(pid: int, /) -> int: ... + def proc_priority_set(pid: int, priority: int, /) -> None: ... + def users() -> list[tuple[str, str, str, float, int]]: ... + + version: Final[int] + DUPLEX_FULL: Final[int] + DUPLEX_HALF: Final[int] + DUPLEX_UNKNOWN: Final[int] + + def proc_ioprio_get(pid: int, /) -> tuple[int, int]: ... + def proc_ioprio_set(pid: int, ioclass: int, iodata: int, /) -> None: ... + def proc_cpu_affinity_get(pid: int, /) -> list[int]: ... + def proc_cpu_affinity_set(pid: int, cpu_set: Sequence[int], /) -> None: ... + def disk_partitions(mtab_path: str, /) -> list[tuple[str, str, str, str]]: ... + def net_if_duplex_speed(nic_name: str, /) -> tuple[int, int]: ... # It's actually list of 2 elements + def heap_info() -> tuple[int, int]: ... + def heap_trim() -> bool: ... + def linux_sysinfo() -> tuple[int, int, int, int, int, int, int]: ... + def check_pid_range(pid: int, /) -> None: ... + def set_debug(value: bool, /) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_osx.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_osx.pyi index 3a6ea7372d..2ea2baca3d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_osx.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_osx.pyi @@ -1,51 +1,79 @@ -from typing import Any +import sys -PSUTIL_CONN_NONE: int -SIDL: int -SRUN: int -SSLEEP: int -SSTOP: int -SZOMB: int -TCPS_CLOSED: int -TCPS_CLOSE_WAIT: int -TCPS_CLOSING: int -TCPS_ESTABLISHED: int -TCPS_FIN_WAIT_1: int -TCPS_FIN_WAIT_2: int -TCPS_LAST_ACK: int -TCPS_LISTEN: int -TCPS_SYN_RECEIVED: int -TCPS_SYN_SENT: int -TCPS_TIME_WAIT: int -version: int +if sys.platform == "darwin": + from _typeshed import StrOrBytesPath + from collections.abc import Sequence + from socket import AddressFamily, SocketKind + from typing import Final, TypeVar -def boot_time(*args, **kwargs) -> Any: ... -def check_pid_range(pid: int, /) -> None: ... -def cpu_count_cores(*args, **kwargs) -> Any: ... -def cpu_count_logical(*args, **kwargs) -> Any: ... -def cpu_freq(*args, **kwargs) -> Any: ... -def cpu_stats(*args, **kwargs) -> Any: ... -def cpu_times(*args, **kwargs) -> Any: ... -def disk_io_counters(*args, **kwargs) -> Any: ... -def disk_partitions(*args, **kwargs) -> Any: ... -def disk_usage_used(*args, **kwargs) -> Any: ... -def net_io_counters(*args, **kwargs) -> Any: ... -def per_cpu_times(*args, **kwargs) -> Any: ... -def pids(*args, **kwargs) -> Any: ... -def proc_cmdline(*args, **kwargs) -> Any: ... -def proc_net_connections(*args, **kwargs) -> Any: ... -def proc_cwd(*args, **kwargs) -> Any: ... -def proc_environ(*args, **kwargs) -> Any: ... -def proc_exe(*args, **kwargs) -> Any: ... -def proc_kinfo_oneshot(*args, **kwargs) -> Any: ... -def proc_memory_uss(*args, **kwargs) -> Any: ... -def proc_name(*args, **kwargs) -> Any: ... -def proc_num_fds(*args, **kwargs) -> Any: ... 
-def proc_open_files(*args, **kwargs) -> Any: ... -def proc_pidtaskinfo_oneshot(*args, **kwargs) -> Any: ... -def proc_threads(*args, **kwargs) -> Any: ... -def sensors_battery(*args, **kwargs) -> Any: ... -def set_debug(*args, **kwargs) -> Any: ... -def swap_mem(*args, **kwargs) -> Any: ... -def users(*args, **kwargs) -> Any: ... -def virtual_mem(*args, **kwargs) -> Any: ... + _T = TypeVar("_T") + + AF_LINK: Final = 18 + + def getpagesize() -> int: ... + def net_if_addrs() -> list[tuple[str, int, str, str | None, str | None, str | None]]: ... + def net_if_flags(nic_name: str, /) -> list[str]: ... + def net_if_is_running(nic_name: str, /) -> bool: ... + def net_if_mtu(nic_name: str, /) -> int: ... + def proc_priority_get(pid: int, /) -> int: ... + def proc_priority_set(pid: int, priority: int, /) -> None: ... + def net_if_duplex_speed(nic_name: str, /) -> tuple[int, int]: ... # It's actually list of 2 elements + def users() -> list[tuple[str, str, str, float, int]]: ... + def proc_is_zombie(pid: int, /) -> bool: ... + + version: Final[int] + SIDL: Final = 1 + SRUN: Final = 2 + SSLEEP: Final = 3 + SSTOP: Final = 4 + SZOMB: Final = 5 + TCPS_CLOSED: Final = 0 + TCPS_CLOSING: Final = 7 + TCPS_CLOSE_WAIT: Final = 5 + TCPS_LISTEN: Final = 1 + TCPS_ESTABLISHED: Final = 4 + TCPS_SYN_SENT: Final = 2 + TCPS_SYN_RECEIVED: Final = 3 + TCPS_FIN_WAIT_1: Final = 6 + TCPS_FIN_WAIT_2: Final = 9 + TCPS_LAST_ACK: Final = 8 + TCPS_TIME_WAIT: Final = 10 + PSUTIL_CONN_NONE: Final = 128 + + def proc_cmdline(pid: int, /) -> list[str]: ... + def proc_cwd(pid: int, /) -> str: ... + def proc_environ(pid: int, /) -> str: ... + def proc_exe(pid: int, /) -> str: ... + def proc_kinfo_oneshot(pid: int, /) -> tuple[int, int, int, int, int, int, int, float, int, str]: ... + def proc_memory_uss(pid: int, /) -> int: ... + def proc_name(pid: int, /) -> str: ... 
+ def proc_net_connections( + pid: int, af_filter: Sequence[AddressFamily | int | None], type_filter: Sequence[SocketKind | int | None], / + ) -> list[ + tuple[int, int, int, tuple[str | None, int], tuple[str | None, int] | tuple[()], int] + | tuple[int, int, int, str, str, int] + ]: ... + def proc_num_fds(pid: int, /) -> int: ... + def proc_open_files(pid: int, /) -> list[tuple[str, int]]: ... + def proc_pidtaskinfo_oneshot(pid: int, /) -> tuple[float, float, int, int, int, int, int, int]: ... + def proc_threads(pid: int, /) -> list[tuple[int, float, float]]: ... + def boot_time() -> float: ... + def cpu_count_cores() -> int | None: ... + def cpu_count_logical() -> int | None: ... + def cpu_freq() -> tuple[int, int, int]: ... + def cpu_stats() -> tuple[int, int, int, int, int]: ... + def cpu_times() -> tuple[float, float, float, float]: ... + def disk_io_counters() -> dict[str, tuple[int, int, int, int, int, int]]: ... + def disk_partitions() -> list[tuple[str, str, str, str]]: ... + def disk_usage_used(mount_point: StrOrBytesPath, default: _T, /) -> int | _T: ... + def has_cpu_freq() -> bool: ... + def heap_info() -> tuple[int, int]: ... + def heap_trim() -> None: ... + def net_io_counters() -> dict[str, tuple[int, int, int, int, int, int, int, int]]: ... + def per_cpu_times() -> list[tuple[float, float, float, float]]: ... + def pids() -> list[int]: ... + def sensors_battery() -> tuple[int, int, int]: ... + def swap_mem() -> tuple[int, int, int, int, int]: ... + def virtual_mem() -> tuple[int, int, int, int, int, int]: ... + def check_pid_range(pid: int, /) -> None: ... + def set_debug(value: bool, /) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_posix.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_posix.pyi deleted file mode 100644 index 2a58ee9a43..0000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_posix.pyi +++ /dev/null @@ -1,34 +0,0 @@ -import sys -from typing import Any - -if sys.platform == "linux": - RLIMIT_AS: int - RLIMIT_CORE: int - RLIMIT_CPU: int - RLIMIT_DATA: int - RLIMIT_FSIZE: int - RLIMIT_LOCKS: int - RLIMIT_MEMLOCK: int - RLIMIT_MSGQUEUE: int - RLIMIT_NICE: int - RLIMIT_NOFILE: int - RLIMIT_NPROC: int - RLIMIT_RSS: int - RLIMIT_RTPRIO: int - RLIMIT_RTTIME: int - RLIMIT_SIGPENDING: int - RLIMIT_STACK: int - RLIM_INFINITY: int - -def getpagesize(*args, **kwargs) -> Any: ... -def getpriority(*args, **kwargs) -> Any: ... -def net_if_addrs(*args, **kwargs) -> Any: ... -def net_if_flags(*args, **kwargs) -> Any: ... -def net_if_is_running(*args, **kwargs) -> Any: ... -def net_if_mtu(*args, **kwargs) -> Any: ... - -if sys.platform == "darwin": - AF_LINK: int - def net_if_duplex_speed(*args, **kwargs): ... - -def setpriority(*args, **kwargs) -> Any: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_sunos.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_sunos.pyi new file mode 100644 index 0000000000..fe56efa827 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_sunos.pyi @@ -0,0 +1,63 @@ +import sys + +# sys.platform.startswith(("sunos", "solaris")): +if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin": + from typing import Final + + AF_LINK: Final[int] + + def getpagesize() -> int: ... + def net_if_addrs() -> list[tuple[str, int, str, str | None, str | None, str | None]]: ... + def net_if_flags(nic_name: str, /) -> list[str]: ... + def net_if_is_running(nic_name: str, /) -> bool: ... 
+ def net_if_mtu(nic_name: str, /) -> int: ... + def proc_priority_get(pid: int, /) -> int: ... + def proc_priority_set(pid: int, priority: int, /) -> None: ... + def users() -> list[tuple[str, str, str, float, int]]: ... + + version: Final[int] + # They could be different between different versions of SunOS/Solaris: + SSLEEP: Final[int] + SRUN: Final[int] + SZOMB: Final[int] + SSTOP: Final[int] + SIDL: Final[int] + SONPROC: Final[int] + SWAIT: Final[int] + PRNODEV: Final[int] + TCPS_CLOSED: Final[int] + TCPS_CLOSING: Final[int] + TCPS_CLOSE_WAIT: Final[int] + TCPS_LISTEN: Final[int] + TCPS_ESTABLISHED: Final[int] + TCPS_SYN_SENT: Final[int] + TCPS_SYN_RCVD: Final[int] + TCPS_FIN_WAIT_1: Final[int] + TCPS_FIN_WAIT_2: Final[int] + TCPS_LAST_ACK: Final[int] + TCPS_TIME_WAIT: Final[int] + TCPS_IDLE: Final[int] + TCPS_BOUND: Final[int] + PSUTIL_CONN_NONE: Final = 128 + + def proc_basic_info(pid: int, procfs_path: str, /) -> tuple[int, int, int, float, int, int, int, int, int, int, int, int]: ... + def proc_cpu_num(pid: int, procfs_path: str, /) -> int: ... + def proc_cpu_times(pid: int, procfs_path: str, /) -> tuple[float, float, float, float]: ... + def proc_cred(pid: int, procfs_path: str, /) -> tuple[int, int, int, int, int, int]: ... + def proc_environ(pid: int, procfs_path: str, /) -> dict[str, str]: ... + def proc_memory_maps(pid: int, procfs_path: str, /) -> list[tuple[int, int, str, str, int, int, int]]: ... + def proc_name_and_args(pid: int, procfs_path: str, /) -> tuple[str, list[str] | None]: ... + def proc_num_ctx_switches(pid: int, procfs_path: str, /) -> tuple[int, int]: ... + def query_process_thread(pid: int, tid: int, procfs_path: str, /) -> tuple[float, float]: ... + def boot_time() -> float: ... + def cpu_count_cores() -> int | None: ... + def cpu_stats() -> tuple[int, int, int, int]: ... + def disk_io_counters() -> dict[str, tuple[int, int, int, int, int, int]]: ... + def disk_partitions() -> list[tuple[str, str, str, str]]: ... 
+ def net_connections(pid: int, /) -> list[tuple[int, int, int, tuple[str, int], tuple[str, int] | tuple[()], int, int]]: ... + def net_if_stats() -> dict[str, tuple[bool, int, int, int]]: ... + def net_io_counters() -> dict[str, tuple[int, int, int, int, int, int, int, int]]: ... + def per_cpu_times() -> list[tuple[float, float, float, float]]: ... + def swap_mem() -> tuple[int, int]: ... + def check_pid_range(pid: int, /) -> None: ... + def set_debug(value: bool, /) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_windows.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_windows.pyi index a65eb03ed5..f48ed77998 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_windows.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psutil_windows.pyi @@ -1,94 +1,105 @@ -from typing import Final +import sys -ABOVE_NORMAL_PRIORITY_CLASS: Final = 32768 -BELOW_NORMAL_PRIORITY_CLASS: Final = 16384 -ERROR_ACCESS_DENIED: int -ERROR_INVALID_NAME: int -ERROR_PRIVILEGE_NOT_HELD: int -ERROR_SERVICE_DOES_NOT_EXIST: int -HIGH_PRIORITY_CLASS: Final = 128 -IDLE_PRIORITY_CLASS: Final = 64 -INFINITE: int -MIB_TCP_STATE_CLOSED: int -MIB_TCP_STATE_CLOSE_WAIT: int -MIB_TCP_STATE_CLOSING: int -MIB_TCP_STATE_DELETE_TCB: int -MIB_TCP_STATE_ESTAB: int -MIB_TCP_STATE_FIN_WAIT1: int -MIB_TCP_STATE_FIN_WAIT2: int -MIB_TCP_STATE_LAST_ACK: int -MIB_TCP_STATE_LISTEN: int -MIB_TCP_STATE_SYN_RCVD: int -MIB_TCP_STATE_SYN_SENT: int -MIB_TCP_STATE_TIME_WAIT: int -NORMAL_PRIORITY_CLASS: Final = 32 -PSUTIL_CONN_NONE: int -REALTIME_PRIORITY_CLASS: Final = 256 -WINDOWS_10: int -WINDOWS_7: int -WINDOWS_8: int -WINDOWS_8_1: int -WINDOWS_VISTA: int -WINVER: int -version: int +if sys.platform == "win32": + from collections.abc import Sequence + from socket import AddressFamily, SocketKind + from typing import Final -class TimeoutAbandoned(Exception): ... 
-class TimeoutExpired(Exception): ... + version: Final[int] + ABOVE_NORMAL_PRIORITY_CLASS: Final = 32768 + BELOW_NORMAL_PRIORITY_CLASS: Final = 16384 + HIGH_PRIORITY_CLASS: Final = 128 + IDLE_PRIORITY_CLASS: Final = 64 + NORMAL_PRIORITY_CLASS: Final = 32 + REALTIME_PRIORITY_CLASS: Final = 256 + MIB_TCP_STATE_CLOSED: Final = 1 + MIB_TCP_STATE_CLOSING: Final = 9 + MIB_TCP_STATE_CLOSE_WAIT: Final = 8 + MIB_TCP_STATE_LISTEN: Final = 2 + MIB_TCP_STATE_ESTAB: Final = 5 + MIB_TCP_STATE_SYN_SENT: Final = 3 + MIB_TCP_STATE_SYN_RCVD: Final = 4 + MIB_TCP_STATE_FIN_WAIT1: Final = 6 + MIB_TCP_STATE_FIN_WAIT2: Final = 7 + MIB_TCP_STATE_LAST_ACK: Final = 10 + MIB_TCP_STATE_TIME_WAIT: Final = 11 + MIB_TCP_STATE_DELETE_TCB: Final = 12 + PSUTIL_CONN_NONE: Final = 128 + INFINITE: Final[int] + ERROR_ACCESS_DENIED: Final = 5 + ERROR_INVALID_NAME: Final = 123 + ERROR_SERVICE_DOES_NOT_EXIST: Final = 1060 + ERROR_PRIVILEGE_NOT_HELD: Final = 1314 + WINVER: Final[int] + WINDOWS_VISTA: Final = 60 + WINDOWS_7: Final = 61 + WINDOWS_8: Final = 62 + WINDOWS_8_1: Final = 63 + WINDOWS_10: Final = 100 -def QueryDosDevice(*args, **kwargs): ... # incomplete -def boot_time(*args, **kwargs): ... # incomplete -def check_pid_range(pid: int, /) -> None: ... -def cpu_count_cores(*args, **kwargs): ... # incomplete -def cpu_count_logical(*args, **kwargs): ... # incomplete -def cpu_freq(*args, **kwargs): ... # incomplete -def cpu_stats(*args, **kwargs): ... # incomplete -def cpu_times(*args, **kwargs): ... # incomplete -def disk_io_counters(*args, **kwargs): ... # incomplete -def disk_partitions(*args, **kwargs): ... # incomplete -def disk_usage(*args, **kwargs): ... # incomplete -def getloadavg(*args, **kwargs): ... # incomplete -def getpagesize(*args, **kwargs): ... # incomplete -def init_loadavg_counter(*args, **kwargs): ... # incomplete -def net_connections(*args, **kwargs): ... # incomplete -def net_if_addrs(*args, **kwargs): ... # incomplete -def net_if_stats(*args, **kwargs): ... 
# incomplete -def net_io_counters(*args, **kwargs): ... # incomplete -def per_cpu_times(*args, **kwargs): ... # incomplete -def pid_exists(*args, **kwargs): ... # incomplete -def pids(*args, **kwargs): ... # incomplete -def ppid_map(*args, **kwargs): ... # incomplete -def proc_cmdline(*args, **kwargs): ... # incomplete -def proc_cpu_affinity_get(*args, **kwargs): ... # incomplete -def proc_cpu_affinity_set(*args, **kwargs): ... # incomplete -def proc_cwd(*args, **kwargs): ... # incomplete -def proc_environ(*args, **kwargs): ... # incomplete -def proc_exe(*args, **kwargs): ... # incomplete -def proc_info(*args, **kwargs): ... # incomplete -def proc_io_counters(*args, **kwargs): ... # incomplete -def proc_io_priority_get(*args, **kwargs): ... # incomplete -def proc_io_priority_set(*args, **kwargs): ... # incomplete -def proc_is_suspended(*args, **kwargs): ... # incomplete -def proc_kill(*args, **kwargs): ... # incomplete -def proc_memory_info(*args, **kwargs): ... # incomplete -def proc_memory_maps(*args, **kwargs): ... # incomplete -def proc_memory_uss(*args, **kwargs): ... # incomplete -def proc_num_handles(*args, **kwargs): ... # incomplete -def proc_open_files(*args, **kwargs): ... # incomplete -def proc_priority_get(*args, **kwargs): ... # incomplete -def proc_priority_set(*args, **kwargs): ... # incomplete -def proc_suspend_or_resume(*args, **kwargs): ... # incomplete -def proc_threads(*args, **kwargs): ... # incomplete -def proc_times(*args, **kwargs): ... # incomplete -def proc_username(*args, **kwargs): ... # incomplete -def proc_wait(*args, **kwargs): ... # incomplete -def sensors_battery(*args, **kwargs): ... # incomplete -def set_debug(*args, **kwargs): ... # incomplete -def swap_percent(*args, **kwargs): ... # incomplete -def users(*args, **kwargs): ... # incomplete -def virtual_mem(*args, **kwargs): ... # incomplete -def winservice_enumerate(*args, **kwargs): ... # incomplete -def winservice_query_config(*args, **kwargs): ... 
# incomplete -def winservice_query_descr(*args, **kwargs): ... # incomplete -def winservice_query_status(*args, **kwargs): ... # incomplete -def winservice_start(*args, **kwargs): ... # incomplete -def winservice_stop(*args, **kwargs): ... # incomplete + class TimeoutAbandoned(Exception): ... + class TimeoutExpired(Exception): ... + + def proc_cmdline(pid: int, use_peb: bool = True) -> list[str]: ... + def proc_cpu_affinity_get(pid: int, /) -> int: ... + def proc_cpu_affinity_set(pid: int, mask: int, /) -> None: ... + def proc_cwd(pid: int, /) -> str: ... + def proc_environ(pid: int, /) -> str: ... + def proc_exe(pid: int, /) -> str: ... + def proc_io_counters(pid: int, /) -> tuple[int, int, int, int, int, int]: ... + def proc_io_priority_get(pid: int, /) -> int: ... + def proc_io_priority_set(pid: int, priority: int, /) -> None: ... + def proc_is_suspended(pid: int, /) -> bool: ... + def proc_kill(pid: int, /) -> None: ... + def proc_memory_info(pid: int, /) -> tuple[int, int, int, int, int, int, int, int, int, int]: ... + def proc_memory_maps(pid: int, /) -> list[tuple[int, str, str, int]]: ... + def proc_memory_uss(pid: int, /) -> int: ... + def proc_num_handles(pid: int, /) -> int: ... + def proc_open_files(pid: int, /) -> list[str]: ... + def proc_priority_get(pid: int, /) -> int: ... + def proc_priority_set(pid: int, priority: int, /) -> None: ... + def proc_suspend_or_resume(pid: int, suspend: bool | None, /) -> None: ... + def proc_threads(pid: int, /) -> list[tuple[int, float, float]]: ... + def proc_times(pid: int, /) -> tuple[float, float, float]: ... + def proc_username(pid: int, /) -> tuple[str, str]: ... + def proc_wait(pid: int, timeout: int, /) -> int | None: ... + def proc_info( + pid: int, / + ) -> tuple[int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int]: ... + def uptime() -> float: ... + def cpu_count_cores() -> int | None: ... + def cpu_count_logical() -> int | None: ... 
+ def cpu_freq() -> tuple[int, int]: ... + def cpu_stats() -> tuple[int, int, int, int]: ... + def cpu_times() -> tuple[float, float, float]: ... + def disk_io_counters() -> dict[str, tuple[int, int, int, int, int, int]]: ... + def disk_partitions(all: bool, /) -> list[tuple[str, str, str, str]]: ... + def disk_usage(path: str, /) -> tuple[int, int, int]: ... + def getloadavg() -> tuple[float, float, float]: ... + def getpagesize() -> int: ... + def swap_percent() -> float: ... + def init_loadavg_counter() -> None: ... + def heap_info() -> tuple[int, int, int]: ... + def heap_trim() -> int: ... + def net_connections( + pid: int, af_filter: Sequence[AddressFamily | int | None], type_filter: Sequence[SocketKind | int | None], / + ) -> list[tuple[int, int, int, tuple[str | None, int], tuple[str | None, int], int, int]]: ... + def net_if_addrs() -> list[tuple[str, int, str, str | None, None, None]]: ... + def net_if_stats() -> dict[str, tuple[bool, int, int, int]]: ... + def net_io_counters() -> dict[str, tuple[int, int, int, int, int, int, int, int]]: ... + def per_cpu_times() -> list[tuple[float, float, float, float, float]]: ... + def pid_exists(pid: int, /) -> bool: ... + def pids() -> list[int]: ... + def ppid_map() -> dict[int, int]: ... + def sensors_battery() -> tuple[int, int, int, int]: ... + def users() -> list[tuple[str, str | None, float]]: ... + def virtual_mem() -> tuple[int, int, int, int]: ... + def winservice_enumerate() -> list[tuple[str, str]]: ... + def winservice_query_config(service_name: str, /) -> tuple[str, str, str, str]: ... + def winservice_query_descr(service_name: str, /) -> str: ... + def winservice_query_status(service_name: str, /) -> tuple[str, int] | str: ... + def winservice_start(service_name: str, /) -> None: ... + def winservice_stop(service_name: str, /) -> None: ... + def QueryDosDevice(device_path: str, /) -> str: ... + def check_pid_range(pid: int, /) -> None: ... + def set_debug(value: bool, /) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_pswindows.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_pswindows.pyi index afd5f49b1a..77e4728ddb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_pswindows.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_pswindows.pyi @@ -1,200 +1,175 @@ -import enum -from _typeshed import Incomplete -from collections.abc import Iterable -from typing import Any, NamedTuple - -from psutil import _psutil_windows -from psutil._common import ( - ENCODING as ENCODING, - AccessDenied as AccessDenied, - NoSuchProcess as NoSuchProcess, - TimeoutExpired as TimeoutExpired, - conn_tmap as conn_tmap, - conn_to_ntuple as conn_to_ntuple, - debug as debug, - isfile_strict as isfile_strict, - memoize as memoize, - parse_environ_block as parse_environ_block, - usage_percent as usage_percent, -) -from psutil._psutil_windows import ( - ABOVE_NORMAL_PRIORITY_CLASS as ABOVE_NORMAL_PRIORITY_CLASS, - BELOW_NORMAL_PRIORITY_CLASS as BELOW_NORMAL_PRIORITY_CLASS, - HIGH_PRIORITY_CLASS as HIGH_PRIORITY_CLASS, - IDLE_PRIORITY_CLASS as IDLE_PRIORITY_CLASS, - NORMAL_PRIORITY_CLASS as NORMAL_PRIORITY_CLASS, - REALTIME_PRIORITY_CLASS as REALTIME_PRIORITY_CLASS, -) - -__extra__all__: Any -CONN_DELETE_TCB: str -ERROR_PARTIAL_COPY: int -PYPY: Any -AF_LINK: int -AddressFamily: Any -TCP_STATUSES: Any - -# These noqas workaround https://github.com/astral-sh/ruff/issues/10874 -class Priority(enum.IntEnum): - ABOVE_NORMAL_PRIORITY_CLASS = _psutil_windows.ABOVE_NORMAL_PRIORITY_CLASS # noqa: F811 - BELOW_NORMAL_PRIORITY_CLASS = _psutil_windows.BELOW_NORMAL_PRIORITY_CLASS # noqa: F811 - HIGH_PRIORITY_CLASS = _psutil_windows.HIGH_PRIORITY_CLASS # noqa: F811 - IDLE_PRIORITY_CLASS = _psutil_windows.IDLE_PRIORITY_CLASS # noqa: F811 - NORMAL_PRIORITY_CLASS = _psutil_windows.NORMAL_PRIORITY_CLASS # noqa: F811 - REALTIME_PRIORITY_CLASS = 
_psutil_windows.REALTIME_PRIORITY_CLASS # noqa: F811 - -IOPRIO_VERYLOW: int -IOPRIO_LOW: int -IOPRIO_NORMAL: int -IOPRIO_HIGH: int - -class IOPriority(enum.IntEnum): - IOPRIO_VERYLOW = 0 - IOPRIO_LOW = 1 - IOPRIO_NORMAL = 2 - IOPRIO_HIGH = 3 - -pinfo_map: Any - -class scputimes(NamedTuple): - user: float - system: float - idle: float - interrupt: float - dpc: float - -class svmem(NamedTuple): - total: int - available: int - percent: float - used: int - free: int - -class pmem(NamedTuple): - rss: Any - vms: Any - num_page_faults: Any - peak_wset: Any - wset: Any - peak_paged_pool: Any - paged_pool: Any - peak_nonpaged_pool: Any - nonpaged_pool: Any - pagefile: Any - peak_pagefile: Any - private: Any - -class pfullmem(NamedTuple): - rss: Incomplete - vms: Incomplete - num_page_faults: Incomplete - peak_wset: Incomplete - wset: Incomplete - peak_paged_pool: Incomplete - paged_pool: Incomplete - peak_nonpaged_pool: Incomplete - nonpaged_pool: Incomplete - pagefile: Incomplete - peak_pagefile: Incomplete - private: Incomplete - uss: Incomplete - -class pmmap_grouped(NamedTuple): - path: Any - rss: Any - -pmmap_ext: Any - -class pio(NamedTuple): - read_count: Any - write_count: Any - read_bytes: Any - write_bytes: Any - other_count: Any - other_bytes: Any - -def convert_dos_path(s): ... -def getpagesize(): ... -def virtual_memory() -> svmem: ... -def swap_memory(): ... - -disk_io_counters: Any - -def disk_usage(path): ... -def disk_partitions(all): ... -def cpu_times(): ... -def per_cpu_times(): ... -def cpu_count_logical(): ... -def cpu_count_cores() -> int | None: ... -def cpu_stats(): ... -def cpu_freq(): ... -def getloadavg(): ... -def net_connections(kind, _pid: int = -1): ... -def net_if_stats(): ... -def net_io_counters(): ... -def net_if_addrs(): ... -def sensors_battery(): ... -def boot_time(): ... -def users(): ... -def win_service_iter() -> Iterable[WindowsService]: ... -def win_service_get(name): ... 
- -class WindowsService: - def __init__(self, name, display_name) -> None: ... - def __eq__(self, other): ... - def __ne__(self, other): ... - def name(self): ... - def display_name(self): ... - def binpath(self): ... - def username(self): ... - def start_type(self): ... - def pid(self): ... - def status(self): ... - def description(self): ... - def as_dict(self): ... - -pids: Any -pid_exists: Any -ppid_map: Any - -def is_permission_err(exc): ... -def convert_oserror(exc, pid=None, name=None): ... -def wrap_exceptions(fun): ... -def retry_error_partial_copy(fun): ... - -class Process: - pid: Any - def __init__(self, pid) -> None: ... - def oneshot_enter(self) -> None: ... - def oneshot_exit(self) -> None: ... - def name(self): ... - def exe(self): ... - def cmdline(self): ... - def environ(self): ... - def ppid(self): ... - def memory_info(self): ... - def memory_full_info(self): ... - def memory_maps(self) -> None: ... - def kill(self): ... - def send_signal(self, sig) -> None: ... - def wait(self, timeout=None): ... - def username(self): ... - def create_time(self, fast_only: bool = False): ... - def num_threads(self): ... - def threads(self): ... - def cpu_times(self): ... - def suspend(self) -> None: ... - def resume(self) -> None: ... - def cwd(self): ... - def open_files(self): ... - def net_connections(self, kind: str = "inet"): ... - def nice_get(self): ... - def nice_set(self, value): ... - def ionice_get(self): ... - def ionice_set(self, ioclass, value) -> None: ... - def io_counters(self) -> pio: ... - def status(self): ... - def cpu_affinity_get(self): ... - def cpu_affinity_set(self, value): ... - def num_handles(self): ... - def num_ctx_switches(self): ... 
+import sys + +if sys.platform == "win32": + import enum + from collections.abc import Callable, Iterable, Iterator + from signal import Signals + from typing import Final, Literal, TypedDict, TypeVar, overload, type_check_only + from typing_extensions import ParamSpec + + from psutil import _psutil_windows + from psutil._common import ( + ENCODING as ENCODING, + AccessDenied as AccessDenied, + NoSuchProcess as NoSuchProcess, + TimeoutExpired as TimeoutExpired, + conn_tmap as conn_tmap, + conn_to_ntuple as conn_to_ntuple, + debug as debug, + isfile_strict as isfile_strict, + memoize as memoize, + memoize_when_activated as memoize_when_activated, + parse_environ_block as parse_environ_block, + usage_percent as usage_percent, + ) + from psutil._psutil_windows import ( + ABOVE_NORMAL_PRIORITY_CLASS as ABOVE_NORMAL_PRIORITY_CLASS, + BELOW_NORMAL_PRIORITY_CLASS as BELOW_NORMAL_PRIORITY_CLASS, + HIGH_PRIORITY_CLASS as HIGH_PRIORITY_CLASS, + IDLE_PRIORITY_CLASS as IDLE_PRIORITY_CLASS, + NORMAL_PRIORITY_CLASS as NORMAL_PRIORITY_CLASS, + REALTIME_PRIORITY_CLASS as REALTIME_PRIORITY_CLASS, + ) + + from . 
import _ntuples as ntp + + __extra__all__: Final[list[str]] + CONN_DELETE_TCB: Final = "DELETE_TCB" + ERROR_PARTIAL_COPY: Final = 299 + PYPY: Final[bool] + + class AddressFamily(enum.IntEnum): + AF_LINK = -1 + + AF_LINK: Final = AddressFamily.AF_LINK + TCP_STATUSES: Final[dict[int, str]] + + # These noqas workaround https://github.com/astral-sh/ruff/issues/10874 + class Priority(enum.IntEnum): + ABOVE_NORMAL_PRIORITY_CLASS = _psutil_windows.ABOVE_NORMAL_PRIORITY_CLASS # noqa: F811 + BELOW_NORMAL_PRIORITY_CLASS = _psutil_windows.BELOW_NORMAL_PRIORITY_CLASS # noqa: F811 + HIGH_PRIORITY_CLASS = _psutil_windows.HIGH_PRIORITY_CLASS # noqa: F811 + IDLE_PRIORITY_CLASS = _psutil_windows.IDLE_PRIORITY_CLASS # noqa: F811 + NORMAL_PRIORITY_CLASS = _psutil_windows.NORMAL_PRIORITY_CLASS # noqa: F811 + REALTIME_PRIORITY_CLASS = _psutil_windows.REALTIME_PRIORITY_CLASS # noqa: F811 + + class IOPriority(enum.IntEnum): + IOPRIO_VERYLOW = 0 + IOPRIO_LOW = 1 + IOPRIO_NORMAL = 2 + IOPRIO_HIGH = 3 + + IOPRIO_VERYLOW: Final = IOPriority.IOPRIO_VERYLOW + IOPRIO_LOW: Final = IOPriority.IOPRIO_LOW + IOPRIO_NORMAL: Final = IOPriority.IOPRIO_NORMAL + IOPRIO_HIGH: Final = IOPriority.IOPRIO_HIGH + + pinfo_map: Final[dict[str, int]] + + _P = ParamSpec("_P") + _R = TypeVar("_R") + + def convert_dos_path(s: str) -> str: ... + def getpagesize() -> int: ... + def virtual_memory() -> ntp.svmem: ... + def swap_memory() -> ntp.sswap: ... + + heap_info = _psutil_windows.heap_info + heap_trim = _psutil_windows.heap_trim + disk_io_counters = _psutil_windows.disk_io_counters + + def disk_usage(path: str) -> ntp.sdiskusage: ... + def disk_partitions(all: bool) -> list[ntp.sdiskpart]: ... + def cpu_times() -> ntp.scputimes: ... + def per_cpu_times() -> list[ntp.scputimes]: ... + def cpu_count_logical() -> int | None: ... + def cpu_count_cores() -> int | None: ... + def cpu_stats() -> ntp.scpustats: ... + def cpu_freq() -> list[ntp.scpufreq]: ... + def getloadavg() -> tuple[float, float, float]: ... 
+ @overload + def net_connections(kind: str, _pid: Literal[-1] = -1) -> list[ntp.sconn]: ... + @overload + def net_connections(kind: str, _pid: int = -1) -> list[ntp.pconn]: ... + def net_if_stats() -> dict[str, ntp.snicstats]: ... + def net_io_counters() -> dict[str, tuple[int, int, int, int, int, int, int, int]]: ... + def net_if_addrs() -> list[tuple[str, int, str, str | None, None, None]]: ... + def sensors_battery() -> ntp.sbattery | None: ... + def boot_time() -> float: ... + def users() -> list[ntp.suser]: ... + def win_service_iter() -> Iterator[WindowsService]: ... + def win_service_get(name: str) -> WindowsService: ... + @type_check_only + class _WindowsServiceAttrs(TypedDict): + name: str + display_name: str | None + description: str + binpath: str + username: str + start_type: str + status: str + pid: int | None + + class WindowsService: + def __init__(self, name: str, display_name: str | None) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def name(self) -> str: ... + def display_name(self) -> str | None: ... + def binpath(self) -> str: ... + def username(self) -> str: ... + def start_type(self) -> str: ... + def pid(self) -> int: ... + def status(self) -> str: ... + def description(self) -> str: ... + def as_dict(self) -> _WindowsServiceAttrs: ... + + pids = _psutil_windows.pids + pid_exists = _psutil_windows.pid_exists + ppid_map = _psutil_windows.ppid_map + + def is_permission_err(exc: OSError) -> bool: ... + @overload + def convert_oserror(exc: PermissionError, pid: int | None = None, name: str | None = None) -> AccessDenied: ... + @overload + def convert_oserror(exc: OSError, pid: int | None = None, name: str | None = None) -> AccessDenied | NoSuchProcess: ... + def wrap_exceptions(fun: Callable[_P, _R]) -> Callable[_P, _R]: ... + def retry_error_partial_copy(fun: Callable[_P, _R]) -> Callable[_P, _R]: ... 
+ + class Process: + __slots__ = ["_cache", "_name", "_ppid", "pid"] + pid: int + def __init__(self, pid: int) -> None: ... + def oneshot_enter(self) -> None: ... + def oneshot_exit(self) -> None: ... + def name(self) -> str: ... + def exe(self) -> str: ... + def cmdline(self) -> list[str]: ... + def environ(self) -> dict[str, str]: ... + def ppid(self) -> int: ... + def memory_info(self) -> ntp.pmem: ... + def memory_full_info(self) -> ntp.pfullmem: ... + def memory_maps(self) -> Iterator[tuple[str, str, str, int]]: ... + def kill(self) -> None: ... + def send_signal(self, sig: Literal[Signals.SIGTERM, Signals.CTRL_C_EVENT, Signals.CTRL_BREAK_EVENT]) -> None: ... + def wait(self, timeout: float | None = None) -> int | None: ... + def username(self) -> str: ... + def create_time(self, fast_only: bool = False) -> float: ... + def num_threads(self) -> int: ... + def threads(self) -> list[ntp.pthread]: ... + def cpu_times(self) -> ntp.pcputimes: ... + def suspend(self) -> None: ... + def resume(self) -> None: ... + def cwd(self) -> str: ... + def open_files(self) -> list[ntp.popenfile]: ... + def net_connections(self, kind: str = "inet") -> list[ntp.pconn]: ... + def nice_get(self) -> Priority: ... + def nice_set(self, value: int) -> None: ... + def ionice_get(self) -> IOPriority: ... + def ionice_set(self, ioclass: int, value: None) -> None: ... + def io_counters(self) -> ntp.pio: ... + def status(self) -> Literal["stopped", "running"]: ... + def cpu_affinity_get(self) -> list[int]: ... + def cpu_affinity_set(self, value: Iterable[int]) -> None: ... + def num_handles(self) -> int: ... + def num_ctx_switches(self) -> ntp.pctxsw: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pycurl/pycurl.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pycurl/pycurl.pyi index 36a44e6ecb..f5920dd856 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pycurl/pycurl.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/pycurl/pycurl.pyi @@ -1,6 +1,6 @@ import sys from _typeshed import Incomplete -from typing import Final, final +from typing import Any, Final, final from typing_extensions import Self version: str @@ -22,8 +22,10 @@ class Curl: def perform(self) -> None: ... def perform_rb(self) -> bytes: ... def perform_rs(self) -> str: ... - def getinfo(self, info): ... - def getinfo_raw(self, info): ... + # For getinfo and getinfo_raw, the exact return type depends on the passed value: + # http://pycurl.io/docs/latest/curlobject.html#pycurl.Curl.getinfo + def getinfo(self, info: int) -> Any: ... + def getinfo_raw(self, info: int) -> Any: ... def reset(self) -> None: ... def unsetopt(self, option: int): ... def pause(self, bitmask): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyinstaller/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/pyinstaller/METADATA.toml index 0ec68a151e..e7a906e9ad 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pyinstaller/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyinstaller/METADATA.toml @@ -1,2 +1,2 @@ -version = "6.16.*" +version = "6.17.*" upstream_repository = "https://github.com/pyinstaller/pyinstaller" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-crontab/crontab.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-crontab/crontab.pyi index c228171d38..a33bb1193e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-crontab/crontab.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-crontab/crontab.pyi @@ -1,6 +1,6 @@ import re import subprocess -from _typeshed import Incomplete, StrPath +from _typeshed import StrPath from builtins import range as _range from collections import OrderedDict from collections.abc import Callable, Generator, Iterable, Iterator @@ -66,7 +66,7 @@ class CronTab: crons: list[CronItem] | None filen: str | None cron_command: str - env: OrderedVariableList[Incomplete, Incomplete] | None + env: OrderedVariableList[str, str] | None root: bool intab: str | None tabfile: str | None @@ -132,7 +132,7 @@ class CronItem: comment: str command: str | None last_run: datetime | None - env: OrderedVariableList[Incomplete, Incomplete] + env: OrderedVariableList[str, str] pre_comment: bool marker: str | None stdin: str | None @@ -141,7 +141,7 @@ class CronItem: def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... @classmethod - def from_line(cls, line: str, user: str | None = ..., cron: Incomplete | None = ...) -> Self: ... + def from_line(cls, line: str, user: str | None = ..., cron: CronTab | None = ...) -> Self: ... def delete(self) -> None: ... 
def set_command(self, cmd: str, parse_stdin: bool = ...) -> None: ... def set_comment(self, cmt: str, pre_comment: bool = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/parser/_parser.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/parser/_parser.pyi index e4784c89c1..9119ec25d7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/parser/_parser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/parser/_parser.pyi @@ -1,5 +1,5 @@ import re -from _typeshed import Incomplete, SupportsRead +from _typeshed import SupportsRead from collections.abc import Callable, Mapping from datetime import _TzInfo, datetime from io import StringIO @@ -58,22 +58,21 @@ class parserinfo: def convertyear(self, year: int, century_specified: bool = False) -> int: ... def validate(self, res: datetime) -> bool: ... -class _ymd(list[Incomplete]): +class _ymd(list[int]): century_specified: bool dstridx: int | None mstridx: int | None ystridx: int | None - def __init__(self, *args, **kwargs) -> None: ... @property def has_year(self) -> bool: ... @property def has_month(self) -> bool: ... @property def has_day(self) -> bool: ... - def could_be_day(self, value): ... - def append(self, val, label=None): ... - def _resolve_from_stridxs(self, strids): ... - def resolve_ymd(self, yearfirst: bool | None, dayfirst: bool | None): ... + def could_be_day(self, value: int) -> bool: ... + def append(self, val: str | int, label: str | None = None) -> None: ... + def _resolve_from_stridxs(self, strids: dict[str, int]) -> tuple[int, int, int]: ... + def resolve_ymd(self, yearfirst: bool | None, dayfirst: bool | None) -> tuple[int, int, int]: ... 
class parser: info: parserinfo diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/rrule.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/rrule.pyi index 53bdbe4ba4..dd712283fd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/rrule.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/rrule.pyi @@ -1,9 +1,10 @@ import datetime -from _typeshed import Incomplete -from collections.abc import Generator, Iterable, Iterator, Sequence +from collections.abc import Callable, Generator, Iterable, Iterator, Mapping, Sequence from typing import Final, Literal, overload from typing_extensions import Self, TypeAlias +from dateutil.parser._parser import _TzInfos + from ._common import weekday as weekdaybase __all__ = [ @@ -58,13 +59,15 @@ SU: weekday class rrulebase: def __init__(self, cache: bool | None = False) -> None: ... def __iter__(self) -> Iterator[datetime.datetime]: ... - def __getitem__(self, item): ... - def __contains__(self, item) -> bool: ... + def __getitem__(self, item: int | slice) -> datetime.datetime: ... + def __contains__(self, item: datetime.datetime) -> bool: ... def count(self) -> int | None: ... - def before(self, dt, inc: bool = False): ... - def after(self, dt, inc: bool = False): ... - def xafter(self, dt, count=None, inc: bool = False) -> Generator[Incomplete]: ... - def between(self, after, before, inc: bool = False, count: int = 1) -> list[Incomplete]: ... + def before(self, dt: datetime.datetime, inc: bool = False): ... + def after(self, dt: datetime.datetime, inc: bool = False): ... + def xafter(self, dt: datetime.datetime, count: int | None = None, inc: bool = False) -> Generator[datetime.datetime]: ... + def between( + self, after: datetime.datetime, before: datetime.datetime, inc: bool = False, count: int = 1 + ) -> list[datetime.datetime]: ... 
class rrule(rrulebase): def __init__( @@ -156,22 +159,22 @@ class _iterinfo: class rruleset(rrulebase): class _genitem: - dt: Incomplete - genlist: list[Incomplete] - gen: Incomplete - def __init__(self, genlist, gen) -> None: ... + dt: datetime.datetime + genlist: list[Self] + gen: Iterator[datetime.datetime] + def __init__(self, genlist: list[Self], gen: Iterator[datetime.datetime]) -> None: ... def __next__(self) -> None: ... next = __next__ - def __lt__(self, other) -> bool: ... - def __gt__(self, other) -> bool: ... - def __eq__(self, other) -> bool: ... - def __ne__(self, other) -> bool: ... + def __lt__(self, other: Self) -> bool: ... + def __gt__(self, other: Self) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... def __init__(self, cache: bool | None = False) -> None: ... def rrule(self, rrule: _RRule) -> None: ... - def rdate(self, rdate) -> None: ... - def exrule(self, exrule) -> None: ... - def exdate(self, exdate) -> None: ... + def rdate(self, rdate: datetime.datetime) -> None: ... + def exrule(self, exrule: _RRule) -> None: ... + def exdate(self, exdate: datetime.datetime) -> None: ... class _rrulestr: @overload @@ -185,8 +188,8 @@ class _rrulestr: unfold: bool = False, compatible: bool = False, ignoretz: bool = False, - tzids=None, - tzinfos=None, + tzids: Callable[[str], datetime.tzinfo] | Mapping[str, datetime.tzinfo] | None = None, + tzinfos: _TzInfos | None = None, ) -> rruleset: ... @overload def __call__( @@ -199,8 +202,8 @@ class _rrulestr: unfold: bool = False, forceset: bool = False, ignoretz: bool = False, - tzids=None, - tzinfos=None, + tzids: Callable[[str], datetime.tzinfo] | Mapping[str, datetime.tzinfo] | None = None, + tzinfos: _TzInfos | None = None, ) -> rruleset: ... 
@overload def __call__( @@ -213,8 +216,8 @@ class _rrulestr: forceset: bool = False, compatible: bool = False, ignoretz: bool = False, - tzids=None, - tzinfos=None, + tzids: Callable[[str], datetime.tzinfo] | Mapping[str, datetime.tzinfo] | None = None, + tzinfos: _TzInfos | None = None, ) -> rrule | rruleset: ... rrulestr: _rrulestr diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/_common.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/_common.pyi index 7e9b8957c3..23bce452e8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/_common.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/_common.pyi @@ -1,12 +1,17 @@ import abc +from collections.abc import Callable from datetime import datetime, timedelta, tzinfo -from typing import ClassVar +from typing import ClassVar, TypeVar +from typing_extensions import ParamSpec ZERO: timedelta __all__ = ["tzname_in_python2", "enfold"] -def tzname_in_python2(namefunc): ... +_P = ParamSpec("_P") +_R = TypeVar("_R") + +def tzname_in_python2(namefunc: Callable[_P, _R]) -> Callable[_P, _R]: ... def enfold(dt: datetime, fold: int = 1): ... # Doesn't actually have ABCMeta as the metaclass at runtime, @@ -24,5 +29,5 @@ class tzrangebase(_tzinfo): def fromutc(self, dt: datetime) -> datetime: ... def is_ambiguous(self, dt: datetime) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] - def __ne__(self, other): ... + def __ne__(self, other: object) -> bool: ... 
__reduce__ = object.__reduce__ diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/tz.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/tz.pyi index 475ace6a4b..0fad227656 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/tz.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/tz.pyi @@ -1,6 +1,8 @@ import sys +from _typeshed import Unused from datetime import datetime, timedelta, tzinfo -from typing import ClassVar, Literal, Protocol, TypeVar, type_check_only +from typing import Any, ClassVar, Literal, Protocol, TypeVar, type_check_only +from typing_extensions import Self from ..relativedelta import relativedelta from ._common import _tzinfo, enfold as enfold, tzrangebase @@ -23,26 +25,26 @@ class tzutc(tzinfo): def tzname(self, dt: datetime | None) -> str: ... def is_ambiguous(self, dt: datetime | None) -> bool: ... def fromutc(self, dt: _DT) -> _DT: ... - def __eq__(self, other): ... + def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] - def __ne__(self, other): ... + def __ne__(self, other: object) -> bool: ... __reduce__ = object.__reduce__ UTC: tzutc class tzoffset(tzinfo): - def __init__(self, name, offset) -> None: ... + def __init__(self, name: str | None, offset: float | timedelta) -> None: ... def utcoffset(self, dt: datetime | None) -> timedelta | None: ... def dst(self, dt: datetime | None) -> timedelta | None: ... def is_ambiguous(self, dt: datetime | None) -> bool: ... def tzname(self, dt: datetime | None) -> str: ... def fromutc(self, dt: _DT) -> _DT: ... - def __eq__(self, other): ... + def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] - def __ne__(self, other): ... + def __ne__(self, other: object) -> bool: ... __reduce__ = object.__reduce__ @classmethod - def instance(cls, name, offset) -> tzoffset: ... 
+ def instance(cls, name: str | None, offset: float | timedelta) -> tzoffset: ... class tzlocal(_tzinfo): def __init__(self) -> None: ... @@ -50,9 +52,9 @@ class tzlocal(_tzinfo): def dst(self, dt: datetime | None) -> timedelta | None: ... def tzname(self, dt: datetime | None) -> str: ... def is_ambiguous(self, dt: datetime | None) -> bool: ... - def __eq__(self, other): ... + def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] - def __ne__(self, other): ... + def __ne__(self, other: object) -> bool: ... __reduce__ = object.__reduce__ class _ttinfo: @@ -65,9 +67,9 @@ class _ttinfo: isgmt: bool dstoffset: timedelta def __init__(self) -> None: ... - def __eq__(self, other): ... + def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] - def __ne__(self, other): ... + def __ne__(self, other: object) -> bool: ... @type_check_only class _TZFileReader(Protocol): @@ -82,11 +84,11 @@ class tzfile(_tzinfo): def utcoffset(self, dt: datetime | None) -> timedelta | None: ... def dst(self, dt: datetime | None) -> timedelta | None: ... def tzname(self, dt: datetime | None) -> str: ... - def __eq__(self, other): ... + def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] - def __ne__(self, other): ... - def __reduce__(self): ... - def __reduce_ex__(self, protocol): ... + def __ne__(self, other: object) -> bool: ... + def __reduce__(self) -> tuple[type[Self], tuple[None, str], dict[str, Any]]: ... + def __reduce_ex__(self, protocol: Unused) -> tuple[type[Self], tuple[None, str], dict[str, Any]]: ... class tzrange(tzrangebase): hasdst: bool @@ -100,13 +102,13 @@ class tzrange(tzrangebase): end: relativedelta | None = None, ) -> None: ... def transitions(self, year: int) -> tuple[datetime, datetime]: ... - def __eq__(self, other): ... + def __eq__(self, other: object) -> bool: ... 
class tzstr(tzrange): hasdst: bool def __init__(self, s: str, posix_offset: bool = False) -> None: ... @classmethod - def instance(cls, name, offset) -> tzoffset: ... + def instance(cls, name: str | None, offset: float | timedelta) -> tzoffset: ... @type_check_only class _ICalReader(Protocol): @@ -116,8 +118,8 @@ class _ICalReader(Protocol): class tzical: def __init__(self, fileobj: str | _ICalReader) -> None: ... - def keys(self): ... - def get(self, tzid=None): ... + def keys(self) -> list[str]: ... + def get(self, tzid: str | None = None) -> tzinfo | None: ... TZFILES: list[str] TZPATHS: list[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/win.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/win.pyi index bae9c4e995..8d02d23acb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/win.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/win.pyi @@ -1,5 +1,5 @@ import sys -from ctypes import _Pointer, c_wchar +from ctypes import _NameTypes, _Pointer, c_wchar from datetime import datetime, timedelta from typing import Any, ClassVar, Final @@ -18,9 +18,10 @@ if sys.platform == "win32": class tzres: p_wchar: ClassVar[type[_Pointer[c_wchar]]] - def __init__(self, tzres_loc="tzres.dll"): ... - def load_name(self, offset): ... - def name_from_string(self, tzname_str: str): ... + tzres_loc: _NameTypes + def __init__(self, tzres_loc: _NameTypes = "tzres.dll") -> None: ... + def load_name(self, offset: int) -> str: ... + def name_from_string(self, tzname_str: str) -> str: ... def picknthweekday(year: int, month: int, dayofweek: int, hour: int, minute: int, whichweek: int) -> datetime: ... def valuestodict(key: _KeyType) -> dict[str, Any]: ... 
# keys and values in dict are results of winreg.EnumValue() function diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/zoneinfo/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/zoneinfo/__init__.pyi index 3be08148bf..65627ecdd7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/zoneinfo/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/zoneinfo/__init__.pyi @@ -16,7 +16,8 @@ ZONEFILENAME: Final[str] METADATA_FN: Final[str] class tzfile(_tzfile): - def __reduce__(self) -> tuple[Callable[[str], Self], tuple[str, ...]]: ... + # source code does this override, changing the type + def __reduce__(self) -> tuple[Callable[[str], Self], tuple[str]]: ... # type: ignore[override] def getzoneinfofile_stream() -> BytesIO | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/zoneinfo/rebuild.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/zoneinfo/rebuild.pyi index 9aae2bdcba..a8a8655b21 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/zoneinfo/rebuild.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/zoneinfo/rebuild.pyi @@ -1,4 +1,8 @@ from _typeshed import StrOrBytesPath from collections.abc import Iterable -def rebuild(filename: StrOrBytesPath, tag=None, format: str = "gz", zonegroups: Iterable[str] = [], metadata=None) -> None: ... +from ..zoneinfo import _MetadataType + +def rebuild( + filename: StrOrBytesPath, tag=None, format: str = "gz", zonegroups: Iterable[str] = [], metadata: _MetadataType | None = None +) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-jenkins/jenkins/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-jenkins/jenkins/__init__.pyi index 4dd80855bd..a605ab5c0e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-jenkins/jenkins/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-jenkins/jenkins/__init__.pyi @@ -1,8 +1,8 @@ from _typeshed import Incomplete from collections.abc import Mapping, MutableMapping, Sequence from re import Pattern -from typing import Any, Final, Literal, overload -from typing_extensions import TypeAlias, deprecated +from typing import Any, Final, Literal, TypedDict, overload, type_check_only +from typing_extensions import Required, TypeAlias, deprecated import requests from requests.models import Request, Response @@ -92,6 +92,15 @@ class WrappedSession(requests.Session): _JSONValue: TypeAlias = Any # too many possibilities to express _JSON: TypeAlias = dict[str, _JSONValue] +@type_check_only +class _Job(TypedDict, total=False): + _class: Required[str] + url: Required[str] + color: str + name: Required[str] + fullname: Required[str] + jobs: list[_Job] + class Jenkins: server: str auth: _Auth | None @@ -128,10 +137,8 @@ class Jenkins: def get_plugins_info(self, depth: int = 2) -> _JSON: ... def get_plugin_info(self, name: str, depth: int = 2) -> _JSON: ... def get_plugins(self, depth: int = 2) -> _JSON: ... - def get_jobs( - self, folder_depth: int = 0, folder_depth_per_request: int = 10, view_name: str | None = None - ) -> list[dict[str, str]]: ... - def get_all_jobs(self, folder_depth: int | None = None, folder_depth_per_request: int = 10) -> list[dict[str, str]]: ... + def get_jobs(self, folder_depth: int = 0, folder_depth_per_request: int = 10, view_name: str | None = None) -> list[_Job]: ... + def get_all_jobs(self, folder_depth: int | None = None, folder_depth_per_request: int = 10) -> list[_Job]: ... 
def copy_job(self, from_name: str, to_name: str) -> None: ... def rename_job(self, from_name: str, to_name: str) -> None: ... def delete_job(self, name: str) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pytz/pytz/lazy.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pytz/pytz/lazy.pyi index 00e43d79cd..b0b5ca565a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pytz/pytz/lazy.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/pytz/pytz/lazy.pyi @@ -13,8 +13,8 @@ class LazyDict(DictMixin[str, _VT]): class LazyList(list[_T]): # does not return `Self` type: - def __new__(cls, fill_iter=None) -> LazyList[_T]: ... + def __new__(cls, fill_iter: _T | None = None) -> LazyList[_T]: ... class LazySet(set[_T]): # does not return `Self` type: - def __new__(cls, fill_iter=None) -> LazySet[_T]: ... + def __new__(cls, fill_iter: _T | None = None) -> LazySet[_T]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyxdg/xdg/RecentFiles.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pyxdg/xdg/RecentFiles.pyi index aab686b6fc..742c9bdf68 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pyxdg/xdg/RecentFiles.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyxdg/xdg/RecentFiles.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, StrOrBytesPath, StrPath +from _typeshed import StrOrBytesPath, StrPath from collections.abc import Iterable class RecentFiles: @@ -8,11 +8,9 @@ class RecentFiles: def parse(self, filename: StrPath | None = None) -> None: ... def write(self, filename: StrOrBytesPath | None = None) -> None: ... def getFiles( - self, mimetypes: Iterable[str] | None = None, groups: Iterable[Incomplete] | None = None, limit: int = 0 - ) -> list[StrPath]: ... - def addFile( - self, item: StrPath, mimetype: str, groups: Iterable[Incomplete] | None = None, private: bool = False - ) -> None: ... 
+ self, mimetypes: Iterable[str] | None = None, groups: Iterable[str] | None = None, limit: int = 0 + ) -> list[RecentFile]: ... + def addFile(self, item: StrPath, mimetype: str, groups: Iterable[str] | None = None, private: bool = False) -> None: ... def deleteFile(self, item: RecentFile | StrPath) -> None: ... def sort(self) -> None: ... @@ -21,7 +19,7 @@ class RecentFile: MimeType: str Timestamp: str Private: bool - Groups: list[Incomplete] + Groups: list[str] def __init__(self) -> None: ... def __cmp__(self, other: RecentFile) -> int: ... def __lt__(self, other: RecentFile) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/qrbill/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/qrbill/METADATA.toml index b46abf95c7..033646f396 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/qrbill/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/qrbill/METADATA.toml @@ -1,3 +1,3 @@ -version = "1.1.*" +version = "1.2.*" upstream_repository = "https://github.com/claudep/swiss-qr-bill" requires = ["types-qrcode"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/qrbill/qrbill/bill.pyi b/packages/pyright-internal/typeshed-fallback/stubs/qrbill/qrbill/bill.pyi index 651963333d..0011752309 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/qrbill/qrbill/bill.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/qrbill/qrbill/bill.pyi @@ -94,7 +94,14 @@ class QRBill: creditor: CombinedAddress | StructuredAddress final_creditor: CombinedAddress | StructuredAddress | None debtor: CombinedAddress | StructuredAddress | None + ref_type: str reference_number: str | None + account: str + account_is_qriban: bool + amount: str | None + currency: Literal["CHF", "EUR"] + additional_information: str + billing_information: str @overload def __init__( self, @@ -108,6 +115,7 @@ class QRBill: reference_number: str | None = None, extra_infos: Literal[""] = "", additional_information: str = "", 
+ billing_information: str = "", alt_procs: list[str] | tuple[()] | tuple[str] | tuple[str, str] = (), language: Literal["en", "de", "fr", "it"] = "en", top_line: bool = True, @@ -129,6 +137,7 @@ class QRBill: reference_number: None = None, extra_infos: str = "", additional_information: str = "", + billing_information: str = "", alt_procs: list[str] | tuple[()] | tuple[str] | tuple[str, str] = (), language: Literal["en", "de", "fr", "it"] = "en", top_line: bool = True, @@ -150,6 +159,7 @@ class QRBill: *, extra_infos: str, additional_information: str = "", + billing_information: str = "", alt_procs: list[str] | tuple[()] | tuple[str] | tuple[str, str] = (), language: Literal["en", "de", "fr", "it"] = "en", top_line: bool = True, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/regex/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/regex/METADATA.toml index e2e362f31f..d21d2b7423 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/regex/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/regex/METADATA.toml @@ -1,2 +1,2 @@ -version = "2025.9.18" +version = "2025.11.3" upstream_repository = "https://github.com/mrabarnett/mrab-regex" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/__init__.pyi index f310be6321..92e7589d01 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/__init__.pyi @@ -1 +1,64 @@ -from .regex import * +from ._main import * + +# Sync with regex._main.__all__ +__all__ = [ + "cache_all", + "compile", + "DEFAULT_VERSION", + "escape", + "findall", + "finditer", + "fullmatch", + "match", + "purge", + "search", + "split", + "splititer", + "sub", + "subf", + "subfn", + "subn", + "template", + "Scanner", + "A", + "ASCII", + "B", + "BESTMATCH", + "D", + "DEBUG", + "E", + "ENHANCEMATCH", + "S", 
+ "DOTALL", + "F", + "FULLCASE", + "I", + "IGNORECASE", + "L", + "LOCALE", + "M", + "MULTILINE", + "P", + "POSIX", + "R", + "REVERSE", + "T", + "TEMPLATE", + "U", + "UNICODE", + "V0", + "VERSION0", + "V1", + "VERSION1", + "X", + "VERBOSE", + "W", + "WORD", + "error", + "Regex", + "__version__", + "__doc__", + "RegexFlag", + "Pattern", + "Match", +] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/regex.pyi b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_main.pyi similarity index 95% rename from packages/pyright-internal/typeshed-fallback/stubs/regex/regex/regex.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_main.pyi index 990feb4c64..a37774c6eb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/regex.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_main.pyi @@ -11,6 +11,69 @@ _T = TypeVar("_T") __version__: str +# Sync with regex.__init__.__all__ +__all__ = [ + "cache_all", + "compile", + "DEFAULT_VERSION", + "escape", + "findall", + "finditer", + "fullmatch", + "match", + "purge", + "search", + "split", + "splititer", + "sub", + "subf", + "subfn", + "subn", + "template", + "Scanner", + "A", + "ASCII", + "B", + "BESTMATCH", + "D", + "DEBUG", + "E", + "ENHANCEMATCH", + "S", + "DOTALL", + "F", + "FULLCASE", + "I", + "IGNORECASE", + "L", + "LOCALE", + "M", + "MULTILINE", + "P", + "POSIX", + "R", + "REVERSE", + "T", + "TEMPLATE", + "U", + "UNICODE", + "V0", + "VERSION0", + "V1", + "VERSION1", + "X", + "VERBOSE", + "W", + "WORD", + "error", + "Regex", + "__version__", + "__doc__", + "RegexFlag", + "Pattern", + "Match", +] + def compile( pattern: AnyStr | Pattern[AnyStr], flags: int = 0, @@ -312,6 +375,9 @@ def cache_all(value: bool = True) -> None: ... @overload def cache_all(value: None) -> bool: ... def escape(pattern: AnyStr, special_only: bool = True, literal_spaces: bool = False) -> AnyStr: ... 
+ +DEFAULT_VERSION = RegexFlag.VERSION0 + def template(pattern: AnyStr | Pattern[AnyStr], flags: int = 0) -> Pattern[AnyStr]: ... Regex = compile diff --git a/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_regex.pyi b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_regex.pyi index d9515249b4..c80e1662c3 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_regex.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_regex.pyi @@ -6,7 +6,7 @@ from typing import Any, AnyStr, Generic, final from typing_extensions import Self -from .regex import Match, Pattern +from ._main import Match, Pattern @final class Splitter(Generic[AnyStr]): diff --git a/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_regex_core.pyi b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_regex_core.pyi index 7ca60c07e0..5778565d5f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_regex_core.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_regex_core.pyi @@ -3,7 +3,47 @@ from collections.abc import Callable from typing import Any, AnyStr, Generic from typing_extensions import TypeAlias -from .regex import Pattern +from ._main import Pattern + +__all__ = [ + "A", + "ASCII", + "B", + "BESTMATCH", + "D", + "DEBUG", + "E", + "ENHANCEMATCH", + "F", + "FULLCASE", + "I", + "IGNORECASE", + "L", + "LOCALE", + "M", + "MULTILINE", + "P", + "POSIX", + "R", + "REVERSE", + "S", + "DOTALL", + "T", + "TEMPLATE", + "U", + "UNICODE", + "V0", + "VERSION0", + "V1", + "VERSION1", + "W", + "WORD", + "X", + "VERBOSE", + "error", + "Scanner", + "RegexFlag", +] class error(Exception): def __init__(self, message: str, pattern: AnyStr | None = None, pos: int | None = None) -> None: ... 
@@ -44,42 +84,42 @@ class RegexFlag(enum.IntFlag): X = 0x40 VERBOSE = X -A = RegexFlag.A ASCII = RegexFlag.ASCII -B = RegexFlag.B BESTMATCH = RegexFlag.BESTMATCH -D = RegexFlag.D DEBUG = RegexFlag.DEBUG -E = RegexFlag.E ENHANCEMATCH = RegexFlag.ENHANCEMATCH -F = RegexFlag.F FULLCASE = RegexFlag.FULLCASE -I = RegexFlag.I IGNORECASE = RegexFlag.IGNORECASE -L = RegexFlag.L LOCALE = RegexFlag.LOCALE -M = RegexFlag.M MULTILINE = RegexFlag.MULTILINE -P = RegexFlag.P POSIX = RegexFlag.POSIX -R = RegexFlag.R REVERSE = RegexFlag.REVERSE -T = RegexFlag.T TEMPLATE = RegexFlag.TEMPLATE -S = RegexFlag.S DOTALL = RegexFlag.DOTALL -U = RegexFlag.U UNICODE = RegexFlag.UNICODE -V0 = RegexFlag.V0 +VERBOSE = RegexFlag.VERBOSE VERSION0 = RegexFlag.VERSION0 -V1 = RegexFlag.V1 VERSION1 = RegexFlag.VERSION1 -W = RegexFlag.W WORD = RegexFlag.WORD +A = RegexFlag.A +B = RegexFlag.B +D = RegexFlag.D +E = RegexFlag.E +F = RegexFlag.F +I = RegexFlag.I +L = RegexFlag.L +M = RegexFlag.M +P = RegexFlag.P +R = RegexFlag.R +S = RegexFlag.S +U = RegexFlag.U +V0 = RegexFlag.V0 +V1 = RegexFlag.V1 +W = RegexFlag.W X = RegexFlag.X -VERBOSE = RegexFlag.VERBOSE +T = RegexFlag.T -DEFAULT_VERSION: RegexFlag +DEFAULT_VERSION = VERSION1 _Lexicon: TypeAlias = list[tuple[AnyStr, Callable[[Scanner[AnyStr], AnyStr], Any]]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/reportlab/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/reportlab/METADATA.toml index e4f6253d99..a2e6db50cd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/reportlab/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/reportlab/METADATA.toml @@ -1,4 +1,4 @@ -version = "4.4.4" +version = "4.4.7" # GitHub mirror of https://hg.reportlab.com/hg-public/reportlab/file upstream_repository = "https://github.com/MrBitBucket/reportlab-mirror" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/reportlab/reportlab/lib/testutils.pyi 
b/packages/pyright-internal/typeshed-fallback/stubs/reportlab/reportlab/lib/testutils.pyi index 0dd2fa2241..7a55147e37 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/reportlab/reportlab/lib/testutils.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/reportlab/reportlab/lib/testutils.pyi @@ -1,40 +1,42 @@ +import re import unittest -from _typeshed import Incomplete -from configparser import ConfigParser +from _typeshed import Incomplete, StrPath, Unused +from configparser import ConfigParser, _SectionName +from types import ModuleType from typing import Final, Literal __version__: Final[str] -def haveRenderPM(): ... -def isWritable(D): ... +def invariantSeed(n: float | str | bytes | bytearray | None) -> None: ... +def haveRenderPM() -> ModuleType | Literal[False]: ... -RL_HOME: Incomplete -testsFolder: Incomplete - -DEJAVUSANS: tuple[ - Literal["DejaVuSans"], Literal["DejaVuSans-Bold"], Literal["DejaVuSans-Oblique"], Literal["DejaVuSans-BoldOblique"] -] = ... +DEJAVUSANS: Final = ("DejaVuSans", "DejaVuSans-Bold", "DejaVuSans-Oblique", "DejaVuSans-BoldOblique") def haveDejaVu() -> bool: ... -def setOutDir(name): ... -def mockUrlRead(name): ... -def outputfile(fn): ... +def isWritable(D: Unused) -> Literal[0, 1]: ... + +RL_HOME: str | None +testsFolder: str | None + +def setOutDir(name: str) -> str: ... +def mockUrlRead(name: str): ... +def outputfile(fn: StrPath | None) -> str: ... def printLocation(depth: int = 1) -> None: ... -def makeSuiteForClasses(*classes, testMethodPrefix=None): ... -def getCVSEntries(folder, files: int = 1, folders: int = 0): ... +def makeSuiteForClasses(*classes: type[unittest.TestCase], testMethodPrefix: str | None = None) -> unittest.TestSuite: ... +def getCVSEntries(folder: StrPath, files: bool | Literal[1, 0] = 1, folders: bool | Literal[1, 0] = 0) -> list[str]: ... class ExtConfigParser(ConfigParser): - pat: Incomplete - def getstringlist(self, section, option): ... 
+ pat: re.Pattern[str] + def getstringlist(self, section: _SectionName, option: str): ... class GlobDirectoryWalker: index: int - pattern: Incomplete - stack: Incomplete - files: Incomplete - directory: Incomplete - def __init__(self, directory, pattern: str = "*") -> None: ... - def __getitem__(self, index): ... + pattern: str + stack: list[str] + files: list[str] + directory: str + def __init__(self, directory: str, pattern: str = "*") -> None: ... + def __getitem__(self, index) -> str | None: ... def filterFiles(self, folder, files): ... class RestrictedGlobDirectoryWalker(GlobDirectoryWalker): @@ -64,9 +66,9 @@ class ScriptThatMakesFileTest(unittest.TestCase): def tearDown(self) -> None: ... def runTest(self) -> None: ... -def equalStrings(a, b, enc: str = "utf8"): ... +def equalStrings(a: str | bytes, b: str | bytes, enc: str = "utf8") -> bool: ... def eqCheck(r, x) -> None: ... -def rlextraNeeded(): ... +def rlextraNeeded() -> bool: ... def rlSkipIf(cond, reason, __module__=None): ... def rlSkipUnless(cond, reason, __module__=None): ... def rlSkip(reason, __module__=None): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/reportlab/reportlab/platypus/flowables.pyi b/packages/pyright-internal/typeshed-fallback/stubs/reportlab/reportlab/platypus/flowables.pyi index 76dc086703..da28237e38 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/reportlab/reportlab/platypus/flowables.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/reportlab/reportlab/platypus/flowables.pyi @@ -411,7 +411,7 @@ class ListItem: # TODO: Use Unpack for kwds with the ListStyle properties + value/spaceBefore/spaceAfter def __init__(self, flowables: _FlowableSublist, style: PropertySet | None = None, **kwds) -> None: ... 
-class ListFlowable(_Container, Flowable): +class ListFlowable(_Container, Flowable, _FindSplitterMixin): style: ListStyle # NOTE: style has to be a ListStyle, but this will be annoying with sheet["ul"] # TODO: Use Unpack for kwds with the ListStyle properties + spaceBefore/spaceAfter diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/exceptions.pyi index 130bce7250..24bfd70057 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/exceptions.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/exceptions.pyi @@ -1,3 +1,4 @@ +from json import JSONDecodeError as CompatJSONDecodeError from typing import Any from urllib3.exceptions import HTTPError as BaseHTTPError @@ -13,7 +14,7 @@ class RequestException(OSError): ) -> None: ... class InvalidJSONError(RequestException): ... -class JSONDecodeError(InvalidJSONError): ... +class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError): ... 
class HTTPError(RequestException): request: Request | PreparedRequest | Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/seaborn/seaborn/_core/data.pyi b/packages/pyright-internal/typeshed-fallback/stubs/seaborn/seaborn/_core/data.pyi index e607d5846f..35769b27bf 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/seaborn/seaborn/_core/data.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/seaborn/seaborn/_core/data.pyi @@ -3,8 +3,7 @@ from collections.abc import Mapping from typing import TypeVar, overload from pandas import DataFrame -from pandas.core.interchange.dataframe_protocol import DataFrame as DataFrameProtocol -from seaborn._core.typing import DataSource, VariableSpec +from seaborn._core.typing import DataSource, SupportsDataFrame, VariableSpec _T = TypeVar("_T", Mapping[Incomplete, Incomplete], None) @@ -22,5 +21,5 @@ class PlotData: @overload def handle_data_source(data: _T) -> _T: ... @overload -def handle_data_source(data: DataFrameProtocol) -> DataFrame: ... +def handle_data_source(data: SupportsDataFrame) -> DataFrame: ... def convert_dataframe_to_pandas(data: object) -> DataFrame: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/seaborn/seaborn/_core/typing.pyi b/packages/pyright-internal/typeshed-fallback/stubs/seaborn/seaborn/_core/typing.pyi index 0cfdd59d2f..415f818440 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/seaborn/seaborn/_core/typing.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/seaborn/seaborn/_core/typing.pyi @@ -10,8 +10,6 @@ from pandas import DataFrame, Index, Series, Timedelta, Timestamp @type_check_only class SupportsDataFrame(Protocol): - # `__dataframe__` should return pandas.core.interchange.dataframe_protocol.DataFrame - # but this class needs to be defined as a Protocol, not as an ABC. def __dataframe__(self, nan_as_null: bool = ..., allow_copy: bool = ...): ... 
ColumnName: TypeAlias = str | bytes | date | datetime | timedelta | bool | complex | Timestamp | Timedelta diff --git a/packages/pyright-internal/typeshed-fallback/stubs/seaborn/seaborn/categorical.pyi b/packages/pyright-internal/typeshed-fallback/stubs/seaborn/seaborn/categorical.pyi index b3c946d92d..eed93a8076 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/seaborn/seaborn/categorical.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/seaborn/seaborn/categorical.pyi @@ -27,7 +27,7 @@ def boxplot( dodge: bool | Literal["auto"] = "auto", width: float = 0.8, gap: float = 0, - whis: float = 1.5, + whis: float | tuple[float, float] = 1.5, linecolor: ColorType = "auto", linewidth: float | None = None, fliersize: float | None = None, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/__init__.pyi index d8d0b22c30..45c1a79416 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/__init__.pyi @@ -1,10 +1,12 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, StrPath from abc import abstractmethod -from collections.abc import Mapping, Sequence -from typing import Any, Literal, TypedDict, TypeVar, overload, type_check_only -from typing_extensions import NotRequired +from collections.abc import ItemsView, Iterable, Mapping, Sequence +from typing import Any, Literal, Protocol, TypedDict, TypeVar, overload, type_check_only +from typing_extensions import Never, NotRequired from ._distutils.cmd import Command as _Command +from ._distutils.dist import Distribution as _Distribution +from ._distutils.extension import Extension as _Extension from .command.alias import alias from .command.bdist_egg import bdist_egg from .command.bdist_rpm import bdist_rpm @@ -33,6 +35,9 @@ from .extension import 
Extension as Extension from .warnings import SetuptoolsDeprecationWarning as SetuptoolsDeprecationWarning _CommandT = TypeVar("_CommandT", bound=_Command) +_DistributionT = TypeVar("_DistributionT", bound=_Distribution, default=Distribution) +_KT = TypeVar("_KT") +_VT_co = TypeVar("_VT_co", covariant=True) __all__ = [ "setup", @@ -47,6 +52,24 @@ __all__ = [ __version__: str +# We need any Command subclass to be valid +# Any: pyright would accept using covariance in __setitem__, but mypy won't let a dict be assignable to this protocol +# This is unsound, but it's a quirk of setuptools' internals +@type_check_only +class _DictLike(Protocol[_KT, _VT_co]): + # See note about using _VT_co instead of Any + def get(self, key: _KT, default: Any | None = None, /) -> _VT_co | None: ... + def items(self) -> ItemsView[_KT, _VT_co]: ... + def keys(self) -> Iterable[_KT]: ... + def __getitem__(self, key: _KT, /) -> _VT_co: ... + def __contains__(self, x: object, /) -> bool: ... + +@type_check_only +class _MutableDictLike(_DictLike[_KT, _VT_co], Protocol): + # See note about using _VT_co instead of Any + def __setitem__(self, key: _KT, value: Any, /) -> None: ... + def setdefault(self, key: _KT, default: Any, /) -> _VT_co: ... + @type_check_only class _BuildInfo(TypedDict): sources: list[str] | tuple[str, ...] 
@@ -60,49 +83,83 @@ find_namespace_packages = _Finder.find def setup( *, - name: str = ..., - version: str = ..., - description: str = ..., - long_description: str = ..., - long_description_content_type: str = ..., - author: str = ..., - author_email: str = ..., - maintainer: str = ..., - maintainer_email: str = ..., - url: str = ..., - download_url: str = ..., - packages: list[str] = ..., - py_modules: list[str] = ..., - scripts: list[str] = ..., - ext_modules: Sequence[Extension] = ..., - classifiers: list[str] = ..., - distclass: type[Distribution] = ..., - script_name: str = ..., - script_args: list[str] = ..., - options: Mapping[str, Incomplete] = ..., - license: str = ..., - keywords: list[str] | str = ..., - platforms: list[str] | str = ..., - cmdclass: Mapping[str, type[_Command]] = ..., - data_files: list[tuple[str, list[str]]] = ..., - package_dir: Mapping[str, str] = ..., - obsoletes: list[str] = ..., - provides: list[str] = ..., - requires: list[str] = ..., - command_packages: list[str] = ..., - command_options: Mapping[str, Mapping[str, tuple[Incomplete, Incomplete]]] = ..., - package_data: Mapping[str, list[str]] = ..., - include_package_data: bool = ..., - # libraries for `Distribution` or `build_clib`, not `Extension`, `build_ext` or `CCompiler` - libraries: list[tuple[str, _BuildInfo]] = ..., - headers: list[str] = ..., - ext_package: str = ..., - include_dirs: list[str] = ..., - password: str = ..., - fullname: str = ..., + # Attributes from distutils.dist.DistributionMetadata.set_* + # These take priority over attributes from distutils.dist.DistributionMetadata.__init__ + keywords: str | Iterable[str] = ..., + platforms: str | Iterable[str] = ..., + classifiers: str | Iterable[str] = ..., + requires: Iterable[str] = ..., + provides: Iterable[str] = ..., + obsoletes: Iterable[str] = ..., + # Attributes from distutils.dist.DistributionMetadata.__init__ + # These take priority over attributes from distutils.dist.Distribution.__init__ + name: str | 
None = None, + version: str | None = None, + author: str | None = None, + author_email: str | None = None, + maintainer: str | None = None, + maintainer_email: str | None = None, + url: str | None = None, + license: str | None = None, + description: str | None = None, + long_description: str | None = None, + download_url: str | None = None, + # Attributes from distutils.dist.Distribution.__init__ (except self.metadata) + # These take priority over attributes from distutils.dist.Distribution.display_option_names + verbose: bool = True, + dry_run: bool = False, + help: bool = False, + cmdclass: _MutableDictLike[str, type[_Command]] = {}, + command_packages: str | list[str] | None = None, + script_name: StrPath | None = ..., # default is actually set in distutils.core.setup + script_args: list[str] | None = ..., # default is actually set in distutils.core.setup + command_options: _MutableDictLike[str, _DictLike[str, tuple[str, str]]] = {}, + packages: list[str] | None = None, + package_dir: Mapping[str, str] | None = None, + py_modules: list[str] | None = None, + libraries: list[tuple[str, _BuildInfo]] | None = None, + headers: list[str] | None = None, + ext_modules: Sequence[_Extension] | None = None, + ext_package: str | None = None, + include_dirs: list[str] | None = None, + extra_path: Never = ..., # Deprecated + scripts: list[str] | None = None, + data_files: list[tuple[str, Sequence[str]]] | None = None, + password: str = "", + command_obj: _MutableDictLike[str, _Command] = {}, + have_run: _MutableDictLike[str, bool] = {}, + # kwargs used directly in distutils.dist.Distribution.__init__ + options: Mapping[str, Mapping[str, str]] | None = None, + licence: Never = ..., # Deprecated + # Attributes from distutils.dist.Distribution.display_option_names + # (this can more easily be copied from the `if TYPE_CHECKING` block) + help_commands: bool = False, + fullname: str | Literal[False] = False, + contact: str | Literal[False] = False, + contact_email: str | 
Literal[False] = False, + # kwargs used directly in setuptools.dist.Distribution.__init__ + # and attributes from setuptools.dist.Distribution.__init__ + package_data: _DictLike[str, list[str]] = {}, + dist_files: list[tuple[str, str, str]] = [], + include_package_data: bool | None = None, + exclude_package_data: _DictLike[str, list[str]] | None = None, + src_root: str | None = None, + dependency_links: list[str] = [], + setup_requires: list[str] = [], + # From Distribution._DISTUTILS_UNSUPPORTED_METADATA set in Distribution._set_metadata_defaults + long_description_content_type: str | None = None, + project_urls: _DictLike[Incomplete, Incomplete] = {}, + provides_extras: _MutableDictLike[Incomplete, Incomplete] = {}, + license_expression: str | None = None, + license_file: Never = ..., # Deprecated + license_files: Iterable[str] | None = None, + install_requires: str | Iterable[str] = [], + extras_require: _DictLike[Incomplete, Incomplete] = {}, + # kwargs used directly in distutils.core.setup + distclass: type[_DistributionT] = Distribution, # type: ignore[assignment] # noqa: Y011 # Custom Distributions could accept more params **attrs: Any, -) -> Distribution: ... +) -> _DistributionT: ... 
class Command(_Command): command_consumes_arguments: bool diff --git a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/cmd.pyi b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/cmd.pyi index 2c22e46098..036bb08441 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/cmd.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/cmd.pyi @@ -1,7 +1,7 @@ from _typeshed import BytesPath, StrOrBytesPath, StrPath, Unused from abc import abstractmethod -from collections.abc import Callable, MutableSequence -from typing import Any, ClassVar, TypeVar, overload +from collections.abc import Callable, MutableSequence, Sequence +from typing import Any, ClassVar, Literal, TypeVar, overload from typing_extensions import TypeVarTuple, Unpack from .dist import Distribution @@ -84,7 +84,10 @@ class Command: def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str: ... @overload def move_file(self, src: BytesPath, dst: _BytesPathT, level: Unused = 1) -> _BytesPathT | bytes: ... - def spawn(self, cmd: MutableSequence[str], search_path: bool = True, level: Unused = 1) -> None: ... + @overload + def spawn(self, cmd: Sequence[StrOrBytesPath], search_path: Literal[False], level: Unused = 1) -> None: ... + @overload + def spawn(self, cmd: MutableSequence[bytes | StrPath], search_path: Literal[True] = True, level: Unused = 1) -> None: ... 
@overload def make_archive( self, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/compilers/C/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/compilers/C/base.pyi index f33e86e6ba..8f51b67905 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/compilers/C/base.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/compilers/C/base.pyi @@ -1,5 +1,6 @@ -from _typeshed import BytesPath, Incomplete, StrPath, Unused +from _typeshed import BytesPath, Incomplete, StrOrBytesPath, StrPath, Unused from collections.abc import Callable, Iterable, MutableSequence, Sequence +from subprocess import _ENV from typing import ClassVar, Final, Literal, TypeVar, overload from typing_extensions import TypeAlias, TypeVarTuple, Unpack @@ -172,7 +173,12 @@ class Compiler: def execute( self, func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = None, level: int = 1 ) -> None: ... - def spawn(self, cmd: MutableSequence[bytes | StrPath]) -> None: ... + @overload + def spawn(self, cmd: Sequence[StrOrBytesPath], *, search_path: Literal[False], env: _ENV | None = None) -> None: ... + @overload + def spawn( + self, cmd: MutableSequence[bytes | StrPath], *, search_path: Literal[True] = True, env: _ENV | None = None + ) -> None: ... def mkpath(self, name: str, mode: int = 0o777) -> None: ... @overload def move_file(self, src: StrPath, dst: _StrPathT) -> _StrPathT | str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi index 44f5c85b8b..ad2f16eb7d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/compilers/C/msvc.pyi @@ -1,3 +1,5 @@ +from _typeshed import StrPath +from collections.abc import Sequence from typing import ClassVar, Final from . import base @@ -17,3 +19,4 @@ class Compiler(base.Compiler): def initialize(self, plat_name: str | None = None) -> None: ... @property def out_extensions(self) -> dict[str, str]: ... + def spawn(self, cmd: Sequence[bytes | StrPath]): ... # type: ignore[override] # Less params diff --git a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/spawn.pyi b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/spawn.pyi index 259a3a99d2..9b725d43a6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/spawn.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/_distutils/spawn.pyi @@ -1,11 +1,21 @@ -from _typeshed import StrPath -from collections.abc import MutableSequence +from _typeshed import StrOrBytesPath, StrPath, Unused +from collections.abc import MutableSequence, Sequence from subprocess import _ENV +from typing import Literal, overload +@overload +def spawn( + cmd: Sequence[StrOrBytesPath], + search_path: Literal[False], + verbose: Unused = False, + dry_run: bool = False, + env: _ENV | None = None, +) -> None: ... 
+@overload def spawn( cmd: MutableSequence[bytes | StrPath], - search_path: bool = True, - verbose: bool = False, + search_path: Literal[True] = True, + verbose: Unused = False, dry_run: bool = False, env: _ENV | None = None, ) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/METADATA.toml new file mode 100644 index 0000000000..9b307e6bda --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/METADATA.toml @@ -0,0 +1,3 @@ +version = "1.1.*" +upstream_repository = "https://github.com/miguelgrinberg/simple-websocket" +requires = ["wsproto"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/__init__.pyi new file mode 100644 index 0000000000..d3da863383 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/__init__.pyi @@ -0,0 +1,3 @@ +from .aiows import AioClient as AioClient, AioServer as AioServer +from .errors import ConnectionClosed as ConnectionClosed, ConnectionError as ConnectionError +from .ws import Client as Client, Server as Server diff --git a/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/aiows.pyi b/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/aiows.pyi new file mode 100644 index 0000000000..066a243cb7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/aiows.pyi @@ -0,0 +1,130 @@ +import asyncio +import socket +from _typeshed import Incomplete, Unused +from _typeshed.wsgi import WSGIEnvironment +from collections.abc import Awaitable, Callable +from ssl import SSLContext +from typing import Any, Literal, TypedDict, type_check_only + +from wsproto import ConnectionType, 
WSConnection
+from wsproto.events import Request
+from wsproto.frame_protocol import CloseReason
+
+from .asgi import WebSocketASGI, _SocketDataBase, _SocketDataBytes, _SocketDataProtocol, _SocketDataStr
+
+class AioBase:
+    subprotocol: str | None
+    connection_type: ConnectionType
+    receive_bytes: int
+    ping_interval: float | None
+    max_message_size: int | None
+    pong_received: bool
+    input_buffer: list[bytes | str]
+    incoming_message: bytes | str | None
+    incoming_message_len: int
+    connected: bool
+    is_server: bool
+    close_reason: CloseReason
+    close_message: str
+    rsock: asyncio.StreamReader
+    wsock: asyncio.StreamWriter
+    event: asyncio.Event
+    ws: WSConnection | None
+    task: asyncio.Task[None]
+    def __init__(
+        self,
+        connection_type: ConnectionType | None = None,
+        receive_bytes: int = 4096,
+        ping_interval: float | None = None,
+        max_message_size: int | None = None,
+    ) -> None: ...
+    async def connect(self) -> None: ...
+    async def handshake(self) -> None: ...
+    # data can be anything. a special case is made for `bytes`, anything else is converted to `str`.
+    async def send(self, data: bytes | Any) -> None: ...
+    async def receive(self, timeout: float | None = None) -> bytes | str | None: ...
+    async def close(self, reason: CloseReason | None = None, message: str | None = None) -> None: ...
+    def choose_subprotocol(self, request: Request) -> str | None: ...
+
+@type_check_only
+class _AioServerRequest(TypedDict):
+    # this is `aiohttp.web.Request`
+    aiohttp: Incomplete
+    sock: None
+    headers: None
+
+class AioServer(AioBase):
+    request: _AioServerRequest
+    headers: dict[str, Any]
+    subprotocols: list[str]
+    is_server: Literal[True]
+    mode: str
+    connected: bool
+    def __init__(
+        self,
+        request: _AioServerRequest,
+        subprotocols: list[str] | None = None,
+        receive_bytes: int = 4096,
+        ping_interval: float | None = None,
+        max_message_size: int | None = None,
+    ) -> None: ...
+ @classmethod + async def accept( + cls, + # this is `aiohttp.web.Request` + aiohttp=None, + asgi: ( + tuple[ + WSGIEnvironment, + Callable[[], Awaitable[_SocketDataBytes | _SocketDataStr]], + Callable[[_SocketDataBase | _SocketDataProtocol | _SocketDataBytes | _SocketDataStr], Awaitable[None]], + ] + | None + ) = None, + sock: socket.socket | None = None, + headers: dict[str, Any] | None = None, + subprotocols: list[str] | None = None, + receive_bytes: int = 4096, + ping_interval: float | None = None, + max_message_size: int | None = None, + ) -> WebSocketASGI | AioServer: ... + async def handshake(self) -> None: ... + def choose_subprotocol(self, request: Request) -> str | None: ... + +class AioClient(AioBase): + url: str + ssl_context: SSLContext | None + is_secure: bool + host: str + port: int + path: str + subprotocols: list[str] + extra_headeers: list[tuple[bytes, bytes]] + subprotocol: str | None + connected: bool + def __init__( + self, + url: str, + subprotocols: list[str] | None = None, + headers: dict[str, Any] | None = None, + receive_bytes: int = 4096, + ping_interval: float | None = None, + max_message_size: int | None = None, + ssl_context: SSLContext | None = None, + ) -> None: ... + # the source code itself has this override + @classmethod + async def connect( # type: ignore[override] + cls, + url: str, + subprotocols: list[str] | None = None, + headers: dict[str, Any] | None = None, + receive_bytes: int = 4096, + ping_interval: float | None = None, + max_message_size: int | None = None, + ssl_context: SSLContext | None = None, + thread_class: Unused = None, + event_class: Unused = None, + ) -> AioClient: ... + async def handshake(self) -> None: ... + async def close(self, reason: CloseReason | None = None, message: str | None = None) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/asgi.pyi b/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/asgi.pyi new file mode 100644 index 0000000000..92e802188c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/asgi.pyi @@ -0,0 +1,44 @@ +from _typeshed.wsgi import WSGIEnvironment +from collections.abc import Awaitable, Callable +from typing import TypedDict, type_check_only + +@type_check_only +class _SocketDataBase(TypedDict): + type: str + +@type_check_only +class _SocketDataProtocol(_SocketDataBase): + subprotocol: str | None + +@type_check_only +class _SocketDataStr(_SocketDataBase): + text: str + +@type_check_only +class _SocketDataBytes(_SocketDataBase): + bytes: bytes + +class WebSocketASGI: + subprotocols: list[str] + subprotocol: str + connected: bool + # this is set in `close` to `False` + conncted: bool + def __init__( + self, + scope: WSGIEnvironment, + receive: Callable[[], Awaitable[_SocketDataBytes | _SocketDataStr]], + send: Callable[[_SocketDataBase | _SocketDataProtocol | _SocketDataBytes | _SocketDataStr], Awaitable[None]], + subprotocols: list[str] | None = None, + ) -> None: ... + @classmethod + async def accept( + cls, + scope: WSGIEnvironment, + receive: Callable[[], Awaitable[_SocketDataBytes | _SocketDataStr]], + send: Callable[[_SocketDataBase | _SocketDataProtocol | _SocketDataBytes | _SocketDataStr], Awaitable[None]], + subprotocols: list[str] | None = None, + ) -> WebSocketASGI: ... + async def receive(self) -> bytes | str: ... + async def send(self, data: bytes | str) -> None: ... + async def close(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/errors.pyi b/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/errors.pyi new file mode 100644 index 0000000000..88e2506512 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/errors.pyi @@ -0,0 +1,12 @@ +from wsproto.frame_protocol import CloseReason + +class SimpleWebsocketError(RuntimeError): ... + +class ConnectionError(SimpleWebsocketError): + status_code: int | None + def __init__(self, status_code: int | None = None) -> None: ... + +class ConnectionClosed(SimpleWebsocketError): + reason: CloseReason + message: str | None + def __init__(self, reason: CloseReason = ..., message: str | None = None) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/ws.pyi b/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/ws.pyi new file mode 100644 index 0000000000..d469cd38d6 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/simple-websocket/simple_websocket/ws.pyi @@ -0,0 +1,136 @@ +import socket +import threading +from _typeshed import FileDescriptorLike +from _typeshed.wsgi import WSGIEnvironment +from collections.abc import Callable +from selectors import SelectorKey, _EventMask +from ssl import SSLContext +from typing import Any, Protocol, type_check_only + +from wsproto import ConnectionType, WSConnection +from wsproto.events import Request +from wsproto.frame_protocol import CloseReason + +@type_check_only +class _ThreadClassProtocol(Protocol): + name: str + # this accepts any callable as the target, like `threading.Thread` + def __init__(self, target: Callable[..., Any]) -> None: ... + def start(self) -> None: ... + +@type_check_only +class _EventClassProtocol(Protocol): + def clear(self) -> None: ... + def set(self) -> None: ... 
+    def wait(self, timeout: float | None = None) -> bool: ...
+
+@type_check_only
+class _SelectorClassProtocol(Protocol):
+    # the signature of `register` here is the same as `selectors._BaseSelectorImpl` from the stdlib
+    def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ...
+    # the signature of `select` here is the same as `selectors.DefaultSelector` from the stdlib
+    def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ...
+    def close(self) -> None: ...
+
+class Base:
+    subprotocol: str | None
+    sock: socket.socket | None
+    receive_bytes: int
+    ping_interval: float | None
+    max_message_size: int | None
+    pong_received: bool
+    input_buffer: list[bytes | str]
+    incoming_message: bytes | str | None
+    incoming_message_len: int
+    connected: bool
+    is_server: bool
+    close_reason: CloseReason
+    close_message: str | None
+    selector_class: type[_SelectorClassProtocol]
+    event: _EventClassProtocol | threading.Event
+    ws: WSConnection
+    thread: _ThreadClassProtocol | threading.Thread
+    def __init__(
+        self,
+        sock: socket.socket | None = None,
+        connection_type: ConnectionType | None = None,
+        receive_bytes: int = 4096,
+        ping_interval: float | None = None,
+        max_message_size: int | None = None,
+        thread_class: type[_ThreadClassProtocol] | None = None,
+        event_class: type[_EventClassProtocol] | None = None,
+        selector_class: type[_SelectorClassProtocol] | None = None,
+    ) -> None: ...
+    def handshake(self) -> None: ...
+    # data can be anything. a special case is made for `bytes`, anything else is converted to `str`.
+    def send(self, data: bytes | Any) -> None: ...
+    def receive(self, timeout: float | None = None) -> bytes | str | None: ...
+    def close(self, reason: CloseReason | None = None, message: str | None = None) -> None: ...
+    def choose_subprotocol(self, request: Request) -> str | None: ...
+ +class Server(Base): + environ: WSGIEnvironment + subprotocols: list[str] + mode: str + connected: bool + def __init__( + self, + environ: WSGIEnvironment, + subprotocols: list[str] | None = None, + receive_bytes: int = 4096, + ping_interval: float | None = None, + max_message_size: int | None = None, + thread_class: type[_ThreadClassProtocol] | None = None, + event_class: type[_EventClassProtocol] | None = None, + selector_class: type[_SelectorClassProtocol] | None = None, + ) -> None: ... + @classmethod + def accept( + cls, + environ: WSGIEnvironment, + subprotocols: list[str] | None = None, + receive_bytes: int = 4096, + ping_interval: float | None = None, + max_message_size: int | None = None, + thread_class: type[_ThreadClassProtocol] | None = None, + event_class: type[_EventClassProtocol] | None = None, + selector_class: type[_SelectorClassProtocol] | None = None, + ) -> Server: ... + def handshake(self) -> None: ... + def choose_subprotocol(self, request: Request) -> str | None: ... + +class Client(Base): + host: str + port: int + path: str + subprotocols: list[str] + extra_headeers: list[tuple[bytes, bytes]] + subprotocol: str | None + connected: bool + def __init__( + self, + url: str, + subprotocols: list[str] | None = None, + headers: dict[bytes, bytes] | list[tuple[bytes, bytes]] | None = None, + receive_bytes: int = 4096, + ping_interval: float | None = None, + max_message_size: int | None = None, + ssl_context: SSLContext | None = None, + thread_class: type[_ThreadClassProtocol] | None = None, + event_class: type[_EventClassProtocol] | None = None, + ) -> None: ... 
+ @classmethod + def connect( + cls, + url: str, + subprotocols: list[str] | None = None, + headers: dict[bytes, bytes] | list[tuple[bytes, bytes]] | None = None, + receive_bytes: int = 4096, + ping_interval: float | None = None, + max_message_size: int | None = None, + ssl_context: SSLContext | None = None, + thread_class: type[_ThreadClassProtocol] | None = None, + event_class: type[_EventClassProtocol] | None = None, + ) -> Client: ... + def handshake(self) -> None: ... + def close(self, reason: CloseReason | None = None, message: str | None = None) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/METADATA.toml new file mode 100644 index 0000000000..a207ca0560 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/METADATA.toml @@ -0,0 +1,16 @@ +# Using an exact number in the specifier for scripts/sync_protobuf/tensorflow.py +# When updating, also re-run the script +version = "~=2.18.0" +upstream_repository = "https://github.com/tensorflow/tensorflow" +# requires a version of numpy with a `py.typed` file +requires = ["numpy>=1.20", "types-protobuf", "types-requests"] +extra_description = "Partially generated using [mypy-protobuf==3.6.0](https://github.com/nipunn1313/mypy-protobuf/tree/v3.6.0) and libprotoc 27.2 on `tensorflow==2.18.0`." 
+partial_stub = true + +[tool.stubtest] +ignore_missing_stub = true +# TODO: Support/update to keras 3.7 +stubtest_requirements = ["keras==3.6.*"] +# tensorflow 2.19 doesn't support Python 3.13: +# https://github.com/tensorflow/tensorflow/issues/78774 +skip = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/__init__.pyi new file mode 100644 index 0000000000..3a356392a6 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/__init__.pyi @@ -0,0 +1,437 @@ +import abc +from _typeshed import Incomplete, Unused +from abc import ABC, ABCMeta, abstractmethod +from builtins import bool as _bool +from collections.abc import Callable, Generator, Iterable, Iterator, Sequence +from contextlib import contextmanager +from enum import Enum +from types import TracebackType +from typing import Any, Generic, Literal, TypeVar, overload +from typing_extensions import ParamSpec, Self + +from google.protobuf.message import Message +from tensorflow import ( + data as data, + experimental as experimental, + feature_column as feature_column, + initializers as initializers, + io as io, + keras as keras, + math as math, + random as random, + types as types, +) +from tensorflow._aliases import ( + AnyArray, + DTypeLike, + IntArray, + ScalarTensorCompatible, + ShapeLike, + Slice, + SparseTensorCompatible, + TensorCompatible, + UIntTensorCompatible, +) +from tensorflow.autodiff import GradientTape as GradientTape +from tensorflow.core.protobuf import struct_pb2 +from tensorflow.dtypes import * +from tensorflow.experimental.dtensor import Layout +from tensorflow.keras import losses as losses +from tensorflow.linalg import eye as eye + +# Most tf.math functions are exported as tf, but sadly not all are. 
+from tensorflow.math import ( + abs as abs, + add as add, + add_n as add_n, + argmax as argmax, + argmin as argmin, + cos as cos, + cosh as cosh, + divide as divide, + equal as equal, + greater as greater, + greater_equal as greater_equal, + less as less, + less_equal as less_equal, + logical_and as logical_and, + logical_not as logical_not, + logical_or as logical_or, + maximum as maximum, + minimum as minimum, + multiply as multiply, + not_equal as not_equal, + pow as pow, + reduce_max as reduce_max, + reduce_mean as reduce_mean, + reduce_min as reduce_min, + reduce_prod as reduce_prod, + reduce_sum as reduce_sum, + round as round, + sigmoid as sigmoid, + sign as sign, + sin as sin, + sinh as sinh, + sqrt as sqrt, + square as square, + subtract as subtract, + tanh as tanh, +) +from tensorflow.python.trackable.autotrackable import AutoTrackable +from tensorflow.sparse import SparseTensor as SparseTensor + +# Tensors ideally should be a generic type, but properly typing data type/shape +# will be a lot of work. Until we have good non-generic tensorflow stubs, +# we will skip making Tensor generic. Also good type hints for shapes will +# run quickly into many places where type system is not strong enough today. +# So shape typing is probably not worth doing anytime soon. +class Tensor: + def __init__(self, op: Operation, value_index: int, dtype: DType) -> None: ... + def consumers(self) -> list[Incomplete]: ... + @property + def shape(self) -> TensorShape: ... + def get_shape(self) -> TensorShape: ... + @property + def dtype(self) -> DType: ... + @property + def graph(self) -> Graph: ... + @property + def name(self) -> str: ... + @property + def op(self) -> Operation: ... + def numpy(self) -> AnyArray: ... + def __array__(self, dtype: DTypeLike | None = None) -> AnyArray: ... + def __int__(self) -> int: ... + def __abs__(self, name: str | None = None) -> Tensor: ... + def __add__(self, other: TensorCompatible) -> Tensor: ... 
+ def __radd__(self, other: TensorCompatible) -> Tensor: ... + def __sub__(self, other: TensorCompatible) -> Tensor: ... + def __rsub__(self, other: TensorCompatible) -> Tensor: ... + def __mul__(self, other: TensorCompatible) -> Tensor: ... + def __rmul__(self, other: TensorCompatible) -> Tensor: ... + def __pow__(self, other: TensorCompatible) -> Tensor: ... + def __matmul__(self, other: TensorCompatible) -> Tensor: ... + def __rmatmul__(self, other: TensorCompatible) -> Tensor: ... + def __floordiv__(self, other: TensorCompatible) -> Tensor: ... + def __rfloordiv__(self, other: TensorCompatible) -> Tensor: ... + def __truediv__(self, other: TensorCompatible) -> Tensor: ... + def __rtruediv__(self, other: TensorCompatible) -> Tensor: ... + def __neg__(self, name: str | None = None) -> Tensor: ... + def __and__(self, other: TensorCompatible) -> Tensor: ... + def __rand__(self, other: TensorCompatible) -> Tensor: ... + def __or__(self, other: TensorCompatible) -> Tensor: ... + def __ror__(self, other: TensorCompatible) -> Tensor: ... + def __eq__(self, other: TensorCompatible) -> Tensor: ... # type: ignore[override] + def __ne__(self, other: TensorCompatible) -> Tensor: ... # type: ignore[override] + def __ge__(self, other: TensorCompatible, name: str | None = None) -> Tensor: ... + def __gt__(self, other: TensorCompatible, name: str | None = None) -> Tensor: ... + def __le__(self, other: TensorCompatible, name: str | None = None) -> Tensor: ... + def __lt__(self, other: TensorCompatible, name: str | None = None) -> Tensor: ... + def __bool__(self) -> _bool: ... + def __getitem__(self, slice_spec: Slice | tuple[Slice, ...]) -> Tensor: ... + def __len__(self) -> int: ... + # This only works for rank 0 tensors. + def __index__(self) -> int: ... + def __getattr__(self, name: str) -> Incomplete: ... 
+ +class VariableSynchronization(Enum): + AUTO = 0 + NONE = 1 + ON_WRITE = 2 + ON_READ = 3 + +class VariableAggregation(Enum): + NONE = 0 + SUM = 1 + MEAN = 2 + ONLY_FIRST_REPLICA = 3 + +class _VariableMetaclass(type): ... + +# Variable class in intent/documentation is a Tensor. In implementation there's +# TODO: comment to make it Tensor. It is not actually Tensor type wise, but even +# dynamically patches on most methods of tf.Tensor +# https://github.com/tensorflow/tensorflow/blob/9524a636cae9ae3f0554203c1ba7ee29c85fcf12/tensorflow/python/ops/variables.py#L1086. +class Variable(Tensor, metaclass=_VariableMetaclass): + def __init__( + self, + initial_value: Tensor | Callable[[], Tensor] | None = None, + trainable: _bool | None = None, + validate_shape: _bool = True, + # Valid non-None values are deprecated. + caching_device: None = None, + name: str | None = None, + # Real type is VariableDef protobuf type. Can be added after adding script + # to generate tensorflow protobuf stubs with mypy-protobuf. + variable_def=None, + dtype: DTypeLike | None = None, + import_scope: str | None = None, + constraint: Callable[[Tensor], Tensor] | None = None, + synchronization: VariableSynchronization = ..., + aggregation: VariableAggregation = ..., + shape: ShapeLike | None = None, + experimental_enable_variable_lifting: _bool = True, + ) -> None: ... + def __getattr__(self, name: str) -> Incomplete: ... + +class RaggedTensor(metaclass=ABCMeta): + def bounding_shape( + self, axis: TensorCompatible | None = None, name: str | None = None, out_type: DTypeLike | None = None + ) -> Tensor: ... + @classmethod + def from_sparse(cls, st_input: SparseTensor, name: str | None = None, row_splits_dtype: DTypeLike = ...) -> RaggedTensor: ... + def to_sparse(self, name: str | None = None) -> SparseTensor: ... + def to_tensor( + self, default_value: float | str | None = None, name: str | None = None, shape: ShapeLike | None = None + ) -> Tensor: ... 
+ def __add__(self, other: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... + def __radd__(self, other: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... + def __sub__(self, other: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... + def __mul__(self, other: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... + def __rmul__(self, other: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... + def __floordiv__(self, other: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... + def __truediv__(self, other: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... + def __getitem__(self, slice_spec: Slice | tuple[Slice, ...]) -> RaggedTensor: ... + def __getattr__(self, name: str) -> Incomplete: ... + +class Operation: + def __init__( + self, + node_def, + g: Graph, + # isinstance is used so can not be Sequence/Iterable. + inputs: list[Tensor] | None = None, + output_types: Unused = None, + control_inputs: Iterable[Tensor | Operation] | None = None, + input_types: Iterable[DType] | None = None, + original_op: Operation | None = None, + op_def=None, + ) -> None: ... + @property + def inputs(self) -> list[Tensor]: ... + @property + def outputs(self) -> list[Tensor]: ... + @property + def device(self) -> str: ... + @property + def name(self) -> str: ... + @property + def type(self) -> str: ... + def __getattr__(self, name: str) -> Incomplete: ... + +class TensorShape(metaclass=ABCMeta): + def __init__(self, dims: ShapeLike) -> None: ... + @property + def rank(self) -> int: ... + def as_list(self) -> list[int | None]: ... + def assert_has_rank(self, rank: int) -> None: ... + def assert_is_compatible_with(self, other: Iterable[int | None]) -> None: ... + def __bool__(self) -> _bool: ... + @overload + def __getitem__(self, key: int) -> int | None: ... + @overload + def __getitem__(self, key: slice) -> TensorShape: ... + def __iter__(self) -> Iterator[int | None]: ... 
+ def __len__(self) -> int: ... + def __add__(self, other: Iterable[int | None]) -> TensorShape: ... + def __radd__(self, other: Iterable[int | None]) -> TensorShape: ... + def __getattr__(self, name: str) -> Incomplete: ... + +class Graph: + def add_to_collection(self, name: str, value: object) -> None: ... + def add_to_collections(self, names: Iterable[str] | str, value: object) -> None: ... + @contextmanager + def as_default(self) -> Generator[Self]: ... + def finalize(self) -> None: ... + def get_tensor_by_name(self, name: str) -> Tensor: ... + def get_operation_by_name(self, name: str) -> Operation: ... + def get_operations(self) -> list[Operation]: ... + def get_name_scope(self) -> str: ... + def __getattr__(self, name: str) -> Incomplete: ... + +class IndexedSlices(metaclass=ABCMeta): + def __init__(self, values: Tensor, indices: Tensor, dense_shape: None | Tensor = None) -> None: ... + @property + def values(self) -> Tensor: ... + @property + def indices(self) -> Tensor: ... + @property + def dense_shape(self) -> None | Tensor: ... + @property + def shape(self) -> TensorShape: ... + @property + def dtype(self) -> DType: ... + @property + def name(self) -> str: ... + @property + def op(self) -> Operation: ... + @property + def graph(self) -> Graph: ... + @property + def device(self) -> str: ... + def __neg__(self) -> IndexedSlices: ... + def consumers(self) -> list[Operation]: ... + +class name_scope(metaclass=abc.ABCMeta): + def __init__(self, name: str) -> None: ... + def __enter__(self) -> str: ... + def __exit__(self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None) -> None: ... + +_P = ParamSpec("_P") +_R = TypeVar("_R") + +class Module(AutoTrackable): + def __init__(self, name: str | None = None) -> None: ... + @property + def name(self) -> str: ... + @property + def name_scope(self) -> name_scope: ... + # Documentation only specifies these as returning Sequence. Actual + # implementation does tuple. 
+ @property + def variables(self) -> Sequence[Variable]: ... + @property + def trainable_variables(self) -> Sequence[Variable]: ... + @property + def non_trainable_variables(self) -> Sequence[Variable]: ... + @property + def submodules(self) -> Sequence[Module]: ... + @classmethod + def with_name_scope(cls, method: Callable[_P, _R]) -> Callable[_P, _R]: ... + +class UnconnectedGradients(Enum): + NONE = "none" + ZERO = "zero" + +_SpecProto = TypeVar("_SpecProto", bound=Message) + +class TypeSpec(ABC, Generic[_SpecProto]): + @property + @abstractmethod + def value_type(self) -> Any: ... + def experimental_as_proto(self) -> _SpecProto: ... + @classmethod + def experimental_from_proto(cls, proto: _SpecProto) -> Self: ... + @classmethod + def experimental_type_proto(cls) -> type[_SpecProto]: ... + def is_compatible_with(self, spec_or_value: Self | TensorCompatible | SparseTensor | RaggedTensor) -> _bool: ... + # Incomplete as tf.types is not yet covered. + def is_subtype_of(self, other) -> _bool: ... + def most_specific_common_supertype(self, others: Sequence[Incomplete]) -> Self | None: ... + def most_specific_compatible_type(self, other: Self) -> Self: ... + +class TensorSpec(TypeSpec[struct_pb2.TensorSpecProto]): + def __init__(self, shape: ShapeLike, dtype: DTypeLike = ..., name: str | None = None) -> None: ... + @property + def value_type(self) -> Tensor: ... + @property + def shape(self) -> TensorShape: ... + @property + def dtype(self) -> DType: ... + @property + def name(self) -> str | None: ... + @classmethod + def from_spec(cls, spec: TypeSpec[Any], name: str | None = None) -> Self: ... + @classmethod + def from_tensor(cls, tensor: Tensor, name: str | None = None) -> Self: ... + def is_compatible_with(self, spec_or_tensor: Self | TensorCompatible) -> _bool: ... # type: ignore[override] + +class SparseTensorSpec(TypeSpec[struct_pb2.TypeSpecProto]): + def __init__(self, shape: ShapeLike | None = None, dtype: DTypeLike = ...) -> None: ... 
+ @property + def value_type(self) -> SparseTensor: ... + @property + def shape(self) -> TensorShape: ... + @property + def dtype(self) -> DType: ... + @classmethod + def from_value(cls, value: SparseTensor) -> Self: ... + +class RaggedTensorSpec(TypeSpec[struct_pb2.TypeSpecProto]): + def __init__( + self, + shape: ShapeLike | None = None, + dtype: DTypeLike = ..., + ragged_rank: int | None = None, + row_splits_dtype: DTypeLike = ..., + flat_values_spec: TypeSpec[Any] | None = None, + ) -> None: ... + @property + def value_type(self) -> RaggedTensor: ... + @property + def shape(self) -> TensorShape: ... + @property + def dtype(self) -> DType: ... + @classmethod + def from_value(cls, value: RaggedTensor) -> Self: ... + +def convert_to_tensor( + value: TensorCompatible | IndexedSlices, + dtype: DTypeLike | None = None, + dtype_hint: DTypeLike | None = None, + name: str | None = None, +) -> Tensor: ... +@overload +def expand_dims(input: TensorCompatible, axis: int, name: str | None = None) -> Tensor: ... +@overload +def expand_dims(input: RaggedTensor, axis: int, name: str | None = None) -> RaggedTensor: ... +@overload +def concat(values: TensorCompatible, axis: int, name: str | None = "concat") -> Tensor: ... +@overload +def concat(values: Sequence[RaggedTensor], axis: int, name: str | None = "concat") -> RaggedTensor: ... +@overload +def squeeze( + input: TensorCompatible, axis: int | tuple[int, ...] | list[int] | None = None, name: str | None = None +) -> Tensor: ... +@overload +def squeeze(input: RaggedTensor, axis: int | tuple[int, ...] | list[int], name: str | None = None) -> RaggedTensor: ... +def tensor_scatter_nd_update( + tensor: TensorCompatible, indices: TensorCompatible, updates: TensorCompatible, name: str | None = None +) -> Tensor: ... +def constant( + value: TensorCompatible, dtype: DTypeLike | None = None, shape: ShapeLike | None = None, name: str | None = "Const" +) -> Tensor: ... 
+@overload +def cast(x: TensorCompatible, dtype: DTypeLike, name: str | None = None) -> Tensor: ... +@overload +def cast(x: SparseTensor, dtype: DTypeLike, name: str | None = None) -> SparseTensor: ... +@overload +def cast(x: RaggedTensor, dtype: DTypeLike, name: str | None = None) -> RaggedTensor: ... +def zeros(shape: ShapeLike, dtype: DTypeLike = ..., name: str | None = None, layout: Layout | None = None) -> Tensor: ... +def ones(shape: ShapeLike, dtype: DTypeLike = ..., name: str | None = None, layout: Layout | None = None) -> Tensor: ... +@overload +def zeros_like( + input: TensorCompatible | IndexedSlices, dtype: DTypeLike | None = None, name: str | None = None, layout: Layout | None = None +) -> Tensor: ... +@overload +def zeros_like( + input: RaggedTensor, dtype: DTypeLike | None = None, name: str | None = None, layout: Layout | None = None +) -> RaggedTensor: ... +@overload +def ones_like( + input: TensorCompatible, dtype: DTypeLike | None = None, name: str | None = None, layout: Layout | None = None +) -> Tensor: ... +@overload +def ones_like( + input: RaggedTensor, dtype: DTypeLike | None = None, name: str | None = None, layout: Layout | None = None +) -> RaggedTensor: ... +def reshape(tensor: TensorCompatible, shape: ShapeLike | Tensor, name: str | None = None) -> Tensor: ... +def pad( + tensor: TensorCompatible, + paddings: Tensor | IntArray | Iterable[Iterable[int]], + mode: Literal["CONSTANT", "constant", "REFLECT", "reflect", "SYMMETRIC", "symmetric"] = "CONSTANT", + constant_values: ScalarTensorCompatible = 0, + name: str | None = None, +) -> Tensor: ... +def shape(input: SparseTensorCompatible, out_type: DTypeLike | None = None, name: str | None = None) -> Tensor: ... +def where( + condition: TensorCompatible, x: TensorCompatible | None = None, y: TensorCompatible | None = None, name: str | None = None +) -> Tensor: ... 
+def gather_nd( + params: TensorCompatible, + indices: UIntTensorCompatible, + batch_dims: UIntTensorCompatible = 0, + name: str | None = None, + bad_indices_policy: Literal["", "DEFAULT", "ERROR", "IGNORE"] = "", +) -> Tensor: ... +def __getattr__(name: str): ... # incomplete module diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/_aliases.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/_aliases.pyi new file mode 100644 index 0000000000..6bc6918a6b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/_aliases.pyi @@ -0,0 +1,70 @@ +# Commonly used type aliases. +# Everything in this module is private for stubs. There is no runtime equivalent. + +from collections.abc import Iterable, Mapping, Sequence +from typing import Any, Protocol, TypeVar +from typing_extensions import TypeAlias + +import numpy as np +import numpy.typing as npt +import tensorflow as tf +from tensorflow.dtypes import DType +from tensorflow.keras.layers import InputSpec + +_T = TypeVar("_T") +ContainerGeneric: TypeAlias = Mapping[str, ContainerGeneric[_T]] | Sequence[ContainerGeneric[_T]] | _T + +TensorLike: TypeAlias = tf.Tensor | tf.RaggedTensor | tf.SparseTensor +SparseTensorLike: TypeAlias = tf.Tensor | tf.SparseTensor +RaggedTensorLike: TypeAlias = tf.Tensor | tf.RaggedTensor +# _RaggedTensorLikeT = TypeVar("_RaggedTensorLikeT", tf.Tensor, tf.RaggedTensor) +Gradients: TypeAlias = tf.Tensor | tf.IndexedSlices + +class KerasSerializable1(Protocol): + def get_config(self) -> dict[str, Any]: ... + +class KerasSerializable2(Protocol): + __name__: str + +KerasSerializable: TypeAlias = KerasSerializable1 | KerasSerializable2 + +TensorValue: TypeAlias = tf.Tensor # Alias for a 0D Tensor +Integer: TypeAlias = TensorValue | int | IntArray | np.number[Any] # Here IntArray are assumed to be 0D. 
+Float: TypeAlias = Integer | float | FloatArray
+Slice: TypeAlias = tf.Tensor | tf.RaggedTensor | int | slice | None
+FloatDataSequence: TypeAlias = Sequence[float] | Sequence[FloatDataSequence]
+IntDataSequence: TypeAlias = Sequence[int] | Sequence[IntDataSequence]
+StrDataSequence: TypeAlias = Sequence[str] | Sequence[StrDataSequence]
+DataSequence: TypeAlias = FloatDataSequence | StrDataSequence | IntDataSequence
+ScalarTensorCompatible: TypeAlias = tf.Tensor | str | float | np.ndarray[Any, Any] | np.number[Any]
+UIntTensorCompatible: TypeAlias = tf.Tensor | int | UIntArray
+FloatTensorCompatible: TypeAlias = tf.Tensor | int | IntArray | float | FloatArray | np.number[Any]
+StringTensorCompatible: TypeAlias = tf.Tensor | str | npt.NDArray[np.str_] | Sequence[StringTensorCompatible]
+
+TensorCompatible: TypeAlias = ScalarTensorCompatible | Sequence[TensorCompatible]
+# _TensorCompatibleT = TypeVar("_TensorCompatibleT", bound=TensorCompatible)
+# Sparse tensors are very annoying. Some operations work on them, but many do not.
+# You will need to manually verify if an operation supports them. SparseTensorCompatible is intended to be a
+# broader type than TensorCompatible and not all operations will support the broader version. If unsure,
+# use TensorCompatible instead.
+SparseTensorCompatible: TypeAlias = TensorCompatible | tf.SparseTensor
+# TensorFlow tries to convert anything passed as input. Meaning that even if, for example, only a Tensor of int32
+# is allowed, a numpy array of strings that can be converted to int32 will work. Therefore having anything more specific
+# than AnyArray might cause false positives, while AnyArray might cause false negatives.
+TensorOrArray: TypeAlias = tf.Tensor | AnyArray + +ShapeLike: TypeAlias = tf.TensorShape | Iterable[ScalarTensorCompatible | None] | int | tf.Tensor +DTypeLike: TypeAlias = DType | str | np.dtype[Any] | int + +ContainerTensors: TypeAlias = ContainerGeneric[tf.Tensor] +ContainerTensorsLike: TypeAlias = ContainerGeneric[TensorLike] +ContainerTensorCompatible: TypeAlias = ContainerGeneric[TensorCompatible] +ContainerGradients: TypeAlias = ContainerGeneric[Gradients] +ContainerTensorShape: TypeAlias = ContainerGeneric[tf.TensorShape] +ContainerInputSpec: TypeAlias = ContainerGeneric[InputSpec] + +AnyArray: TypeAlias = npt.NDArray[Any] +FloatArray: TypeAlias = npt.NDArray[np.float16 | np.float32 | np.float64] +UIntArray: TypeAlias = npt.NDArray[np.uint | np.uint8 | np.uint16 | np.uint32 | np.uint64] +SignedIntArray: TypeAlias = npt.NDArray[np.int_ | np.int8 | np.int16 | np.int32 | np.int64] +IntArray: TypeAlias = UIntArray | SignedIntArray diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/audio.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/audio.pyi new file mode 100644 index 0000000000..480c72ad2e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/audio.pyi @@ -0,0 +1,7 @@ +import tensorflow as tf +from tensorflow._aliases import Integer, StringTensorCompatible + +def decode_wav( + contents: StringTensorCompatible, desired_channels: int = -1, desired_samples: int = -1, name: str | None = None +) -> tuple[tf.Tensor, tf.Tensor]: ... +def encode_wav(audio: tf.Tensor, sample_rate: Integer, name: str | None = None) -> tf.Tensor: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/autodiff.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/autodiff.pyi new file mode 100644 index 0000000000..47f5e1bb91 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/autodiff.pyi @@ -0,0 +1,63 @@ +from _typeshed import Incomplete +from builtins import bool as _bool +from collections.abc import Generator, Mapping, Sequence +from contextlib import contextmanager +from types import TracebackType +from typing import overload +from typing_extensions import Self + +import tensorflow as tf +from tensorflow import Tensor, UnconnectedGradients, Variable +from tensorflow._aliases import ContainerGradients, ContainerTensors, ContainerTensorsLike, Gradients, TensorLike + +class ForwardAccumulator: + def __init__(self, primals: Tensor, tangents: Tensor) -> None: ... + def jvp( + self, primals: Tensor, unconnected_gradients: tf.UnconnectedGradients = tf.UnconnectedGradients.NONE # noqa: Y011 + ) -> Tensor | None: ... + def __enter__(self) -> Self: ... + def __exit__(self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None) -> None: ... + +class GradientTape: + def __init__(self, persistent: _bool = False, watch_accessed_variables: _bool = True) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None) -> None: ... + # Higher kinded types would be nice here and these overloads are a way to simulate some of them. + @overload + def gradient( + self, + target: ContainerTensors, + sources: TensorLike, + output_gradients: list[Tensor] | None = None, + unconnected_gradients: UnconnectedGradients = ..., + ) -> Gradients: ... 
+ @overload + def gradient( + self, + target: ContainerTensors, + sources: Sequence[Tensor], + output_gradients: list[Tensor] | None = None, + unconnected_gradients: UnconnectedGradients = ..., + ) -> list[Gradients]: ... + @overload + def gradient( + self, + target: ContainerTensors, + sources: Mapping[str, Tensor], + output_gradients: list[Tensor] | None = None, + unconnected_gradients: UnconnectedGradients = ..., + ) -> dict[str, Gradients]: ... + @overload + def gradient( + self, + target: ContainerTensors, + sources: ContainerTensors, + output_gradients: list[Tensor] | None = None, + unconnected_gradients: UnconnectedGradients = ..., + ) -> ContainerGradients: ... + @contextmanager + def stop_recording(self) -> Generator[None]: ... + def reset(self) -> None: ... + def watch(self, tensor: ContainerTensorsLike) -> None: ... + def watched_variables(self) -> tuple[Variable, ...]: ... + def __getattr__(self, name: str) -> Incomplete: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/autograph/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/autograph/__init__.pyi new file mode 100644 index 0000000000..4058de0c3e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/autograph/__init__.pyi @@ -0,0 +1,17 @@ +from collections.abc import Callable +from typing import Any, TypeVar + +from tensorflow.autograph.experimental import Feature + +_Type = TypeVar("_Type") + +def set_verbosity(level: int, alsologtostdout: bool = False) -> None: ... +def to_code( + entity: Callable[..., Any], + recursive: bool = True, + experimental_optional_features: None | Feature | tuple[Feature, ...] = None, +) -> str: ... +def to_graph( + entity: _Type, recursive: bool = True, experimental_optional_features: None | Feature | tuple[Feature, ...] = None +) -> _Type: ... +def trace(*args: Any) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/autograph/experimental.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/autograph/experimental.pyi new file mode 100644 index 0000000000..738a6802dd --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/autograph/experimental.pyi @@ -0,0 +1,30 @@ +from collections.abc import Callable, Iterable +from enum import Enum +from typing import TypeVar, overload +from typing_extensions import ParamSpec + +import tensorflow as tf +from tensorflow._aliases import Integer + +_Param = ParamSpec("_Param") +_RetType = TypeVar("_RetType") + +class Feature(Enum): + ALL = "ALL" + ASSERT_STATEMENTS = "ASSERT_STATEMENTS" + AUTO_CONTROL_DEPS = "AUTO_CONTROL_DEPS" + BUILTIN_FUNCTIONS = "BUILTIN_FUNCTIONS" + EQUALITY_OPERATORS = "EQUALITY_OPERATORS" + LISTS = "LISTS" + NAME_SCOPES = "NAME_SCOPES" + +@overload +def do_not_convert(func: Callable[_Param, _RetType]) -> Callable[_Param, _RetType]: ... +@overload +def do_not_convert(func: None = None) -> Callable[[Callable[_Param, _RetType]], Callable[_Param, _RetType]]: ... +def set_loop_options( + parallel_iterations: Integer = ..., + swap_memory: bool = ..., + maximum_iterations: Integer = ..., + shape_invariants: Iterable[tuple[tf.Tensor, tf.TensorShape]] = ..., +) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/bitwise.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/bitwise.pyi new file mode 100644 index 0000000000..2045d99873 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/bitwise.pyi @@ -0,0 +1,37 @@ +from typing import Any, overload +from typing_extensions import TypeAlias + +import numpy as np +import tensorflow as tf +from tensorflow._aliases import FloatArray, IntArray + +# The alias below is not fully accurate, since TensorFlow casts the inputs, they have some additional +# requirements. For example y needs to be castable into x's dtype. Moreover, x and y cannot both be booleans. +# Properly typing the bitwise functions would be overly complicated and unlikely to provide much benefits +# since most people use Tensors, it was therefore not done. +_BitwiseCompatible: TypeAlias = tf.Tensor | int | FloatArray | IntArray | np.number[Any] + +@overload +def bitwise_and(x: _BitwiseCompatible, y: _BitwiseCompatible, name: str | None = None) -> tf.Tensor: ... +@overload +def bitwise_and(x: tf.RaggedTensor, y: tf.RaggedTensor, name: str | None = None) -> tf.RaggedTensor: ... +@overload +def bitwise_or(x: _BitwiseCompatible, y: _BitwiseCompatible, name: str | None = None) -> tf.Tensor: ... +@overload +def bitwise_or(x: tf.RaggedTensor, y: tf.RaggedTensor, name: str | None = None) -> tf.RaggedTensor: ... +@overload +def bitwise_xor(x: _BitwiseCompatible, y: _BitwiseCompatible, name: str | None = None) -> tf.Tensor: ... +@overload +def bitwise_xor(x: tf.RaggedTensor, y: tf.RaggedTensor, name: str | None = None) -> tf.RaggedTensor: ... +@overload +def invert(x: _BitwiseCompatible, name: str | None = None) -> tf.Tensor: ... +@overload +def invert(x: tf.RaggedTensor, name: str | None = None) -> tf.RaggedTensor: ... 
+@overload +def left_shift(x: _BitwiseCompatible, y: _BitwiseCompatible, name: str | None = None) -> tf.Tensor: ... +@overload +def left_shift(x: tf.RaggedTensor, y: tf.RaggedTensor, name: str | None = None) -> tf.RaggedTensor: ... +@overload +def right_shift(x: _BitwiseCompatible, y: _BitwiseCompatible, name: str | None = None) -> tf.Tensor: ... +@overload +def right_shift(x: tf.RaggedTensor, y: tf.RaggedTensor, name: str | None = None) -> tf.RaggedTensor: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/hlo_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/hlo_pb2.pyi new file mode 100644 index 0000000000..9057959cc0 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/hlo_pb2.pyi @@ -0,0 +1,2113 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +This proto file defines messages which represent the HLO module. This is a +full fidelity serialization of the c++ HLO constructs. + +Many of the protos below are simple 1-to-1 serializations of the +corresponding C++ classes, e.g., HloModule, HloComputation, and +HloInstruction. + +FIELD NAMES ARE IMPORTANT + +Unlike most protos, you can't safely change the names of fields, even if you +keep the numeric ids the same. This is because we sometimes serialize these +protos as JSON, which includes the field names in the serialization. 
+""" + +import builtins +import collections.abc +import sys +import typing + +import google.protobuf.any_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import tensorflow.compiler.xla.xla_data_pb2 + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _CustomCallSchedule: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _CustomCallScheduleEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_CustomCallSchedule.ValueType], builtins.type +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + SCHEDULE_NONE: _CustomCallSchedule.ValueType # 0 + SCHEDULE_LATEST: _CustomCallSchedule.ValueType # 1 + SCHEDULE_EARLIEST: _CustomCallSchedule.ValueType # 2 + +class CustomCallSchedule(_CustomCallSchedule, metaclass=_CustomCallScheduleEnumTypeWrapper): ... 
+ +SCHEDULE_NONE: CustomCallSchedule.ValueType # 0 +SCHEDULE_LATEST: CustomCallSchedule.ValueType # 1 +SCHEDULE_EARLIEST: CustomCallSchedule.ValueType # 2 +global___CustomCallSchedule = CustomCallSchedule + +class _CustomCallApiVersion: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _CustomCallApiVersionEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_CustomCallApiVersion.ValueType], builtins.type +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + API_VERSION_UNSPECIFIED: _CustomCallApiVersion.ValueType # 0 + API_VERSION_ORIGINAL: _CustomCallApiVersion.ValueType # 1 + """The first version of the API, with the following signatures: + + CPU: + void do_custom_call(void* out, const void** in); + + GPU: + void do_custom_call(CUstream stream, void** buffers, + const char* opaque, size_t opaque_len); + """ + API_VERSION_STATUS_RETURNING: _CustomCallApiVersion.ValueType # 2 + """When the ability to return success/failure status was added: + + CPU: + void do_custom_call(void* out, const void** in, + XlaCustomCallStatus* status); + + GPU: + void do_custom_call(CUstream stream, void** buffers, + const char* opaque, size_t opaque_len, + XlaCustomCallStatus* status); + """ + API_VERSION_STATUS_RETURNING_UNIFIED: _CustomCallApiVersion.ValueType # 3 + """Fixes the API signatures on the CPU side of the version STATUS_RETURNING by + adding the opaque string so that the custom call API is consistent across + CPUs and GPUs. For GPUs, the behaviors invoked by + API_VERSION_STATUS_RETURNING and API_VERSION_STATUS_RETURNING_UNIFIED are + the same. 
+ + CPU: + void do_custom_call(void* out, const void** in, + const char* opaque, size_t opaque_len, + XlaCustomCallStatus* status); + + GPU: + void do_custom_call(CUstream stream, void** buffers, + const char* opaque, size_t opaque_len, + XlaCustomCallStatus* status); + """ + API_VERSION_TYPED_FFI: _CustomCallApiVersion.ValueType # 4 + """Api version implementing XLA runtime custom call calling convention. These + custom calls can be registered as an XLA runtime custom call (1) or as XLA + runtime FFI binding (2). + + This type of custom call uses custom ABI to pass type information along + with custom call arguments. Also it passes buffer arguments together with + data type, sizes and strides. + + Example: (XLA runtime custom call) + + absl::Status DoCustomCall(StridedMemrefView arg, float attr); + + CustomCall::Bind("custom_call") + .Arg() + .Attr("attr") + .To(DoCustomCall); + + (1) xla/runtime/custom_call.h + (2) xla/runtime/ffi/ffi.h + """ + +class CustomCallApiVersion(_CustomCallApiVersion, metaclass=_CustomCallApiVersionEnumTypeWrapper): + """The version of the API used by the custom call function. The signatures for + each version are given below. + TODO(b/189822916): Remove this enum when all clients are migrated to the + status-returning API. 
+ """ + +API_VERSION_UNSPECIFIED: CustomCallApiVersion.ValueType # 0 +API_VERSION_ORIGINAL: CustomCallApiVersion.ValueType # 1 +"""The first version of the API, with the following signatures: + +CPU: + void do_custom_call(void* out, const void** in); + +GPU: + void do_custom_call(CUstream stream, void** buffers, + const char* opaque, size_t opaque_len); +""" +API_VERSION_STATUS_RETURNING: CustomCallApiVersion.ValueType # 2 +"""When the ability to return success/failure status was added: + +CPU: + void do_custom_call(void* out, const void** in, + XlaCustomCallStatus* status); + +GPU: + void do_custom_call(CUstream stream, void** buffers, + const char* opaque, size_t opaque_len, + XlaCustomCallStatus* status); +""" +API_VERSION_STATUS_RETURNING_UNIFIED: CustomCallApiVersion.ValueType # 3 +"""Fixes the API signatures on the CPU side of the version STATUS_RETURNING by +adding the opaque string so that the custom call API is consistent across +CPUs and GPUs. For GPUs, the behaviors invoked by +API_VERSION_STATUS_RETURNING and API_VERSION_STATUS_RETURNING_UNIFIED are +the same. + +CPU: + void do_custom_call(void* out, const void** in, + const char* opaque, size_t opaque_len, + XlaCustomCallStatus* status); + +GPU: + void do_custom_call(CUstream stream, void** buffers, + const char* opaque, size_t opaque_len, + XlaCustomCallStatus* status); +""" +API_VERSION_TYPED_FFI: CustomCallApiVersion.ValueType # 4 +"""Api version implementing XLA runtime custom call calling convention. These +custom calls can be registered as an XLA runtime custom call (1) or as XLA +runtime FFI binding (2). + +This type of custom call uses custom ABI to pass type information along +with custom call arguments. Also it passes buffer arguments together with +data type, sizes and strides. 
+ +Example: (XLA runtime custom call) + + absl::Status DoCustomCall(StridedMemrefView arg, float attr); + + CustomCall::Bind("custom_call") + .Arg() + .Attr("attr") + .To(DoCustomCall); + +(1) xla/runtime/custom_call.h +(2) xla/runtime/ffi/ffi.h +""" +global___CustomCallApiVersion = CustomCallApiVersion + +class _Kind: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _KindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Kind.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + UNDEFINED_ALIAS: _Kind.ValueType # 0 + """Define a UNDEFINED_ALIAS equal to zero to get around the default-0 proto3 + behavior and missing has_*() APIs. + """ + MAY_ALIAS: _Kind.ValueType # 1 + """The buffers may or may not alias at runtime.""" + MUST_ALIAS: _Kind.ValueType # 2 + """The buffers must alias at runtime.""" + +class Kind(_Kind, metaclass=_KindEnumTypeWrapper): ... + +UNDEFINED_ALIAS: Kind.ValueType # 0 +"""Define a UNDEFINED_ALIAS equal to zero to get around the default-0 proto3 +behavior and missing has_*() APIs. +""" +MAY_ALIAS: Kind.ValueType # 1 +"""The buffers may or may not alias at runtime.""" +MUST_ALIAS: Kind.ValueType # 2 +"""The buffers must alias at runtime.""" +global___Kind = Kind + +@typing.final +class HloInstructionProto(google.protobuf.message.Message): + """Serialization of HloInstruction. 
+ Next ID: 90 + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class SliceDimensions(google.protobuf.message.Message): + """Describes the [begin, end) index range and stride for slices.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + START_FIELD_NUMBER: builtins.int + LIMIT_FIELD_NUMBER: builtins.int + STRIDE_FIELD_NUMBER: builtins.int + start: builtins.int + limit: builtins.int + stride: builtins.int + def __init__( + self, *, start: builtins.int | None = ..., limit: builtins.int | None = ..., stride: builtins.int | None = ... + ) -> None: ... + def ClearField(self, field_name: typing.Literal["limit", b"limit", "start", b"start", "stride", b"stride"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + OPCODE_FIELD_NUMBER: builtins.int + SHAPE_FIELD_NUMBER: builtins.int + METADATA_FIELD_NUMBER: builtins.int + LITERAL_FIELD_NUMBER: builtins.int + PARAMETER_NUMBER_FIELD_NUMBER: builtins.int + FUSION_KIND_FIELD_NUMBER: builtins.int + TUPLE_INDEX_FIELD_NUMBER: builtins.int + DIMENSIONS_FIELD_NUMBER: builtins.int + WINDOW_FIELD_NUMBER: builtins.int + CONVOLUTION_DIMENSION_NUMBERS_FIELD_NUMBER: builtins.int + FEATURE_GROUP_COUNT_FIELD_NUMBER: builtins.int + BATCH_GROUP_COUNT_FIELD_NUMBER: builtins.int + SLICE_DIMENSIONS_FIELD_NUMBER: builtins.int + EXPONENT_BITS_FIELD_NUMBER: builtins.int + MANTISSA_BITS_FIELD_NUMBER: builtins.int + DYNAMIC_SLICE_SIZES_FIELD_NUMBER: builtins.int + PADDING_CONFIG_FIELD_NUMBER: builtins.int + OUTFEED_CONFIG_FIELD_NUMBER: builtins.int + DISTRIBUTION_FIELD_NUMBER: builtins.int + EPSILON_FIELD_NUMBER: builtins.int + FEATURE_INDEX_FIELD_NUMBER: builtins.int + CHANNEL_ID_FIELD_NUMBER: builtins.int + INFEED_CONFIG_FIELD_NUMBER: builtins.int + CUSTOM_CALL_TARGET_FIELD_NUMBER: builtins.int + OUTFEED_SHAPE_FIELD_NUMBER: builtins.int + DOT_DIMENSION_NUMBERS_FIELD_NUMBER: builtins.int + FFT_TYPE_FIELD_NUMBER: builtins.int + FFT_LENGTH_FIELD_NUMBER: builtins.int + COMPARISON_DIRECTION_FIELD_NUMBER: 
builtins.int + GATHER_DIMENSION_NUMBERS_FIELD_NUMBER: builtins.int + GATHER_SLICE_SIZES_FIELD_NUMBER: builtins.int + ID_FIELD_NUMBER: builtins.int + OPERAND_IDS_FIELD_NUMBER: builtins.int + CONTROL_PREDECESSOR_IDS_FIELD_NUMBER: builtins.int + CALLED_COMPUTATION_IDS_FIELD_NUMBER: builtins.int + SHARDING_FIELD_NUMBER: builtins.int + BACKEND_CONFIG_FIELD_NUMBER: builtins.int + REPLICA_GROUPS_FIELD_NUMBER: builtins.int + ALL_REDUCE_ID_FIELD_NUMBER: builtins.int + USE_GLOBAL_DEVICE_IDS_FIELD_NUMBER: builtins.int + IS_HOST_TRANSFER_FIELD_NUMBER: builtins.int + IS_STABLE_FIELD_NUMBER: builtins.int + SCATTER_DIMENSION_NUMBERS_FIELD_NUMBER: builtins.int + PRECISION_CONFIG_FIELD_NUMBER: builtins.int + SOURCE_TARGET_PAIRS_FIELD_NUMBER: builtins.int + DOMAIN_ENTRY_SHARDING_FIELD_NUMBER: builtins.int + DOMAIN_EXIT_SHARDING_FIELD_NUMBER: builtins.int + CONSTRAIN_LAYOUT_FIELD_NUMBER: builtins.int + OPERAND_SHAPES_WITH_LAYOUT_FIELD_NUMBER: builtins.int + TRIANGULAR_SOLVE_OPTIONS_FIELD_NUMBER: builtins.int + CHOLESKY_OPTIONS_FIELD_NUMBER: builtins.int + PARAMETER_REPLICATION_FIELD_NUMBER: builtins.int + CUSTOM_CALL_HAS_SIDE_EFFECT_FIELD_NUMBER: builtins.int + OUTPUT_OPERAND_ALIASING_FIELD_NUMBER: builtins.int + CUSTOM_CALL_SCHEDULE_FIELD_NUMBER: builtins.int + DELTA_FIELD_NUMBER: builtins.int + INDICES_ARE_SORTED_FIELD_NUMBER: builtins.int + FRONTEND_ATTRIBUTES_FIELD_NUMBER: builtins.int + UNIQUE_INDICES_FIELD_NUMBER: builtins.int + RNG_ALGORITHM_FIELD_NUMBER: builtins.int + COMPARISON_TYPE_FIELD_NUMBER: builtins.int + IS_CROSS_PROGRAM_PREFETCH_FIELD_NUMBER: builtins.int + CROSS_PROGRAM_PREFETCH_INDEX_FIELD_NUMBER: builtins.int + PADDING_TYPE_FIELD_NUMBER: builtins.int + CUSTOM_CALL_API_VERSION_FIELD_NUMBER: builtins.int + ASYNC_EXECUTION_THREAD_FIELD_NUMBER: builtins.int + K_FIELD_NUMBER: builtins.int + LARGEST_FIELD_NUMBER: builtins.int + STATISTICS_VIZ_FIELD_NUMBER: builtins.int + DOT_SPARSITY_FIELD_NUMBER: builtins.int + COLLECTIVE_DEVICE_LIST_FIELD_NUMBER: builtins.int + 
ORIGINAL_VALUE_FIELD_NUMBER: builtins.int + IS_COMPOSITE_FIELD_NUMBER: builtins.int + name: builtins.str + opcode: builtins.str + parameter_number: builtins.int + """Parameter number is only present for kParameter.""" + fusion_kind: builtins.str + """Fusion state, only present for kFusion.""" + tuple_index: builtins.int + """Index for kGetTupleElement.""" + feature_group_count: builtins.int + """The number of feature groups. Used for a convolution. Must be a divisor of + the input feature dimension and output feature dimension. If not specified, + it will use a default value of 1. + """ + batch_group_count: builtins.int + exponent_bits: builtins.int + """The bit sizes for a reduce-precision operation.""" + mantissa_bits: builtins.int + outfeed_config: builtins.bytes + """Outfeed configuration information, only present for kOutfeed.""" + distribution: tensorflow.compiler.xla.xla_data_pb2.RandomDistribution.ValueType + """The distribution requested for random number generation. + Only present for kRng. + """ + epsilon: builtins.float + """A small float number added to the variance to avoid divide-by-zero error. + Only present for kBatchNormTraining, kBatchNormInference, and + kBatchNormGrad. + """ + feature_index: builtins.int + """An integer value representing the index of the feature dimension. + Only present for kBatchNormTraining, kBatchNormInference, and + kBatchNormGrad. + """ + channel_id: builtins.int + """Represents a unique identifier for each Send/Recv instruction pair or + optionally for collective instructions (AllReduce, CollectivePermute, + AllToAll). Non-positive channel_id is equivalent to no channel id. + """ + infeed_config: builtins.bytes + """The string representation of the infeed configuration.""" + custom_call_target: builtins.str + """Name of a external target (eg, global symbol) to call, only present for + kCustomCall. 
+ """ + fft_type: tensorflow.compiler.xla.xla_data_pb2.FftType.ValueType + """FFT type (FFT, IFFT, etc).""" + comparison_direction: builtins.str + """Comparison direction only used for kCompare.""" + id: builtins.int + """The id of this instruction.""" + backend_config: builtins.bytes + """Backend configuration for the instruction. Has backend-specific meaning.""" + all_reduce_id: builtins.int + """Deprecated, but keeping it for backward compatibility. Use channel_id. + Non-positive all_reduce_id is equivalent to no all_reduce_id. + """ + use_global_device_ids: builtins.bool + """If true, interprets ids in ReplicaGroup as global device ids, which is + a linearized id of `replica_id * partition_count + partition_id`. + """ + is_host_transfer: builtins.bool + """Whether this Send/Recv instruction transfers data to/from the host. Only + present for Send and Recv instructions and their SendDone and RecvDone + partners. + """ + is_stable: builtins.bool + """Whether this Sort instruction should be stable.""" + constrain_layout: builtins.bool + """For custom call this indicates that the layouts are constrained. If + constrain_layout is true then the 'shape' field must contain a layout, and + 'operand_shapes_with_layout' must contain a shape with layout for each + operand. + """ + custom_call_has_side_effect: builtins.bool + """Whether the kCustomCall instruction has side-effects, only present for + kCustomCall. + """ + custom_call_schedule: global___CustomCallSchedule.ValueType + """Specifies the desired schedule for the custom-call. The field is only + present for custom-call. + """ + delta: builtins.int + """The delta value for kRngGetAndUpdateState.""" + indices_are_sorted: builtins.bool + """Specifies if the gather/scatter indices are guaranteed to be sorted by the + caller. + """ + unique_indices: builtins.bool + """Specifies if all elements updated are guaranteed to be unique by + the caller. 
+ """ + rng_algorithm: tensorflow.compiler.xla.xla_data_pb2.RandomAlgorithm.ValueType + """RNG algorithm used by kRngBitGenerator.""" + comparison_type: builtins.str + """The comparison type used for kCompare.""" + is_cross_program_prefetch: builtins.bool + """Specifies if this is a cross-program-prefetch, used by kCopyStart. + Deprecated and replaced by optional_cross_program_prefetch_index. + """ + cross_program_prefetch_index: builtins.int + padding_type: tensorflow.compiler.xla.xla_data_pb2.PaddingType.ValueType + """If a convolution is dynamic, a dynamic padding type will be specified.""" + custom_call_api_version: global___CustomCallApiVersion.ValueType + """The API version used by the custom call function. This field is only + present for custom-call. + TODO(b/189822916): Remove this field when all clients are migrated to the + status-returning API. + """ + async_execution_thread: builtins.str + """Represents a unique execution thread name for one or more async groups. + Each HLO module may contain a main thread and one or more parallel threads. + Empty async_execution_thread is equivalent to main thread. + """ + k: builtins.int + """Represents the K value for top-k.""" + largest: builtins.bool + """Represents the largest flag for top-k.""" + is_composite: builtins.bool + """Specifies if a call instruction is a composite.""" + @property + def shape(self) -> tensorflow.compiler.xla.xla_data_pb2.ShapeProto: ... + @property + def metadata(self) -> tensorflow.compiler.xla.xla_data_pb2.OpMetadata: ... + @property + def literal(self) -> tensorflow.compiler.xla.xla_data_pb2.LiteralProto: + """Literal, only present for kConstant.""" + + @property + def dimensions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Dimensions present for some operations that require reshaping or + broadcasting, including Reshape, Reduce, ReduceWindow, and Reverse. 
+ """ + + @property + def window(self) -> tensorflow.compiler.xla.xla_data_pb2.Window: + """Describes the window in a windowed operation such as convolution.""" + + @property + def convolution_dimension_numbers(self) -> tensorflow.compiler.xla.xla_data_pb2.ConvolutionDimensionNumbers: + """Describes the dimension numbers used for a convolution.""" + + @property + def slice_dimensions( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloInstructionProto.SliceDimensions]: ... + @property + def dynamic_slice_sizes(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Describes the [start, start + size) range size for a dynamic slice + ('start' is specified dynamically in the second operand of the operation). + """ + + @property + def padding_config(self) -> tensorflow.compiler.xla.xla_data_pb2.PaddingConfig: + """The padding configuration that describes the edge padding and interior + padding of this pad instruction. Only set for pad instructions. + """ + + @property + def outfeed_shape(self) -> tensorflow.compiler.xla.xla_data_pb2.ShapeProto: + """Shape of outfeed request.""" + + @property + def dot_dimension_numbers(self) -> tensorflow.compiler.xla.xla_data_pb2.DotDimensionNumbers: + """Describes the dimension numbers used for a dot operation""" + + @property + def fft_length(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """FFT length.""" + + @property + def gather_dimension_numbers(self) -> tensorflow.compiler.xla.xla_data_pb2.GatherDimensionNumbers: + """Gather dimension numbers.""" + + @property + def gather_slice_sizes(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def operand_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... 
+ @property + def control_predecessor_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def called_computation_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def sharding(self) -> tensorflow.compiler.xla.xla_data_pb2.OpSharding: ... + @property + def replica_groups( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.ReplicaGroup]: + """Deprecated, but keeping for backward compatibility. + Use collective_device_list. Cross replica op fields. + """ + + @property + def scatter_dimension_numbers(self) -> tensorflow.compiler.xla.xla_data_pb2.ScatterDimensionNumbers: ... + @property + def precision_config(self) -> tensorflow.compiler.xla.xla_data_pb2.PrecisionConfig: + """Precision configuration for the instruction. Has backend-specific meaning.""" + + @property + def source_target_pairs( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.SourceTarget]: + """Collective permute field.""" + + @property + def domain_entry_sharding(self) -> tensorflow.compiler.xla.xla_data_pb2.OpSharding: + """Sharding for kDomain instructions.""" + + @property + def domain_exit_sharding(self) -> tensorflow.compiler.xla.xla_data_pb2.OpSharding: ... + @property + def operand_shapes_with_layout( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.ShapeProto]: ... 
+ @property + def triangular_solve_options(self) -> tensorflow.compiler.xla.xla_data_pb2.TriangularSolveOptions: + """Options for TriangularSolve""" + + @property + def cholesky_options(self) -> tensorflow.compiler.xla.xla_data_pb2.CholeskyOptions: + """Options for Cholesky""" + + @property + def parameter_replication(self) -> tensorflow.compiler.xla.xla_data_pb2.ParameterReplication: + """Describes how parameters behave with regards to replicas.""" + + @property + def output_operand_aliasing( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.compiler.xla.xla_data_pb2.OutputOperandAliasing + ]: + """A list of OutputOperandAliasing pairs that specifies aliasing buffers + between output and operands for kCustomCall and kFusion. + """ + + @property + def frontend_attributes(self) -> tensorflow.compiler.xla.xla_data_pb2.FrontendAttributes: + """Frontend attributes to pass to the XLA backend.""" + + @property + def statistics_viz(self) -> tensorflow.compiler.xla.xla_data_pb2.StatisticsViz: + """Represents the information for tracking propagation of values within HLO + graph. 
+ """ + + @property + def dot_sparsity( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.compiler.xla.xla_data_pb2.SparsityDescriptor + ]: + """Sparsity descriptor for dot operation.""" + + @property + def collective_device_list(self) -> tensorflow.compiler.xla.xla_data_pb2.CollectiveDeviceListProto: + """Represents the list of devices that participate in a collective operation.""" + + @property + def original_value(self) -> tensorflow.compiler.xla.xla_data_pb2.OriginalValueProto: + """For HLO value tracking.""" + + def __init__( + self, + *, + name: builtins.str | None = ..., + opcode: builtins.str | None = ..., + shape: tensorflow.compiler.xla.xla_data_pb2.ShapeProto | None = ..., + metadata: tensorflow.compiler.xla.xla_data_pb2.OpMetadata | None = ..., + literal: tensorflow.compiler.xla.xla_data_pb2.LiteralProto | None = ..., + parameter_number: builtins.int | None = ..., + fusion_kind: builtins.str | None = ..., + tuple_index: builtins.int | None = ..., + dimensions: collections.abc.Iterable[builtins.int] | None = ..., + window: tensorflow.compiler.xla.xla_data_pb2.Window | None = ..., + convolution_dimension_numbers: tensorflow.compiler.xla.xla_data_pb2.ConvolutionDimensionNumbers | None = ..., + feature_group_count: builtins.int | None = ..., + batch_group_count: builtins.int | None = ..., + slice_dimensions: collections.abc.Iterable[global___HloInstructionProto.SliceDimensions] | None = ..., + exponent_bits: builtins.int | None = ..., + mantissa_bits: builtins.int | None = ..., + dynamic_slice_sizes: collections.abc.Iterable[builtins.int] | None = ..., + padding_config: tensorflow.compiler.xla.xla_data_pb2.PaddingConfig | None = ..., + outfeed_config: builtins.bytes | None = ..., + distribution: tensorflow.compiler.xla.xla_data_pb2.RandomDistribution.ValueType | None = ..., + epsilon: builtins.float | None = ..., + feature_index: builtins.int | None = ..., + channel_id: builtins.int | None = ..., + 
infeed_config: builtins.bytes | None = ..., + custom_call_target: builtins.str | None = ..., + outfeed_shape: tensorflow.compiler.xla.xla_data_pb2.ShapeProto | None = ..., + dot_dimension_numbers: tensorflow.compiler.xla.xla_data_pb2.DotDimensionNumbers | None = ..., + fft_type: tensorflow.compiler.xla.xla_data_pb2.FftType.ValueType | None = ..., + fft_length: collections.abc.Iterable[builtins.int] | None = ..., + comparison_direction: builtins.str | None = ..., + gather_dimension_numbers: tensorflow.compiler.xla.xla_data_pb2.GatherDimensionNumbers | None = ..., + gather_slice_sizes: collections.abc.Iterable[builtins.int] | None = ..., + id: builtins.int | None = ..., + operand_ids: collections.abc.Iterable[builtins.int] | None = ..., + control_predecessor_ids: collections.abc.Iterable[builtins.int] | None = ..., + called_computation_ids: collections.abc.Iterable[builtins.int] | None = ..., + sharding: tensorflow.compiler.xla.xla_data_pb2.OpSharding | None = ..., + backend_config: builtins.bytes | None = ..., + replica_groups: collections.abc.Iterable[tensorflow.compiler.xla.xla_data_pb2.ReplicaGroup] | None = ..., + all_reduce_id: builtins.int | None = ..., + use_global_device_ids: builtins.bool | None = ..., + is_host_transfer: builtins.bool | None = ..., + is_stable: builtins.bool | None = ..., + scatter_dimension_numbers: tensorflow.compiler.xla.xla_data_pb2.ScatterDimensionNumbers | None = ..., + precision_config: tensorflow.compiler.xla.xla_data_pb2.PrecisionConfig | None = ..., + source_target_pairs: collections.abc.Iterable[tensorflow.compiler.xla.xla_data_pb2.SourceTarget] | None = ..., + domain_entry_sharding: tensorflow.compiler.xla.xla_data_pb2.OpSharding | None = ..., + domain_exit_sharding: tensorflow.compiler.xla.xla_data_pb2.OpSharding | None = ..., + constrain_layout: builtins.bool | None = ..., + operand_shapes_with_layout: collections.abc.Iterable[tensorflow.compiler.xla.xla_data_pb2.ShapeProto] | None = ..., + triangular_solve_options: 
tensorflow.compiler.xla.xla_data_pb2.TriangularSolveOptions | None = ..., + cholesky_options: tensorflow.compiler.xla.xla_data_pb2.CholeskyOptions | None = ..., + parameter_replication: tensorflow.compiler.xla.xla_data_pb2.ParameterReplication | None = ..., + custom_call_has_side_effect: builtins.bool | None = ..., + output_operand_aliasing: ( + collections.abc.Iterable[tensorflow.compiler.xla.xla_data_pb2.OutputOperandAliasing] | None + ) = ..., + custom_call_schedule: global___CustomCallSchedule.ValueType | None = ..., + delta: builtins.int | None = ..., + indices_are_sorted: builtins.bool | None = ..., + frontend_attributes: tensorflow.compiler.xla.xla_data_pb2.FrontendAttributes | None = ..., + unique_indices: builtins.bool | None = ..., + rng_algorithm: tensorflow.compiler.xla.xla_data_pb2.RandomAlgorithm.ValueType | None = ..., + comparison_type: builtins.str | None = ..., + is_cross_program_prefetch: builtins.bool | None = ..., + cross_program_prefetch_index: builtins.int | None = ..., + padding_type: tensorflow.compiler.xla.xla_data_pb2.PaddingType.ValueType | None = ..., + custom_call_api_version: global___CustomCallApiVersion.ValueType | None = ..., + async_execution_thread: builtins.str | None = ..., + k: builtins.int | None = ..., + largest: builtins.bool | None = ..., + statistics_viz: tensorflow.compiler.xla.xla_data_pb2.StatisticsViz | None = ..., + dot_sparsity: collections.abc.Iterable[tensorflow.compiler.xla.xla_data_pb2.SparsityDescriptor] | None = ..., + collective_device_list: tensorflow.compiler.xla.xla_data_pb2.CollectiveDeviceListProto | None = ..., + original_value: tensorflow.compiler.xla.xla_data_pb2.OriginalValueProto | None = ..., + is_composite: builtins.bool | None = ..., + ) -> None: ... 
+ def HasField( + self, + field_name: typing.Literal[ + "cholesky_options", + b"cholesky_options", + "collective_device_list", + b"collective_device_list", + "convolution_dimension_numbers", + b"convolution_dimension_numbers", + "cross_program_prefetch_index", + b"cross_program_prefetch_index", + "domain_entry_sharding", + b"domain_entry_sharding", + "domain_exit_sharding", + b"domain_exit_sharding", + "dot_dimension_numbers", + b"dot_dimension_numbers", + "frontend_attributes", + b"frontend_attributes", + "gather_dimension_numbers", + b"gather_dimension_numbers", + "literal", + b"literal", + "metadata", + b"metadata", + "optional_cross_program_prefetch_index", + b"optional_cross_program_prefetch_index", + "original_value", + b"original_value", + "outfeed_shape", + b"outfeed_shape", + "padding_config", + b"padding_config", + "parameter_replication", + b"parameter_replication", + "precision_config", + b"precision_config", + "scatter_dimension_numbers", + b"scatter_dimension_numbers", + "shape", + b"shape", + "sharding", + b"sharding", + "statistics_viz", + b"statistics_viz", + "triangular_solve_options", + b"triangular_solve_options", + "window", + b"window", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "all_reduce_id", + b"all_reduce_id", + "async_execution_thread", + b"async_execution_thread", + "backend_config", + b"backend_config", + "batch_group_count", + b"batch_group_count", + "called_computation_ids", + b"called_computation_ids", + "channel_id", + b"channel_id", + "cholesky_options", + b"cholesky_options", + "collective_device_list", + b"collective_device_list", + "comparison_direction", + b"comparison_direction", + "comparison_type", + b"comparison_type", + "constrain_layout", + b"constrain_layout", + "control_predecessor_ids", + b"control_predecessor_ids", + "convolution_dimension_numbers", + b"convolution_dimension_numbers", + "cross_program_prefetch_index", + b"cross_program_prefetch_index", + "custom_call_api_version", + b"custom_call_api_version", + "custom_call_has_side_effect", + b"custom_call_has_side_effect", + "custom_call_schedule", + b"custom_call_schedule", + "custom_call_target", + b"custom_call_target", + "delta", + b"delta", + "dimensions", + b"dimensions", + "distribution", + b"distribution", + "domain_entry_sharding", + b"domain_entry_sharding", + "domain_exit_sharding", + b"domain_exit_sharding", + "dot_dimension_numbers", + b"dot_dimension_numbers", + "dot_sparsity", + b"dot_sparsity", + "dynamic_slice_sizes", + b"dynamic_slice_sizes", + "epsilon", + b"epsilon", + "exponent_bits", + b"exponent_bits", + "feature_group_count", + b"feature_group_count", + "feature_index", + b"feature_index", + "fft_length", + b"fft_length", + "fft_type", + b"fft_type", + "frontend_attributes", + b"frontend_attributes", + "fusion_kind", + b"fusion_kind", + "gather_dimension_numbers", + b"gather_dimension_numbers", + "gather_slice_sizes", + b"gather_slice_sizes", + "id", + b"id", + "indices_are_sorted", + b"indices_are_sorted", + "infeed_config", + b"infeed_config", + "is_composite", + b"is_composite", + "is_cross_program_prefetch", + b"is_cross_program_prefetch", + "is_host_transfer", + 
b"is_host_transfer", + "is_stable", + b"is_stable", + "k", + b"k", + "largest", + b"largest", + "literal", + b"literal", + "mantissa_bits", + b"mantissa_bits", + "metadata", + b"metadata", + "name", + b"name", + "opcode", + b"opcode", + "operand_ids", + b"operand_ids", + "operand_shapes_with_layout", + b"operand_shapes_with_layout", + "optional_cross_program_prefetch_index", + b"optional_cross_program_prefetch_index", + "original_value", + b"original_value", + "outfeed_config", + b"outfeed_config", + "outfeed_shape", + b"outfeed_shape", + "output_operand_aliasing", + b"output_operand_aliasing", + "padding_config", + b"padding_config", + "padding_type", + b"padding_type", + "parameter_number", + b"parameter_number", + "parameter_replication", + b"parameter_replication", + "precision_config", + b"precision_config", + "replica_groups", + b"replica_groups", + "rng_algorithm", + b"rng_algorithm", + "scatter_dimension_numbers", + b"scatter_dimension_numbers", + "shape", + b"shape", + "sharding", + b"sharding", + "slice_dimensions", + b"slice_dimensions", + "source_target_pairs", + b"source_target_pairs", + "statistics_viz", + b"statistics_viz", + "triangular_solve_options", + b"triangular_solve_options", + "tuple_index", + b"tuple_index", + "unique_indices", + b"unique_indices", + "use_global_device_ids", + b"use_global_device_ids", + "window", + b"window", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_cross_program_prefetch_index", b"optional_cross_program_prefetch_index"] + ) -> typing.Literal["cross_program_prefetch_index"] | None: ... 
+ +global___HloInstructionProto = HloInstructionProto + +@typing.final +class HloComputationProto(google.protobuf.message.Message): + """Serialization of HloComputation.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + INSTRUCTIONS_FIELD_NUMBER: builtins.int + PROGRAM_SHAPE_FIELD_NUMBER: builtins.int + ID_FIELD_NUMBER: builtins.int + ROOT_ID_FIELD_NUMBER: builtins.int + IS_FUSION_COMPUTATION_FIELD_NUMBER: builtins.int + EXECUTION_THREAD_FIELD_NUMBER: builtins.int + name: builtins.str + id: builtins.int + """The id of this computation.""" + root_id: builtins.int + """The id of the root of the computation.""" + is_fusion_computation: builtins.bool + """Whether this is a fusion computation. Fusion computations should use this + to determine whether they are a fusion in CreateFromProto since the + parent fusion_instruction_ may get removed and be nullptr. + """ + execution_thread: builtins.str + """The name of execution thread this computation belongs to.""" + @property + def instructions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloInstructionProto]: + """The array of instructions is always in a valid dependency order, where + operands appear before their users. + """ + + @property + def program_shape(self) -> tensorflow.compiler.xla.xla_data_pb2.ProgramShapeProto: + """The program shape (with layout) of this computation.""" + + def __init__( + self, + *, + name: builtins.str | None = ..., + instructions: collections.abc.Iterable[global___HloInstructionProto] | None = ..., + program_shape: tensorflow.compiler.xla.xla_data_pb2.ProgramShapeProto | None = ..., + id: builtins.int | None = ..., + root_id: builtins.int | None = ..., + is_fusion_computation: builtins.bool | None = ..., + execution_thread: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["program_shape", b"program_shape"]) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "execution_thread", + b"execution_thread", + "id", + b"id", + "instructions", + b"instructions", + "is_fusion_computation", + b"is_fusion_computation", + "name", + b"name", + "program_shape", + b"program_shape", + "root_id", + b"root_id", + ], + ) -> None: ... + +global___HloComputationProto = HloComputationProto + +@typing.final +class HloScheduleProto(google.protobuf.message.Message): + """Serialization of an HLO schedule. An HLO schedule contains a total order of + instructions for each non-fusion computation in the module. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class InstructionSequence(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INSTRUCTION_IDS_FIELD_NUMBER: builtins.int + @property + def instruction_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__(self, *, instruction_ids: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["instruction_ids", b"instruction_ids"]) -> None: ... + + @typing.final + class SequencesEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.int + @property + def value(self) -> global___HloScheduleProto.InstructionSequence: ... + def __init__( + self, *, key: builtins.int | None = ..., value: global___HloScheduleProto.InstructionSequence | None = ... + ) -> None: ... + def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + SEQUENCES_FIELD_NUMBER: builtins.int + @property + def sequences( + self, + ) -> google.protobuf.internal.containers.MessageMap[builtins.int, global___HloScheduleProto.InstructionSequence]: + """Map from computation id to sequence.""" + + def __init__( + self, *, sequences: collections.abc.Mapping[builtins.int, global___HloScheduleProto.InstructionSequence] | None = ... + ) -> None: ... + def ClearField(self, field_name: typing.Literal["sequences", b"sequences"]) -> None: ... + +global___HloScheduleProto = HloScheduleProto + +@typing.final +class HloInputOutputAliasProto(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class AliasEntryProto(google.protobuf.message.Message): + """The following proto describes a pair of aliased an input + (described by parameter number and a ShapeIndex of the parameter) + and an output (described by a ShapeIndex of the root + instruction). For example: + + entry = { + output_shape_index={1}, + parameter_number=0, + parameter_shape_index={1, 2}, + } + + This entry indicates that the first parameter's {1, 2} element is + aliased with the {1} element of the root instruction. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + OUTPUT_SHAPE_INDEX_FIELD_NUMBER: builtins.int + PARAMETER_NUMBER_FIELD_NUMBER: builtins.int + PARAMETER_SHAPE_INDEX_FIELD_NUMBER: builtins.int + KIND_FIELD_NUMBER: builtins.int + parameter_number: builtins.int + """Number of the parameter in entry computation.""" + kind: global___Kind.ValueType + """The kind of alias to be setup.""" + @property + def output_shape_index(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """ShapeIndex of the root hlo.""" + + @property + def parameter_shape_index(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """ShapeIndex of the parameter instruction.""" + + def __init__( + self, + *, + output_shape_index: collections.abc.Iterable[builtins.int] | None = ..., + parameter_number: builtins.int | None = ..., + parameter_shape_index: collections.abc.Iterable[builtins.int] | None = ..., + kind: global___Kind.ValueType | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "kind", + b"kind", + "output_shape_index", + b"output_shape_index", + "parameter_number", + b"parameter_number", + "parameter_shape_index", + b"parameter_shape_index", + ], + ) -> None: ... + + ENTRIES_FIELD_NUMBER: builtins.int + @property + def entries( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___HloInputOutputAliasProto.AliasEntryProto + ]: ... + def __init__( + self, *, entries: collections.abc.Iterable[global___HloInputOutputAliasProto.AliasEntryProto] | None = ... + ) -> None: ... + def ClearField(self, field_name: typing.Literal["entries", b"entries"]) -> None: ... 
+ +global___HloInputOutputAliasProto = HloInputOutputAliasProto + +@typing.final +class HloBufferDonorProto(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class BufferDonorEntryProto(google.protobuf.message.Message): + """The following proto describes an input (described by parameter number and a + ShapeIndex of the parameter) that can donate its butter to any output + tensor. It is similar to HloInputOutputAliasProto, but without a paired + output. For example: + + entry = { + parameter_number=0, + parameter_shape_index={1, 2}, + } + + This entry indicates that the first parameter's {1, 2} element can donate + its buffer. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PARAMETER_NUMBER_FIELD_NUMBER: builtins.int + PARAMETER_SHAPE_INDEX_FIELD_NUMBER: builtins.int + parameter_number: builtins.int + """Number of the parameter in entry computation.""" + @property + def parameter_shape_index(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """ShapeIndex of the parameter instruction.""" + + def __init__( + self, + *, + parameter_number: builtins.int | None = ..., + parameter_shape_index: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "parameter_number", b"parameter_number", "parameter_shape_index", b"parameter_shape_index" + ], + ) -> None: ... + + ENTRIES_FIELD_NUMBER: builtins.int + @property + def entries( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___HloBufferDonorProto.BufferDonorEntryProto + ]: ... + def __init__( + self, *, entries: collections.abc.Iterable[global___HloBufferDonorProto.BufferDonorEntryProto] | None = ... + ) -> None: ... + def ClearField(self, field_name: typing.Literal["entries", b"entries"]) -> None: ... 
+ +global___HloBufferDonorProto = HloBufferDonorProto + +@typing.final +class CrossProgramPrefetch(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PARAMETER_FIELD_NUMBER: builtins.int + INDEX_FIELD_NUMBER: builtins.int + OFFSET_FIELD_NUMBER: builtins.int + parameter: builtins.int + offset: builtins.int + @property + def index(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__( + self, + *, + parameter: builtins.int | None = ..., + index: collections.abc.Iterable[builtins.int] | None = ..., + offset: builtins.int | None = ..., + ) -> None: ... + def ClearField( + self, field_name: typing.Literal["index", b"index", "offset", b"offset", "parameter", b"parameter"] + ) -> None: ... + +global___CrossProgramPrefetch = CrossProgramPrefetch + +@typing.final +class StackFrameIndexProto(google.protobuf.message.Message): + """Serialization of stack frames index representations. + Stack frames index presented in four flat arrays: + 1. File names array. + 2. Function names array. + 3. File location array. + 4. Frame array. + All reference ids in sub-protos are 1-based positions of the + entity in the flat array. + Ids are 1-based to keep 0 value as representation of non-set property. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class FileLocation(google.protobuf.message.Message): + """Serialization of file position.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FILE_NAME_ID_FIELD_NUMBER: builtins.int + FUNCTION_NAME_ID_FIELD_NUMBER: builtins.int + LINE_FIELD_NUMBER: builtins.int + COLUMN_FIELD_NUMBER: builtins.int + file_name_id: builtins.int + """1-based position of file name.""" + function_name_id: builtins.int + """1-based position of function name.""" + line: builtins.int + """Line number.""" + column: builtins.int + """Column number.""" + def __init__( + self, + *, + file_name_id: builtins.int | None = ..., + function_name_id: builtins.int | None = ..., + line: builtins.int | None = ..., + column: builtins.int | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "column", b"column", "file_name_id", b"file_name_id", "function_name_id", b"function_name_id", "line", b"line" + ], + ) -> None: ... + + @typing.final + class StackFrame(google.protobuf.message.Message): + """Serialization of frame.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FILE_LOCATION_ID_FIELD_NUMBER: builtins.int + PARENT_FRAME_ID_FIELD_NUMBER: builtins.int + file_location_id: builtins.int + """1-based position of file location.""" + parent_frame_id: builtins.int + """1-based position of the parent frame.""" + def __init__( + self, *, file_location_id: builtins.int | None = ..., parent_frame_id: builtins.int | None = ... + ) -> None: ... + def ClearField( + self, field_name: typing.Literal["file_location_id", b"file_location_id", "parent_frame_id", b"parent_frame_id"] + ) -> None: ... 
+ + FILE_NAMES_FIELD_NUMBER: builtins.int + FUNCTION_NAMES_FIELD_NUMBER: builtins.int + FILE_LOCATIONS_FIELD_NUMBER: builtins.int + STACK_FRAMES_FIELD_NUMBER: builtins.int + @property + def file_names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """Flat index array of file names.""" + + @property + def function_names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """Flat index array of function names.""" + + @property + def file_locations( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StackFrameIndexProto.FileLocation]: + """Flat index array of file locations.""" + + @property + def stack_frames( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StackFrameIndexProto.StackFrame]: + """Flat index array of frames.""" + + def __init__( + self, + *, + file_names: collections.abc.Iterable[builtins.str] | None = ..., + function_names: collections.abc.Iterable[builtins.str] | None = ..., + file_locations: collections.abc.Iterable[global___StackFrameIndexProto.FileLocation] | None = ..., + stack_frames: collections.abc.Iterable[global___StackFrameIndexProto.StackFrame] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "file_locations", + b"file_locations", + "file_names", + b"file_names", + "function_names", + b"function_names", + "stack_frames", + b"stack_frames", + ], + ) -> None: ... 
+ +global___StackFrameIndexProto = StackFrameIndexProto + +@typing.final +class HloModuleProto(google.protobuf.message.Message): + """Serialization of HloModule.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _ProfileType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _ProfileTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[HloModuleProto._ProfileType.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + INVALID: HloModuleProto._ProfileType.ValueType # 0 + FLAG: HloModuleProto._ProfileType.ValueType # 1 + FUSION: HloModuleProto._ProfileType.ValueType # 2 + LAYOUT: HloModuleProto._ProfileType.ValueType # 3 + DOT: HloModuleProto._ProfileType.ValueType # 4 + FLAGNET: HloModuleProto._ProfileType.ValueType # 5 + + class ProfileType(_ProfileType, metaclass=_ProfileTypeEnumTypeWrapper): + """The type of optimization profile in use for module-level optimizations.""" + + INVALID: HloModuleProto.ProfileType.ValueType # 0 + FLAG: HloModuleProto.ProfileType.ValueType # 1 + FUSION: HloModuleProto.ProfileType.ValueType # 2 + LAYOUT: HloModuleProto.ProfileType.ValueType # 3 + DOT: HloModuleProto.ProfileType.ValueType # 4 + FLAGNET: HloModuleProto.ProfileType.ValueType # 5 + + @typing.final + class ProfileInfo(google.protobuf.message.Message): + """Information about the optimization profile that this module contains.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROFILE_TYPE_FIELD_NUMBER: builtins.int + RELATIVE_SPEEDUP_FIELD_NUMBER: builtins.int + PROFILE_SOURCE_FIELD_NUMBER: builtins.int + COMPILATION_EVENT_FIELD_NUMBER: builtins.int + FINGERPRINT_FIELD_NUMBER: builtins.int + profile_type: global___HloModuleProto.ProfileType.ValueType + """The optimization profiles that this module contains.""" + relative_speedup: builtins.float + """Speedup of tuned config compared to default config.""" + profile_source: 
tensorflow.compiler.xla.xla_data_pb2.ProfileSource.ValueType + """The source of the optimization profile that this module contains.""" + compilation_event: tensorflow.compiler.xla.xla_data_pb2.CompilationEvent.ValueType + """The compilation event that triggered the use of the profile.""" + fingerprint: builtins.str + """The fingerprint of the unoptimized module this profile was applied to.""" + def __init__( + self, + *, + profile_type: global___HloModuleProto.ProfileType.ValueType | None = ..., + relative_speedup: builtins.float | None = ..., + profile_source: tensorflow.compiler.xla.xla_data_pb2.ProfileSource.ValueType | None = ..., + compilation_event: tensorflow.compiler.xla.xla_data_pb2.CompilationEvent.ValueType | None = ..., + fingerprint: builtins.str | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "compilation_event", + b"compilation_event", + "fingerprint", + b"fingerprint", + "profile_source", + b"profile_source", + "profile_type", + b"profile_type", + "relative_speedup", + b"relative_speedup", + ], + ) -> None: ... 
+ + NAME_FIELD_NUMBER: builtins.int + ENTRY_COMPUTATION_NAME_FIELD_NUMBER: builtins.int + ENTRY_COMPUTATION_ID_FIELD_NUMBER: builtins.int + COMPUTATIONS_FIELD_NUMBER: builtins.int + HOST_PROGRAM_SHAPE_FIELD_NUMBER: builtins.int + ID_FIELD_NUMBER: builtins.int + SCHEDULE_FIELD_NUMBER: builtins.int + INPUT_OUTPUT_ALIAS_FIELD_NUMBER: builtins.int + BUFFER_DONOR_FIELD_NUMBER: builtins.int + CROSS_PROGRAM_PREFETCHES_FIELD_NUMBER: builtins.int + IS_DYNAMIC_FIELD_NUMBER: builtins.int + SPMD_OUTPUT_SHARDING_FIELD_NUMBER: builtins.int + SPMD_PARAMETERS_SHARDINGS_FIELD_NUMBER: builtins.int + USE_AUTO_SPMD_PARTITIONING_FIELD_NUMBER: builtins.int + PROFILE_INFO_FIELD_NUMBER: builtins.int + DEVICE_ASSIGNMENT_FIELD_NUMBER: builtins.int + STACK_FRAME_INDEX_FIELD_NUMBER: builtins.int + FRONTEND_ATTRIBUTES_FIELD_NUMBER: builtins.int + name: builtins.str + entry_computation_name: builtins.str + entry_computation_id: builtins.int + id: builtins.int + """The id of this module.""" + is_dynamic: builtins.bool + """True if the module contains dynamic computation.""" + use_auto_spmd_partitioning: builtins.bool + """Uses AutoSharding pass or not.""" + @property + def computations(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloComputationProto]: + """The array of computations is always in a valid dependency order, where + callees appear before their callers. 
+ """ + + @property + def host_program_shape(self) -> tensorflow.compiler.xla.xla_data_pb2.ProgramShapeProto: + """The host program shape (with layout) of the entry computation.""" + + @property + def schedule(self) -> global___HloScheduleProto: + """The schedule for this module.""" + + @property + def input_output_alias(self) -> global___HloInputOutputAliasProto: + """Describes alias information between inputs and outputs.""" + + @property + def buffer_donor(self) -> global___HloBufferDonorProto: + """Describes the information of input buffer donors.""" + + @property + def cross_program_prefetches( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CrossProgramPrefetch]: ... + @property + def spmd_output_sharding(self) -> tensorflow.compiler.xla.xla_data_pb2.OpSharding: ... + @property + def spmd_parameters_shardings( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.OpSharding]: ... 
+ @property + def profile_info( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleProto.ProfileInfo]: + """Profile information for the HLO module.""" + + @property + def device_assignment(self) -> tensorflow.compiler.xla.xla_data_pb2.DeviceAssignmentProto: + """DeviceAssignment object information.""" + + @property + def stack_frame_index(self) -> global___StackFrameIndexProto: + """Stack frames index.""" + + @property + def frontend_attributes(self) -> tensorflow.compiler.xla.xla_data_pb2.FrontendAttributes: + """Frontend attributes to pass to the XLA backend.""" + + def __init__( + self, + *, + name: builtins.str | None = ..., + entry_computation_name: builtins.str | None = ..., + entry_computation_id: builtins.int | None = ..., + computations: collections.abc.Iterable[global___HloComputationProto] | None = ..., + host_program_shape: tensorflow.compiler.xla.xla_data_pb2.ProgramShapeProto | None = ..., + id: builtins.int | None = ..., + schedule: global___HloScheduleProto | None = ..., + input_output_alias: global___HloInputOutputAliasProto | None = ..., + buffer_donor: global___HloBufferDonorProto | None = ..., + cross_program_prefetches: collections.abc.Iterable[global___CrossProgramPrefetch] | None = ..., + is_dynamic: builtins.bool | None = ..., + spmd_output_sharding: tensorflow.compiler.xla.xla_data_pb2.OpSharding | None = ..., + spmd_parameters_shardings: collections.abc.Iterable[tensorflow.compiler.xla.xla_data_pb2.OpSharding] | None = ..., + use_auto_spmd_partitioning: builtins.bool | None = ..., + profile_info: collections.abc.Iterable[global___HloModuleProto.ProfileInfo] | None = ..., + device_assignment: tensorflow.compiler.xla.xla_data_pb2.DeviceAssignmentProto | None = ..., + stack_frame_index: global___StackFrameIndexProto | None = ..., + frontend_attributes: tensorflow.compiler.xla.xla_data_pb2.FrontendAttributes | None = ..., + ) -> None: ... 
+ def HasField( + self, + field_name: typing.Literal[ + "buffer_donor", + b"buffer_donor", + "device_assignment", + b"device_assignment", + "frontend_attributes", + b"frontend_attributes", + "host_program_shape", + b"host_program_shape", + "input_output_alias", + b"input_output_alias", + "schedule", + b"schedule", + "spmd_output_sharding", + b"spmd_output_sharding", + "stack_frame_index", + b"stack_frame_index", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "buffer_donor", + b"buffer_donor", + "computations", + b"computations", + "cross_program_prefetches", + b"cross_program_prefetches", + "device_assignment", + b"device_assignment", + "entry_computation_id", + b"entry_computation_id", + "entry_computation_name", + b"entry_computation_name", + "frontend_attributes", + b"frontend_attributes", + "host_program_shape", + b"host_program_shape", + "id", + b"id", + "input_output_alias", + b"input_output_alias", + "is_dynamic", + b"is_dynamic", + "name", + b"name", + "profile_info", + b"profile_info", + "schedule", + b"schedule", + "spmd_output_sharding", + b"spmd_output_sharding", + "spmd_parameters_shardings", + b"spmd_parameters_shardings", + "stack_frame_index", + b"stack_frame_index", + "use_auto_spmd_partitioning", + b"use_auto_spmd_partitioning", + ], + ) -> None: ... + +global___HloModuleProto = HloModuleProto + +@typing.final +class LogicalBufferProto(google.protobuf.message.Message): + """Serialization of LogicalBuffer.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class Location(google.protobuf.message.Message): + """Location represents an instruction and its shape index, which uniquely + identifies a point where a buffer is needed. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INSTRUCTION_NAME_FIELD_NUMBER: builtins.int + INSTRUCTION_ID_FIELD_NUMBER: builtins.int + SHAPE_INDEX_FIELD_NUMBER: builtins.int + instruction_name: builtins.str + """TODO(b/239098765): Remove instruction_name and computation_name.""" + instruction_id: builtins.int + @property + def shape_index(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__( + self, + *, + instruction_name: builtins.str | None = ..., + instruction_id: builtins.int | None = ..., + shape_index: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "instruction_id", b"instruction_id", "instruction_name", b"instruction_name", "shape_index", b"shape_index" + ], + ) -> None: ... + + ID_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + DEFINED_AT_FIELD_NUMBER: builtins.int + COLOR_FIELD_NUMBER: builtins.int + id: builtins.int + size: builtins.int + color: builtins.int + @property + def defined_at(self) -> global___LogicalBufferProto.Location: + """The location where the buffer is defined.""" + + def __init__( + self, + *, + id: builtins.int | None = ..., + size: builtins.int | None = ..., + defined_at: global___LogicalBufferProto.Location | None = ..., + color: builtins.int | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["defined_at", b"defined_at"]) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["color", b"color", "defined_at", b"defined_at", "id", b"id", "size", b"size"] + ) -> None: ... 
+ +global___LogicalBufferProto = LogicalBufferProto + +@typing.final +class BufferAllocationProto(google.protobuf.message.Message): + """Serialization of BufferAllocation.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class Assigned(google.protobuf.message.Message): + """Assigned represents a single LogicalBuffer that is assigned to this + BufferAllocation. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LOGICAL_BUFFER_ID_FIELD_NUMBER: builtins.int + OFFSET_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + logical_buffer_id: builtins.int + offset: builtins.int + size: builtins.int + def __init__( + self, + *, + logical_buffer_id: builtins.int | None = ..., + offset: builtins.int | None = ..., + size: builtins.int | None = ..., + ) -> None: ... + def ClearField( + self, field_name: typing.Literal["logical_buffer_id", b"logical_buffer_id", "offset", b"offset", "size", b"size"] + ) -> None: ... + + INDEX_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + IS_THREAD_LOCAL_FIELD_NUMBER: builtins.int + IS_TUPLE_FIELD_NUMBER: builtins.int + IS_ENTRY_COMPUTATION_PARAMETER_FIELD_NUMBER: builtins.int + IS_CONSTANT_FIELD_NUMBER: builtins.int + PARAMETER_NUMBER_FIELD_NUMBER: builtins.int + PARAMETER_SHAPE_INDEX_FIELD_NUMBER: builtins.int + MAYBE_LIVE_OUT_FIELD_NUMBER: builtins.int + COLOR_FIELD_NUMBER: builtins.int + ASSIGNED_FIELD_NUMBER: builtins.int + index: builtins.int + size: builtins.int + is_thread_local: builtins.bool + is_tuple: builtins.bool + is_entry_computation_parameter: builtins.bool + is_constant: builtins.bool + parameter_number: builtins.int + maybe_live_out: builtins.bool + color: builtins.int + @property + def parameter_shape_index(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def assigned( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BufferAllocationProto.Assigned]: ... 
+ def __init__( + self, + *, + index: builtins.int | None = ..., + size: builtins.int | None = ..., + is_thread_local: builtins.bool | None = ..., + is_tuple: builtins.bool | None = ..., + is_entry_computation_parameter: builtins.bool | None = ..., + is_constant: builtins.bool | None = ..., + parameter_number: builtins.int | None = ..., + parameter_shape_index: collections.abc.Iterable[builtins.int] | None = ..., + maybe_live_out: builtins.bool | None = ..., + color: builtins.int | None = ..., + assigned: collections.abc.Iterable[global___BufferAllocationProto.Assigned] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "assigned", + b"assigned", + "color", + b"color", + "index", + b"index", + "is_constant", + b"is_constant", + "is_entry_computation_parameter", + b"is_entry_computation_parameter", + "is_thread_local", + b"is_thread_local", + "is_tuple", + b"is_tuple", + "maybe_live_out", + b"maybe_live_out", + "parameter_number", + b"parameter_number", + "parameter_shape_index", + b"parameter_shape_index", + "size", + b"size", + ], + ) -> None: ... + +global___BufferAllocationProto = BufferAllocationProto + +@typing.final +class HeapSimulatorTrace(google.protobuf.message.Message): + """A trace of a HeapSimulator run.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class Event(google.protobuf.message.Message): + """The trace includes a list of events, where each event describes one action + performed by the heap simulator. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _Kind: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _KindEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[HeapSimulatorTrace.Event._Kind.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + ALLOC: HeapSimulatorTrace.Event._Kind.ValueType # 0 + """A memory region was allocated for the buffer.""" + FREE: HeapSimulatorTrace.Event._Kind.ValueType # 1 + """A memory region was freed for the buffer.""" + SHARE_WITH: HeapSimulatorTrace.Event._Kind.ValueType # 2 + """A buffer was shared with another (canonical) buffer. This is similar to + ALLOC, except that instead of allocating a new region of memory, the + memory region of the canonical buffer is directly re-used. Multiple + buffers may share with the same canonical buffer. The lifetime of the + canonical buffer is extended to the union of all lifetimes. + """ + + class Kind(_Kind, metaclass=_KindEnumTypeWrapper): ... + ALLOC: HeapSimulatorTrace.Event.Kind.ValueType # 0 + """A memory region was allocated for the buffer.""" + FREE: HeapSimulatorTrace.Event.Kind.ValueType # 1 + """A memory region was freed for the buffer.""" + SHARE_WITH: HeapSimulatorTrace.Event.Kind.ValueType # 2 + """A buffer was shared with another (canonical) buffer. This is similar to + ALLOC, except that instead of allocating a new region of memory, the + memory region of the canonical buffer is directly re-used. Multiple + buffers may share with the same canonical buffer. The lifetime of the + canonical buffer is extended to the union of all lifetimes. 
+ """ + + KIND_FIELD_NUMBER: builtins.int + BUFFER_ID_FIELD_NUMBER: builtins.int + COMPUTATION_NAME_FIELD_NUMBER: builtins.int + INSTRUCTION_NAME_FIELD_NUMBER: builtins.int + SHARE_WITH_CANONICAL_ID_FIELD_NUMBER: builtins.int + kind: global___HeapSimulatorTrace.Event.Kind.ValueType + buffer_id: builtins.int + """The id of the LogicalBuffer that the event applies to.""" + computation_name: builtins.str + """The HloInstruction that the simulation was processing that caused this + event to occur, identified by its computation and instruction name. E.g. + buffers defined by instruction A are allocated when processing A. + """ + instruction_name: builtins.str + share_with_canonical_id: builtins.int + """The id of the canonical LogicalBuffer that the buffer shares with. Only + set for SHARE_WITH events. + """ + def __init__( + self, + *, + kind: global___HeapSimulatorTrace.Event.Kind.ValueType | None = ..., + buffer_id: builtins.int | None = ..., + computation_name: builtins.str | None = ..., + instruction_name: builtins.str | None = ..., + share_with_canonical_id: builtins.int | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "buffer_id", + b"buffer_id", + "computation_name", + b"computation_name", + "instruction_name", + b"instruction_name", + "kind", + b"kind", + "share_with_canonical_id", + b"share_with_canonical_id", + ], + ) -> None: ... + + EVENTS_FIELD_NUMBER: builtins.int + WHOLE_MODULE_SIMULATION_FIELD_NUMBER: builtins.int + BUFFER_ALLOCATION_INDEX_FIELD_NUMBER: builtins.int + whole_module_simulation: builtins.bool + buffer_allocation_index: builtins.int + @property + def events( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HeapSimulatorTrace.Event]: ... 
+ def __init__( + self, + *, + events: collections.abc.Iterable[global___HeapSimulatorTrace.Event] | None = ..., + whole_module_simulation: builtins.bool | None = ..., + buffer_allocation_index: builtins.int | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "buffer_allocation_index", + b"buffer_allocation_index", + "events", + b"events", + "whole_module_simulation", + b"whole_module_simulation", + ], + ) -> None: ... + +global___HeapSimulatorTrace = HeapSimulatorTrace + +@typing.final +class HloModuleGroupProto(google.protobuf.message.Message): + """An abstraction representing a set of HLO module built to run concurrently + across different devices. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + HLO_MODULES_FIELD_NUMBER: builtins.int + name: builtins.str + @property + def hlo_modules(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleProto]: ... + def __init__( + self, *, name: builtins.str | None = ..., hlo_modules: collections.abc.Iterable[global___HloModuleProto] | None = ... + ) -> None: ... + def ClearField(self, field_name: typing.Literal["hlo_modules", b"hlo_modules", "name", b"name"]) -> None: ... + +global___HloModuleGroupProto = HloModuleGroupProto + +@typing.final +class BufferAssignmentProto(google.protobuf.message.Message): + """Serialization of BufferAssignment.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class BufferAlias(google.protobuf.message.Message): + """Alias represents a source LogicalBuffer, and the buffer location that + aliases it. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SOURCE_BUFFER_ID_FIELD_NUMBER: builtins.int + LOCATION_FIELD_NUMBER: builtins.int + source_buffer_id: builtins.int + @property + def location(self) -> global___LogicalBufferProto.Location: ... 
+ def __init__( + self, *, source_buffer_id: builtins.int | None = ..., location: global___LogicalBufferProto.Location | None = ... + ) -> None: ... + def HasField(self, field_name: typing.Literal["location", b"location"]) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["location", b"location", "source_buffer_id", b"source_buffer_id"] + ) -> None: ... + + LOGICAL_BUFFERS_FIELD_NUMBER: builtins.int + BUFFER_ALIASES_FIELD_NUMBER: builtins.int + BUFFER_ALLOCATIONS_FIELD_NUMBER: builtins.int + HEAP_SIMULATOR_TRACES_FIELD_NUMBER: builtins.int + @property + def logical_buffers( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___LogicalBufferProto]: ... + @property + def buffer_aliases( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BufferAssignmentProto.BufferAlias]: ... + @property + def buffer_allocations( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BufferAllocationProto]: ... + @property + def heap_simulator_traces( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HeapSimulatorTrace]: ... + def __init__( + self, + *, + logical_buffers: collections.abc.Iterable[global___LogicalBufferProto] | None = ..., + buffer_aliases: collections.abc.Iterable[global___BufferAssignmentProto.BufferAlias] | None = ..., + buffer_allocations: collections.abc.Iterable[global___BufferAllocationProto] | None = ..., + heap_simulator_traces: collections.abc.Iterable[global___HeapSimulatorTrace] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "buffer_aliases", + b"buffer_aliases", + "buffer_allocations", + b"buffer_allocations", + "heap_simulator_traces", + b"heap_simulator_traces", + "logical_buffers", + b"logical_buffers", + ], + ) -> None: ... 
+ +global___BufferAssignmentProto = BufferAssignmentProto + +@typing.final +class HloProto(google.protobuf.message.Message): + """Grouping message that contains all of the information above.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + HLO_MODULE_FIELD_NUMBER: builtins.int + BUFFER_ASSIGNMENT_FIELD_NUMBER: builtins.int + @property + def hlo_module(self) -> global___HloModuleProto: ... + @property + def buffer_assignment(self) -> global___BufferAssignmentProto: ... + def __init__( + self, *, hlo_module: global___HloModuleProto | None = ..., buffer_assignment: global___BufferAssignmentProto | None = ... + ) -> None: ... + def HasField( + self, field_name: typing.Literal["buffer_assignment", b"buffer_assignment", "hlo_module", b"hlo_module"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["buffer_assignment", b"buffer_assignment", "hlo_module", b"hlo_module"] + ) -> None: ... + +global___HloProto = HloProto + +@typing.final +class HloSnapshot(google.protobuf.message.Message): + """Encapsulates HloProto together with the arguments, result, and + execution_platform. This message is used for purposes such as + analysis/replay/file-storage. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + HLO_FIELD_NUMBER: builtins.int + ARGUMENTS_FIELD_NUMBER: builtins.int + RESULT_FIELD_NUMBER: builtins.int + EXECUTION_PLATFORM_FIELD_NUMBER: builtins.int + execution_platform: builtins.str + """The name of the platform used to run the graph.""" + @property + def hlo(self) -> global___HloProto: + """The hlo graph.""" + + @property + def arguments( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.LiteralProto]: + """The arguments passed to the graph.""" + + @property + def result(self) -> tensorflow.compiler.xla.xla_data_pb2.LiteralProto: + """The result of the graph.""" + + def __init__( + self, + *, + hlo: global___HloProto | None = ..., + arguments: collections.abc.Iterable[tensorflow.compiler.xla.xla_data_pb2.LiteralProto] | None = ..., + result: tensorflow.compiler.xla.xla_data_pb2.LiteralProto | None = ..., + execution_platform: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["hlo", b"hlo", "result", b"result"]) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "arguments", b"arguments", "execution_platform", b"execution_platform", "hlo", b"hlo", "result", b"result" + ], + ) -> None: ... + +global___HloSnapshot = HloSnapshot + +@typing.final +class HloModuleMetadataProto(google.protobuf.message.Message): + """Metadata for an HLO module. Dumped after HLO passes and before LLO lowering + with filename module_####.metadata.textproto, where #### is + canonical_module_id. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CANONICAL_MODULE_ID_FIELD_NUMBER: builtins.int + MODULE_GROUP_NAME_FIELD_NUMBER: builtins.int + ORIGINAL_MODULE_ID_FIELD_NUMBER: builtins.int + PARTITIONED_MODULE_IDS_FIELD_NUMBER: builtins.int + PASS_METADATA_FIELD_NUMBER: builtins.int + canonical_module_id: builtins.int + """Uniquely identifies an HloModuleMetadata. 
Equal to the first unique_id + of the module (a module may go through multiple unique_ids). If a module + is partitioned into multiple modules, those modules will each have a new + HloModuleMetadata with a different canonical_module_id. + """ + module_group_name: builtins.str + """Name of the module group that the module is part of.""" + original_module_id: builtins.int + """The canonical module id of the module that this one is partitioned from, + if applicable. + """ + @property + def partitioned_module_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The canonical module ids of the modules that this one is partitioned into, + if applicable. + """ + + @property + def pass_metadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloPassMetadata]: + """Metadata for the HLO passes that are run on the module.""" + + def __init__( + self, + *, + canonical_module_id: builtins.int | None = ..., + module_group_name: builtins.str | None = ..., + original_module_id: builtins.int | None = ..., + partitioned_module_ids: collections.abc.Iterable[builtins.int] | None = ..., + pass_metadata: collections.abc.Iterable[global___HloPassMetadata] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "canonical_module_id", + b"canonical_module_id", + "module_group_name", + b"module_group_name", + "original_module_id", + b"original_module_id", + "partitioned_module_ids", + b"partitioned_module_ids", + "pass_metadata", + b"pass_metadata", + ], + ) -> None: ... + +global___HloModuleMetadataProto = HloModuleMetadataProto + +@typing.final +class HloPassMetadata(google.protobuf.message.Message): + """Metadata for one run of an HLO pass on a module. Provides more information + when processing debug dumps of HloProtos about the order of HLO passes and + various other stats like duration. 
`pass_id` may also be used to identify a + particular run of a pass in debug info that propagates through stages of + compilation. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PASS_ID_FIELD_NUMBER: builtins.int + PASS_NAME_FIELD_NUMBER: builtins.int + PIPELINE_NAME_FIELD_NUMBER: builtins.int + DUMP_FILENAMES_FIELD_NUMBER: builtins.int + MODULE_CHANGED_FIELD_NUMBER: builtins.int + MODULE_ID_FIELD_NUMBER: builtins.int + MODULE_GROUP_MODULE_IDS_FIELD_NUMBER: builtins.int + START_TIMESTAMP_USEC_FIELD_NUMBER: builtins.int + END_TIMESTAMP_USEC_FIELD_NUMBER: builtins.int + CUSTOM_METADATA_FIELD_NUMBER: builtins.int + pass_id: builtins.int + """For a given module, pass_id uniquely identifies a run of an HLO pass on + that module. Note that a pass_id may not always refer to the same pass + because the order of passes during compilation may change. For finding + metadata for a particular pass, pass_name and pipeline_name would be more + reliable, although note that they may not be unique. + """ + pass_name: builtins.str + pipeline_name: builtins.str + module_changed: builtins.bool + """Return value of pass.Run(). True if this pass changed the module, or, in + the case where the module was run through this pass as part of a module + group, true if this pass changed any module in the same module group. + """ + module_id: builtins.int + """The unique_id of the module that this pass is run on. May be different from + the canonical_module_id of the HloModuleMetadata that this HloPassMetadata + is inside. + """ + start_timestamp_usec: builtins.int + """Timestamp before and after the pass is run. Note they may be equal.""" + end_timestamp_usec: builtins.int + @property + def dump_filenames(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """Filenames of the dumps of the module after this pass ran. Module may be + dumped in multiple formats, and the order of formats in this field will + stay consistent across passes. 
+ """ + + @property + def module_group_module_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """If the module went through this pass as part of a module group, this is + set as the ids of all the modules in the module group. Empty otherwise. + """ + + @property + def custom_metadata(self) -> google.protobuf.any_pb2.Any: + """Custom metadata for the pass.""" + + def __init__( + self, + *, + pass_id: builtins.int | None = ..., + pass_name: builtins.str | None = ..., + pipeline_name: builtins.str | None = ..., + dump_filenames: collections.abc.Iterable[builtins.str] | None = ..., + module_changed: builtins.bool | None = ..., + module_id: builtins.int | None = ..., + module_group_module_ids: collections.abc.Iterable[builtins.int] | None = ..., + start_timestamp_usec: builtins.int | None = ..., + end_timestamp_usec: builtins.int | None = ..., + custom_metadata: google.protobuf.any_pb2.Any | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["custom_metadata", b"custom_metadata"]) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "custom_metadata", + b"custom_metadata", + "dump_filenames", + b"dump_filenames", + "end_timestamp_usec", + b"end_timestamp_usec", + "module_changed", + b"module_changed", + "module_group_module_ids", + b"module_group_module_ids", + "module_id", + b"module_id", + "pass_id", + b"pass_id", + "pass_name", + b"pass_name", + "pipeline_name", + b"pipeline_name", + "start_timestamp_usec", + b"start_timestamp_usec", + ], + ) -> None: ... 
+ +global___HloPassMetadata = HloPassMetadata diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/hlo_profile_printer_data_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/hlo_profile_printer_data_pb2.pyi new file mode 100644 index 0000000000..5699e8cc13 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/hlo_profile_printer_data_pb2.pyi @@ -0,0 +1,187 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2018 The OpenXLA Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +============================================================================== +""" + +import builtins +import collections.abc +import typing + +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class HloProfilePrinterData(google.protobuf.message.Message): + """Describes how to pretty-print a profile counter array gathered for a specific + HloModule. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class HloInstructionInfo(google.protobuf.message.Message): + """Pretty-printer information about an HloInstruction.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LONG_NAME_FIELD_NUMBER: builtins.int + SHORT_NAME_FIELD_NUMBER: builtins.int + CATEGORY_FIELD_NUMBER: builtins.int + FLOP_COUNT_FIELD_NUMBER: builtins.int + TRANSCENDENTAL_COUNT_FIELD_NUMBER: builtins.int + BYTES_ACCESSED_FIELD_NUMBER: builtins.int + OPTIMAL_SECONDS_FIELD_NUMBER: builtins.int + PROFILE_INDEX_FIELD_NUMBER: builtins.int + long_name: builtins.str + short_name: builtins.str + category: builtins.str + flop_count: builtins.float + """Metrics computed by HloCostAnalysis.""" + transcendental_count: builtins.float + bytes_accessed: builtins.int + optimal_seconds: builtins.float + profile_index: builtins.int + """The index into the profile counters array for the HloInstruction + corresponding to this HloInstructionInfo. + """ + def __init__( + self, + *, + long_name: builtins.str | None = ..., + short_name: builtins.str | None = ..., + category: builtins.str | None = ..., + flop_count: builtins.float | None = ..., + transcendental_count: builtins.float | None = ..., + bytes_accessed: builtins.int | None = ..., + optimal_seconds: builtins.float | None = ..., + profile_index: builtins.int | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "bytes_accessed", + b"bytes_accessed", + "category", + b"category", + "flop_count", + b"flop_count", + "long_name", + b"long_name", + "optimal_seconds", + b"optimal_seconds", + "profile_index", + b"profile_index", + "short_name", + b"short_name", + "transcendental_count", + b"transcendental_count", + ], + ) -> None: ... 
+ + @typing.final + class HloComputationInfo(google.protobuf.message.Message): + """Pretty-printer information about an HloComputation.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROFILE_INDEX_FIELD_NUMBER: builtins.int + INSTRUCTION_INFOS_FIELD_NUMBER: builtins.int + name: builtins.str + profile_index: builtins.int + """The index into the profile counters array for the HloComputation + corresponding to this HloComputationInfo. + """ + @property + def instruction_infos( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___HloProfilePrinterData.HloInstructionInfo + ]: + """HloInstructionInfos for every HloInstruction in the HloComputation for + corresponding to this HloComputattionInfo. + """ + + def __init__( + self, + *, + name: builtins.str | None = ..., + profile_index: builtins.int | None = ..., + instruction_infos: collections.abc.Iterable[global___HloProfilePrinterData.HloInstructionInfo] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "instruction_infos", b"instruction_infos", "name", b"name", "profile_index", b"profile_index" + ], + ) -> None: ... + + @typing.final + class ExtraMetricsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.int + def __init__(self, *, key: builtins.str | None = ..., value: builtins.int | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + COMPUTATION_INFOS_FIELD_NUMBER: builtins.int + PROFILE_COUNTERS_SIZE_FIELD_NUMBER: builtins.int + EXTRA_METRICS_FIELD_NUMBER: builtins.int + ENTRY_COMPUTATION_FIELD_NUMBER: builtins.int + profile_counters_size: builtins.int + """The size of the profile counters array we will pretty-print.""" + entry_computation: builtins.str + """Name of the entry computation.""" + @property + def computation_infos( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloProfilePrinterData.HloComputationInfo]: + """HloComputationInfos for every HloComputation in the HloModule.""" + + @property + def extra_metrics(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.int]: + """Maps extra metric name to the index into the profile counters array.""" + + def __init__( + self, + *, + computation_infos: collections.abc.Iterable[global___HloProfilePrinterData.HloComputationInfo] | None = ..., + profile_counters_size: builtins.int | None = ..., + extra_metrics: collections.abc.Mapping[builtins.str, builtins.int] | None = ..., + entry_computation: builtins.str | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "computation_infos", + b"computation_infos", + "entry_computation", + b"entry_computation", + "extra_metrics", + b"extra_metrics", + "profile_counters_size", + b"profile_counters_size", + ], + ) -> None: ... + +global___HloProfilePrinterData = HloProfilePrinterData diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/metrics_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/metrics_pb2.pyi new file mode 100644 index 0000000000..05eaf73798 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/metrics_pb2.pyi @@ -0,0 +1,283 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" + +import builtins +import collections.abc +import sys +import typing + +import google.protobuf.any_pb2 +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import google.protobuf.timestamp_pb2 + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class PassMetrics(google.protobuf.message.Message): + """Defines pass specific metrics.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + MODULE_ID_FIELD_NUMBER: builtins.int + PASS_NAME_FIELD_NUMBER: builtins.int + PASS_DURATION_FIELD_NUMBER: builtins.int + CUSTOM_METRICS_FIELD_NUMBER: builtins.int + module_id: builtins.int + """Unique ID of the module on which the pass was run.""" + pass_name: builtins.str + """The name of the pass.""" + @property + def pass_duration(self) -> google.protobuf.duration_pb2.Duration: + """Duration of the pass.""" + + @property + def custom_metrics(self) -> google.protobuf.any_pb2.Any: + """Custom pass metrics. This is kept opaque, via `google.protobuf.Any`, in + order to decouple pass agnostic compilation logs from possibly proprietary + compiler passes. + """ + + def __init__( + self, + *, + module_id: builtins.int | None = ..., + pass_name: builtins.str | None = ..., + pass_duration: google.protobuf.duration_pb2.Duration | None = ..., + custom_metrics: google.protobuf.any_pb2.Any | None = ..., + ) -> None: ... + def HasField( + self, field_name: typing.Literal["custom_metrics", b"custom_metrics", "pass_duration", b"pass_duration"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "custom_metrics", + b"custom_metrics", + "module_id", + b"module_id", + "pass_duration", + b"pass_duration", + "pass_name", + b"pass_name", + ], + ) -> None: ... 
+ +global___PassMetrics = PassMetrics + +@typing.final +class JobInfo(google.protobuf.message.Message): + """Defines compilation job information.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + CELL_FIELD_NUMBER: builtins.int + USER_FIELD_NUMBER: builtins.int + UID_FIELD_NUMBER: builtins.int + TASK_ID_FIELD_NUMBER: builtins.int + TASK_UID_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the job running compilation.""" + cell: builtins.str + """Cell in which the job is running.""" + user: builtins.str + """User running the job.""" + uid: builtins.int + """Unique id when combined with user and cell field.""" + task_id: builtins.int + """Task index, which will not change across job restarts.""" + task_uid: builtins.int + """Task unique id, which may change across job restarts.""" + def __init__( + self, + *, + name: builtins.str | None = ..., + cell: builtins.str | None = ..., + user: builtins.str | None = ..., + uid: builtins.int | None = ..., + task_id: builtins.int | None = ..., + task_uid: builtins.int | None = ..., + ) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "_cell", + b"_cell", + "_name", + b"_name", + "_task_id", + b"_task_id", + "_task_uid", + b"_task_uid", + "_uid", + b"_uid", + "_user", + b"_user", + "cell", + b"cell", + "name", + b"name", + "task_id", + b"task_id", + "task_uid", + b"task_uid", + "uid", + b"uid", + "user", + b"user", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "_cell", + b"_cell", + "_name", + b"_name", + "_task_id", + b"_task_id", + "_task_uid", + b"_task_uid", + "_uid", + b"_uid", + "_user", + b"_user", + "cell", + b"cell", + "name", + b"name", + "task_id", + b"task_id", + "task_uid", + b"task_uid", + "uid", + b"uid", + "user", + b"user", + ], + ) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_cell", b"_cell"]) -> typing.Literal["cell"] | None: ... 
+ @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_name", b"_name"]) -> typing.Literal["name"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_task_id", b"_task_id"]) -> typing.Literal["task_id"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_task_uid", b"_task_uid"]) -> typing.Literal["task_uid"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_uid", b"_uid"]) -> typing.Literal["uid"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_user", b"_user"]) -> typing.Literal["user"] | None: ... + +global___JobInfo = JobInfo + +@typing.final +class CompilationLogEntry(google.protobuf.message.Message): + """Defines XLA compilation metrics.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _CompilationStage: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _CompilationStageEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CompilationLogEntry._CompilationStage.ValueType], + builtins.type, + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + UNSPECIFIED: CompilationLogEntry._CompilationStage.ValueType # 0 + END_TO_END: CompilationLogEntry._CompilationStage.ValueType # 1 + HLO_PASSES: CompilationLogEntry._CompilationStage.ValueType # 2 + CODE_GENERATION: CompilationLogEntry._CompilationStage.ValueType # 3 + BACKEND_PASSES: CompilationLogEntry._CompilationStage.ValueType # 4 + + class CompilationStage(_CompilationStage, metaclass=_CompilationStageEnumTypeWrapper): + """Defines compilation stages for which metrics are collected.""" + + UNSPECIFIED: CompilationLogEntry.CompilationStage.ValueType # 0 + END_TO_END: CompilationLogEntry.CompilationStage.ValueType # 1 + HLO_PASSES: CompilationLogEntry.CompilationStage.ValueType # 2 + CODE_GENERATION: CompilationLogEntry.CompilationStage.ValueType # 3 + 
BACKEND_PASSES: CompilationLogEntry.CompilationStage.ValueType # 4 + + TIMESTAMP_FIELD_NUMBER: builtins.int + STAGE_FIELD_NUMBER: builtins.int + DURATION_FIELD_NUMBER: builtins.int + TASK_INDEX_FIELD_NUMBER: builtins.int + PASS_METRICS_FIELD_NUMBER: builtins.int + MODULE_IDS_FIELD_NUMBER: builtins.int + JOB_INFO_FIELD_NUMBER: builtins.int + stage: global___CompilationLogEntry.CompilationStage.ValueType + """Compilation stage recorded by this log entry.""" + task_index: builtins.int + """Task index from which this log entry was recorded or + -1 if the task index could not be fetched. In the case task_index is not + equal to -1, it is guaranteed to match the task_id in job_info. + """ + @property + def timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time when the event captured by this log entry occurred.""" + + @property + def duration(self) -> google.protobuf.duration_pb2.Duration: + """Duration of the given compilation stage.""" + + @property + def pass_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PassMetrics]: + """Pass specific metrics.""" + + @property + def module_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """IDs of modules on which the compilation stage was run.""" + + @property + def job_info(self) -> global___JobInfo: + """Job information.""" + + def __init__( + self, + *, + timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + stage: global___CompilationLogEntry.CompilationStage.ValueType | None = ..., + duration: google.protobuf.duration_pb2.Duration | None = ..., + task_index: builtins.int | None = ..., + pass_metrics: collections.abc.Iterable[global___PassMetrics] | None = ..., + module_ids: collections.abc.Iterable[builtins.int] | None = ..., + job_info: global___JobInfo | None = ..., + ) -> None: ... 
+ def HasField( + self, field_name: typing.Literal["duration", b"duration", "job_info", b"job_info", "timestamp", b"timestamp"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "duration", + b"duration", + "job_info", + b"job_info", + "module_ids", + b"module_ids", + "pass_metrics", + b"pass_metrics", + "stage", + b"stage", + "task_index", + b"task_index", + "timestamp", + b"timestamp", + ], + ) -> None: ... + +global___CompilationLogEntry = CompilationLogEntry diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/test_compilation_environment_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/test_compilation_environment_pb2.pyi new file mode 100644 index 0000000000..6b3b31b94f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/test_compilation_environment_pb2.pyi @@ -0,0 +1,59 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2022 The OpenXLA Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+============================================================================== +""" + +import builtins +import typing + +import google.protobuf.descriptor +import google.protobuf.message + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class TestCompilationEnvironment1(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SOME_FLAG_FIELD_NUMBER: builtins.int + some_flag: builtins.int + def __init__(self, *, some_flag: builtins.int | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["some_flag", b"some_flag"]) -> None: ... + +global___TestCompilationEnvironment1 = TestCompilationEnvironment1 + +@typing.final +class TestCompilationEnvironment2(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SOME_OTHER_FLAG_FIELD_NUMBER: builtins.int + some_other_flag: builtins.int + def __init__(self, *, some_other_flag: builtins.int | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["some_other_flag", b"some_other_flag"]) -> None: ... + +global___TestCompilationEnvironment2 = TestCompilationEnvironment2 + +@typing.final +class TestCompilationEnvironment3(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + A_THIRD_FLAG_FIELD_NUMBER: builtins.int + a_third_flag: builtins.int + def __init__(self, *, a_third_flag: builtins.int | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["a_third_flag", b"a_third_flag"]) -> None: ... 
+ +global___TestCompilationEnvironment3 = TestCompilationEnvironment3 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/xla_compile_result_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/xla_compile_result_pb2.pyi new file mode 100644 index 0000000000..e17f9559b6 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/service/xla_compile_result_pb2.pyi @@ -0,0 +1,167 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2023 The OpenXLA Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +============================================================================== +""" + +import builtins +import collections.abc +import typing + +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.internal.containers +import google.protobuf.message +import tensorflow.compiler.xla.service.hlo_pb2 +import tensorflow.tsl.protobuf.status_pb2 + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class CompilerPerfStats(google.protobuf.message.Message): + """Statistics on how long various parts of compilation took. + Not all durations may be relevant for all producers of this message, in + which irrelevant fields should simply be skipped. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INIT_DURATION_FIELD_NUMBER: builtins.int + HLO_VERIFICATION_DURATION_FIELD_NUMBER: builtins.int + COMPILATION_PROLOGUE_DURATION_FIELD_NUMBER: builtins.int + COMPILATION_DURATION_FIELD_NUMBER: builtins.int + TOTAL_DURATION_FIELD_NUMBER: builtins.int + @property + def init_duration(self) -> google.protobuf.duration_pb2.Duration: + """How long did it take to initialize the compiler?""" + + @property + def hlo_verification_duration(self) -> google.protobuf.duration_pb2.Duration: + """How long did it take to verify the HLO?""" + + @property + def compilation_prologue_duration(self) -> google.protobuf.duration_pb2.Duration: + """How long did it take to prepare for compilation after verification?""" + + @property + def compilation_duration(self) -> google.protobuf.duration_pb2.Duration: + """How long did it take to compile?""" + + @property + def total_duration(self) -> google.protobuf.duration_pb2.Duration: + """How long did everything take?""" + + def __init__( + self, + *, + init_duration: google.protobuf.duration_pb2.Duration | None = ..., + hlo_verification_duration: google.protobuf.duration_pb2.Duration | None = ..., + compilation_prologue_duration: google.protobuf.duration_pb2.Duration | None = ..., + compilation_duration: google.protobuf.duration_pb2.Duration | None = ..., + total_duration: google.protobuf.duration_pb2.Duration | None = ..., + ) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "compilation_duration", + b"compilation_duration", + "compilation_prologue_duration", + b"compilation_prologue_duration", + "hlo_verification_duration", + b"hlo_verification_duration", + "init_duration", + b"init_duration", + "total_duration", + b"total_duration", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "compilation_duration", + b"compilation_duration", + "compilation_prologue_duration", + b"compilation_prologue_duration", + "hlo_verification_duration", + b"hlo_verification_duration", + "init_duration", + b"init_duration", + "total_duration", + b"total_duration", + ], + ) -> None: ... + +global___CompilerPerfStats = CompilerPerfStats + +@typing.final +class CompilationResult(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class CountersEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.int + def __init__(self, *, key: builtins.str | None = ..., value: builtins.int | None = ...) -> None: ... + def HasField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + HLO_MODULE_FIELD_NUMBER: builtins.int + PERF_STATS_FIELD_NUMBER: builtins.int + STATUS_FIELD_NUMBER: builtins.int + COUNTERS_FIELD_NUMBER: builtins.int + @property + def hlo_module(self) -> tensorflow.compiler.xla.service.hlo_pb2.HloModuleProto: + """The compiled HLO. Only set when compilation succeeds.""" + + @property + def perf_stats(self) -> global___CompilerPerfStats: + """Always set when compilation succeeds. May or may not be set when + compilation fails. + """ + + @property + def status(self) -> tensorflow.tsl.protobuf.status_pb2.StatusProto: + """Always set even when compilation succeeds.""" + + @property + def counters(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.int]: + """Collects counters collected during compilation. Not every producer may + include counter support at all or any particular counter. 
+ """ + + def __init__( + self, + *, + hlo_module: tensorflow.compiler.xla.service.hlo_pb2.HloModuleProto | None = ..., + perf_stats: global___CompilerPerfStats | None = ..., + status: tensorflow.tsl.protobuf.status_pb2.StatusProto | None = ..., + counters: collections.abc.Mapping[builtins.str, builtins.int] | None = ..., + ) -> None: ... + def HasField( + self, field_name: typing.Literal["hlo_module", b"hlo_module", "perf_stats", b"perf_stats", "status", b"status"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "counters", b"counters", "hlo_module", b"hlo_module", "perf_stats", b"perf_stats", "status", b"status" + ], + ) -> None: ... + +global___CompilationResult = CompilationResult diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/tsl/protobuf/bfc_memory_map_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/tsl/protobuf/bfc_memory_map_pb2.pyi new file mode 100644 index 0000000000..0fe33b725b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/tsl/protobuf/bfc_memory_map_pb2.pyi @@ -0,0 +1,218 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" + +import builtins +import collections.abc +import typing + +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class MemAllocatorStats(google.protobuf.message.Message): + """Some of the data from AllocatorStats""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NUM_ALLOCS_FIELD_NUMBER: builtins.int + BYTES_IN_USE_FIELD_NUMBER: builtins.int + PEAK_BYTES_IN_USE_FIELD_NUMBER: builtins.int + LARGEST_ALLOC_SIZE_FIELD_NUMBER: builtins.int + FRAGMENTATION_METRIC_FIELD_NUMBER: builtins.int + num_allocs: builtins.int + bytes_in_use: builtins.int + peak_bytes_in_use: builtins.int + largest_alloc_size: builtins.int + fragmentation_metric: builtins.float + def __init__( + self, + *, + num_allocs: builtins.int | None = ..., + bytes_in_use: builtins.int | None = ..., + peak_bytes_in_use: builtins.int | None = ..., + largest_alloc_size: builtins.int | None = ..., + fragmentation_metric: builtins.float | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "bytes_in_use", + b"bytes_in_use", + "fragmentation_metric", + b"fragmentation_metric", + "largest_alloc_size", + b"largest_alloc_size", + "num_allocs", + b"num_allocs", + "peak_bytes_in_use", + b"peak_bytes_in_use", + ], + ) -> None: ... 
+ +global___MemAllocatorStats = MemAllocatorStats + +@typing.final +class MemChunk(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ADDRESS_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + REQUESTED_SIZE_FIELD_NUMBER: builtins.int + BIN_FIELD_NUMBER: builtins.int + OP_NAME_FIELD_NUMBER: builtins.int + FREED_AT_COUNT_FIELD_NUMBER: builtins.int + ACTION_COUNT_FIELD_NUMBER: builtins.int + IN_USE_FIELD_NUMBER: builtins.int + STEP_ID_FIELD_NUMBER: builtins.int + address: builtins.int + size: builtins.int + requested_size: builtins.int + bin: builtins.int + op_name: builtins.str + freed_at_count: builtins.int + action_count: builtins.int + in_use: builtins.bool + step_id: builtins.int + def __init__( + self, + *, + address: builtins.int | None = ..., + size: builtins.int | None = ..., + requested_size: builtins.int | None = ..., + bin: builtins.int | None = ..., + op_name: builtins.str | None = ..., + freed_at_count: builtins.int | None = ..., + action_count: builtins.int | None = ..., + in_use: builtins.bool | None = ..., + step_id: builtins.int | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "action_count", + b"action_count", + "address", + b"address", + "bin", + b"bin", + "freed_at_count", + b"freed_at_count", + "in_use", + b"in_use", + "op_name", + b"op_name", + "requested_size", + b"requested_size", + "size", + b"size", + "step_id", + b"step_id", + ], + ) -> None: ... 
+ +global___MemChunk = MemChunk + +@typing.final +class BinSummary(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BIN_FIELD_NUMBER: builtins.int + TOTAL_BYTES_IN_USE_FIELD_NUMBER: builtins.int + TOTAL_BYTES_IN_BIN_FIELD_NUMBER: builtins.int + TOTAL_CHUNKS_IN_USE_FIELD_NUMBER: builtins.int + TOTAL_CHUNKS_IN_BIN_FIELD_NUMBER: builtins.int + bin: builtins.int + total_bytes_in_use: builtins.int + total_bytes_in_bin: builtins.int + total_chunks_in_use: builtins.int + total_chunks_in_bin: builtins.int + def __init__( + self, + *, + bin: builtins.int | None = ..., + total_bytes_in_use: builtins.int | None = ..., + total_bytes_in_bin: builtins.int | None = ..., + total_chunks_in_use: builtins.int | None = ..., + total_chunks_in_bin: builtins.int | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "bin", + b"bin", + "total_bytes_in_bin", + b"total_bytes_in_bin", + "total_bytes_in_use", + b"total_bytes_in_use", + "total_chunks_in_bin", + b"total_chunks_in_bin", + "total_chunks_in_use", + b"total_chunks_in_use", + ], + ) -> None: ... + +global___BinSummary = BinSummary + +@typing.final +class SnapShot(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ACTION_COUNT_FIELD_NUMBER: builtins.int + SIZE_FIELD_NUMBER: builtins.int + action_count: builtins.int + size: builtins.int + def __init__(self, *, action_count: builtins.int | None = ..., size: builtins.int | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["action_count", b"action_count", "size", b"size"]) -> None: ... 
+ +global___SnapShot = SnapShot + +@typing.final +class MemoryDump(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ALLOCATOR_NAME_FIELD_NUMBER: builtins.int + BIN_SUMMARY_FIELD_NUMBER: builtins.int + CHUNK_FIELD_NUMBER: builtins.int + SNAP_SHOT_FIELD_NUMBER: builtins.int + STATS_FIELD_NUMBER: builtins.int + allocator_name: builtins.str + @property + def bin_summary(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BinSummary]: ... + @property + def chunk(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MemChunk]: ... + @property + def snap_shot(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SnapShot]: ... + @property + def stats(self) -> global___MemAllocatorStats: ... + def __init__( + self, + *, + allocator_name: builtins.str | None = ..., + bin_summary: collections.abc.Iterable[global___BinSummary] | None = ..., + chunk: collections.abc.Iterable[global___MemChunk] | None = ..., + snap_shot: collections.abc.Iterable[global___SnapShot] | None = ..., + stats: global___MemAllocatorStats | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["stats", b"stats"]) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "allocator_name", + b"allocator_name", + "bin_summary", + b"bin_summary", + "chunk", + b"chunk", + "snap_shot", + b"snap_shot", + "stats", + b"stats", + ], + ) -> None: ... 
+ +global___MemoryDump = MemoryDump diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/tsl/protobuf/test_log_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/tsl/protobuf/test_log_pb2.pyi new file mode 100644 index 0000000000..a0cd6ef6a4 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/tsl/protobuf/test_log_pb2.pyi @@ -0,0 +1,707 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Protocol messages for describing the results of benchmarks and unit tests.""" + +import builtins +import collections.abc +import sys +import typing + +import google.protobuf.any_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import google.protobuf.wrappers_pb2 + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class EntryValue(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DOUBLE_VALUE_FIELD_NUMBER: builtins.int + STRING_VALUE_FIELD_NUMBER: builtins.int + double_value: builtins.float + string_value: builtins.str + def __init__(self, *, double_value: builtins.float | None = ..., string_value: builtins.str | None = ...) -> None: ... + def HasField( + self, field_name: typing.Literal["double_value", b"double_value", "kind", b"kind", "string_value", b"string_value"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["double_value", b"double_value", "kind", b"kind", "string_value", b"string_value"] + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["kind", b"kind"] + ) -> typing.Literal["double_value", "string_value"] | None: ... 
+ +global___EntryValue = EntryValue + +@typing.final +class MetricEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + MIN_VALUE_FIELD_NUMBER: builtins.int + MAX_VALUE_FIELD_NUMBER: builtins.int + name: builtins.str + """Metric name""" + value: builtins.float + """Metric value""" + @property + def min_value(self) -> google.protobuf.wrappers_pb2.DoubleValue: + """The minimum acceptable value for the metric if specified""" + + @property + def max_value(self) -> google.protobuf.wrappers_pb2.DoubleValue: + """The maximum acceptable value for the metric if specified""" + + def __init__( + self, + *, + name: builtins.str | None = ..., + value: builtins.float | None = ..., + min_value: google.protobuf.wrappers_pb2.DoubleValue | None = ..., + max_value: google.protobuf.wrappers_pb2.DoubleValue | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["max_value", b"max_value", "min_value", b"min_value"]) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["max_value", b"max_value", "min_value", b"min_value", "name", b"name", "value", b"value"] + ) -> None: ... + +global___MetricEntry = MetricEntry + +@typing.final +class BenchmarkEntry(google.protobuf.message.Message): + """Each unit test or benchmark in a test or benchmark run provides + some set of information. Here we provide some reasonable keys + one would expect to see, with optional key/value pairs for things + we haven't considered. + + This BenchmarkEntry should be emitted by each unit test or benchmark + reporter. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class ExtrasEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> global___EntryValue: ... 
+ def __init__(self, *, key: builtins.str | None = ..., value: global___EntryValue | None = ...) -> None: ... + def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + ITERS_FIELD_NUMBER: builtins.int + CPU_TIME_FIELD_NUMBER: builtins.int + WALL_TIME_FIELD_NUMBER: builtins.int + THROUGHPUT_FIELD_NUMBER: builtins.int + EXTRAS_FIELD_NUMBER: builtins.int + METRICS_FIELD_NUMBER: builtins.int + name: builtins.str + """The name of the specific benchmark or test + (e.g. BM_AdjustContrast_gpu_B_W_H) + """ + iters: builtins.int + """If a benchmark, how many iterations it was run for""" + cpu_time: builtins.float + """Total cpu time used for all iterations (in seconds)""" + wall_time: builtins.float + """Total wall time used for all iterations (in seconds)""" + throughput: builtins.float + """Throughput (in MB/s)""" + @property + def extras(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___EntryValue]: + """Generic map from result key to value.""" + + @property + def metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MetricEntry]: + """Metric name, value and expected range. This can include accuracy metrics + typically used to determine whether the accuracy test has passed + """ + + def __init__( + self, + *, + name: builtins.str | None = ..., + iters: builtins.int | None = ..., + cpu_time: builtins.float | None = ..., + wall_time: builtins.float | None = ..., + throughput: builtins.float | None = ..., + extras: collections.abc.Mapping[builtins.str, global___EntryValue] | None = ..., + metrics: collections.abc.Iterable[global___MetricEntry] | None = ..., + ) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "cpu_time", + b"cpu_time", + "extras", + b"extras", + "iters", + b"iters", + "metrics", + b"metrics", + "name", + b"name", + "throughput", + b"throughput", + "wall_time", + b"wall_time", + ], + ) -> None: ... + +global___BenchmarkEntry = BenchmarkEntry + +@typing.final +class BenchmarkEntries(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENTRY_FIELD_NUMBER: builtins.int + @property + def entry(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BenchmarkEntry]: ... + def __init__(self, *, entry: collections.abc.Iterable[global___BenchmarkEntry] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["entry", b"entry"]) -> None: ... + +global___BenchmarkEntries = BenchmarkEntries + +@typing.final +class BuildConfiguration(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + MODE_FIELD_NUMBER: builtins.int + CC_FLAGS_FIELD_NUMBER: builtins.int + OPTS_FIELD_NUMBER: builtins.int + mode: builtins.str + """opt, dbg, etc""" + @property + def cc_flags(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """CC compiler flags, if known""" + + @property + def opts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """Bazel compilation options, if known""" + + def __init__( + self, + *, + mode: builtins.str | None = ..., + cc_flags: collections.abc.Iterable[builtins.str] | None = ..., + opts: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["cc_flags", b"cc_flags", "mode", b"mode", "opts", b"opts"]) -> None: ... 
+ +global___BuildConfiguration = BuildConfiguration + +@typing.final +class CommitId(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CHANGELIST_FIELD_NUMBER: builtins.int + HASH_FIELD_NUMBER: builtins.int + SNAPSHOT_FIELD_NUMBER: builtins.int + PENDING_CHANGELIST_FIELD_NUMBER: builtins.int + changelist: builtins.int + """Submitted changelist.""" + hash: builtins.str + snapshot: builtins.str + """Hash of intermediate change between hash/changelist and what was tested. + Not used if the build is from a commit without modifications. + """ + pending_changelist: builtins.int + """Changelist tested if the change list is not already submitted.""" + def __init__( + self, + *, + changelist: builtins.int | None = ..., + hash: builtins.str | None = ..., + snapshot: builtins.str | None = ..., + pending_changelist: builtins.int | None = ..., + ) -> None: ... + def HasField( + self, field_name: typing.Literal["changelist", b"changelist", "hash", b"hash", "kind", b"kind"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "changelist", + b"changelist", + "hash", + b"hash", + "kind", + b"kind", + "pending_changelist", + b"pending_changelist", + "snapshot", + b"snapshot", + ], + ) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["kind", b"kind"]) -> typing.Literal["changelist", "hash"] | None: ... + +global___CommitId = CommitId + +@typing.final +class CPUInfo(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class CacheSizeEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.int + def __init__(self, *, key: builtins.str | None = ..., value: builtins.int | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + NUM_CORES_FIELD_NUMBER: builtins.int + NUM_CORES_ALLOWED_FIELD_NUMBER: builtins.int + MHZ_PER_CPU_FIELD_NUMBER: builtins.int + CPU_INFO_FIELD_NUMBER: builtins.int + CPU_GOVERNOR_FIELD_NUMBER: builtins.int + CACHE_SIZE_FIELD_NUMBER: builtins.int + num_cores: builtins.int + num_cores_allowed: builtins.int + mhz_per_cpu: builtins.float + """How fast are these cpus?""" + cpu_info: builtins.str + """Additional cpu information. For example, + Intel Ivybridge with HyperThreading (24 cores) dL1:32KB dL2:256KB dL3:30MB + """ + cpu_governor: builtins.str + """What kind of cpu scaling is enabled on the host. + Examples include "performance", "ondemand", "conservative", "mixed". + """ + @property + def cache_size(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.int]: + """Cache sizes (in bytes), e.g. "L2": 262144 (for 256KB)""" + + def __init__( + self, + *, + num_cores: builtins.int | None = ..., + num_cores_allowed: builtins.int | None = ..., + mhz_per_cpu: builtins.float | None = ..., + cpu_info: builtins.str | None = ..., + cpu_governor: builtins.str | None = ..., + cache_size: collections.abc.Mapping[builtins.str, builtins.int] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "cache_size", + b"cache_size", + "cpu_governor", + b"cpu_governor", + "cpu_info", + b"cpu_info", + "mhz_per_cpu", + b"mhz_per_cpu", + "num_cores", + b"num_cores", + "num_cores_allowed", + b"num_cores_allowed", + ], + ) -> None: ... + +global___CPUInfo = CPUInfo + +@typing.final +class MemoryInfo(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TOTAL_FIELD_NUMBER: builtins.int + AVAILABLE_FIELD_NUMBER: builtins.int + total: builtins.int + """Total virtual memory in bytes""" + available: builtins.int + """Immediately available memory in bytes""" + def __init__(self, *, total: builtins.int | None = ..., available: builtins.int | None = ...) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["available", b"available", "total", b"total"]) -> None: ... + +global___MemoryInfo = MemoryInfo + +@typing.final +class GPUInfo(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + MODEL_FIELD_NUMBER: builtins.int + UUID_FIELD_NUMBER: builtins.int + BUS_ID_FIELD_NUMBER: builtins.int + model: builtins.str + """e.g. "Tesla K40c" """ + uuid: builtins.str + """Final entry in output of "nvidia-smi -L" """ + bus_id: builtins.str + """e.g. "0000:04:00.0" """ + def __init__( + self, *, model: builtins.str | None = ..., uuid: builtins.str | None = ..., bus_id: builtins.str | None = ... + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bus_id", b"bus_id", "model", b"model", "uuid", b"uuid"]) -> None: ... + +global___GPUInfo = GPUInfo + +@typing.final +class PlatformInfo(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BITS_FIELD_NUMBER: builtins.int + LINKAGE_FIELD_NUMBER: builtins.int + MACHINE_FIELD_NUMBER: builtins.int + RELEASE_FIELD_NUMBER: builtins.int + SYSTEM_FIELD_NUMBER: builtins.int + VERSION_FIELD_NUMBER: builtins.int + bits: builtins.str + """e.g. '64bit'""" + linkage: builtins.str + """e.g. 'ELF'""" + machine: builtins.str + """e.g. 'i386'""" + release: builtins.str + """e.g. '3.13.0-76-generic'""" + system: builtins.str + """e.g. 'Linux'""" + version: builtins.str + """e.g. '#120-Ubuntu SMP Mon Jan 18 15:59:10 UTC 2016'""" + def __init__( + self, + *, + bits: builtins.str | None = ..., + linkage: builtins.str | None = ..., + machine: builtins.str | None = ..., + release: builtins.str | None = ..., + system: builtins.str | None = ..., + version: builtins.str | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "bits", + b"bits", + "linkage", + b"linkage", + "machine", + b"machine", + "release", + b"release", + "system", + b"system", + "version", + b"version", + ], + ) -> None: ... 
+ +global___PlatformInfo = PlatformInfo + +@typing.final +class AvailableDeviceInfo(google.protobuf.message.Message): + """Matches DeviceAttributes""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + MEMORY_LIMIT_FIELD_NUMBER: builtins.int + PHYSICAL_DESCRIPTION_FIELD_NUMBER: builtins.int + name: builtins.str + """Device name.""" + type: builtins.str + """Device type, e.g. 'CPU' or 'GPU'.""" + memory_limit: builtins.int + """Memory capacity in bytes.""" + physical_description: builtins.str + """The physical description of this device.""" + def __init__( + self, + *, + name: builtins.str | None = ..., + type: builtins.str | None = ..., + memory_limit: builtins.int | None = ..., + physical_description: builtins.str | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "memory_limit", b"memory_limit", "name", b"name", "physical_description", b"physical_description", "type", b"type" + ], + ) -> None: ... 
+ +global___AvailableDeviceInfo = AvailableDeviceInfo + +@typing.final +class MachineConfiguration(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + HOSTNAME_FIELD_NUMBER: builtins.int + SERIAL_IDENTIFIER_FIELD_NUMBER: builtins.int + PLATFORM_INFO_FIELD_NUMBER: builtins.int + CPU_INFO_FIELD_NUMBER: builtins.int + DEVICE_INFO_FIELD_NUMBER: builtins.int + AVAILABLE_DEVICE_INFO_FIELD_NUMBER: builtins.int + MEMORY_INFO_FIELD_NUMBER: builtins.int + hostname: builtins.str + """Host name of machine that ran the benchmark.""" + serial_identifier: builtins.str + """Unique serial number of the machine.""" + @property + def platform_info(self) -> global___PlatformInfo: + """Additional platform information.""" + + @property + def cpu_info(self) -> global___CPUInfo: + """CPU Information.""" + + @property + def device_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.any_pb2.Any]: + """Other devices that are attached and relevant (e.g. GPUInfo).""" + + @property + def available_device_info( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AvailableDeviceInfo]: + """Devices accessible to the test (e.g. as given by list_local_devices).""" + + @property + def memory_info(self) -> global___MemoryInfo: ... + def __init__( + self, + *, + hostname: builtins.str | None = ..., + serial_identifier: builtins.str | None = ..., + platform_info: global___PlatformInfo | None = ..., + cpu_info: global___CPUInfo | None = ..., + device_info: collections.abc.Iterable[google.protobuf.any_pb2.Any] | None = ..., + available_device_info: collections.abc.Iterable[global___AvailableDeviceInfo] | None = ..., + memory_info: global___MemoryInfo | None = ..., + ) -> None: ... + def HasField( + self, + field_name: typing.Literal["cpu_info", b"cpu_info", "memory_info", b"memory_info", "platform_info", b"platform_info"], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "available_device_info", + b"available_device_info", + "cpu_info", + b"cpu_info", + "device_info", + b"device_info", + "hostname", + b"hostname", + "memory_info", + b"memory_info", + "platform_info", + b"platform_info", + "serial_identifier", + b"serial_identifier", + ], + ) -> None: ... + +global___MachineConfiguration = MachineConfiguration + +@typing.final +class RunConfiguration(google.protobuf.message.Message): + """Run-specific items such as arguments to the test / benchmark.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class EnvVarsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__(self, *, key: builtins.str | None = ..., value: builtins.str | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + ARGUMENT_FIELD_NUMBER: builtins.int + ENV_VARS_FIELD_NUMBER: builtins.int + @property + def argument(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + @property + def env_vars(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """Environment variables used to run the test/benchmark.""" + + def __init__( + self, + *, + argument: collections.abc.Iterable[builtins.str] | None = ..., + env_vars: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["argument", b"argument", "env_vars", b"env_vars"]) -> None: ... + +global___RunConfiguration = RunConfiguration + +@typing.final +class TestResults(google.protobuf.message.Message): + """The output of one benchmark / test run. Each run contains a list of + tests or benchmarks, stored as BenchmarkEntry messages. 
+ + This message should be emitted by the reporter (which runs the + test / BM in a subprocess and then reads the emitted BenchmarkEntry messages; + usually from a serialized json file, finally collecting them along + with additional information about the test run. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _BenchmarkType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _BenchmarkTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TestResults._BenchmarkType.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + UNKNOWN: TestResults._BenchmarkType.ValueType # 0 + """Fallback for protos written before Type was introduced.""" + CPP_MICROBENCHMARK: TestResults._BenchmarkType.ValueType # 1 + PYTHON_BENCHMARK: TestResults._BenchmarkType.ValueType # 2 + ANDROID_BENCHMARK: TestResults._BenchmarkType.ValueType # 3 + EDGE_BENCHMARK: TestResults._BenchmarkType.ValueType # 4 + IOS_BENCHMARK: TestResults._BenchmarkType.ValueType # 5 + + class BenchmarkType(_BenchmarkType, metaclass=_BenchmarkTypeEnumTypeWrapper): + """The type of benchmark.""" + + UNKNOWN: TestResults.BenchmarkType.ValueType # 0 + """Fallback for protos written before Type was introduced.""" + CPP_MICROBENCHMARK: TestResults.BenchmarkType.ValueType # 1 + PYTHON_BENCHMARK: TestResults.BenchmarkType.ValueType # 2 + ANDROID_BENCHMARK: TestResults.BenchmarkType.ValueType # 3 + EDGE_BENCHMARK: TestResults.BenchmarkType.ValueType # 4 + IOS_BENCHMARK: TestResults.BenchmarkType.ValueType # 5 + + TARGET_FIELD_NUMBER: builtins.int + ENTRIES_FIELD_NUMBER: builtins.int + BUILD_CONFIGURATION_FIELD_NUMBER: builtins.int + COMMIT_ID_FIELD_NUMBER: builtins.int + START_TIME_FIELD_NUMBER: builtins.int + RUN_TIME_FIELD_NUMBER: builtins.int + MACHINE_CONFIGURATION_FIELD_NUMBER: builtins.int + RUN_CONFIGURATION_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + 
BENCHMARK_TYPE_FIELD_NUMBER: builtins.int + RUN_MODE_FIELD_NUMBER: builtins.int + TF_VERSION_FIELD_NUMBER: builtins.int + target: builtins.str + """The target of the run, e.g.: + //tensorflow/core:kernels_adjust_contrast_op_benchmark_test + """ + start_time: builtins.int + """The time the run started (in seconds of UTC time since Unix epoch)""" + run_time: builtins.float + """The amount of time the total run took (wall time in seconds)""" + name: builtins.str + """Benchmark target identifier.""" + benchmark_type: global___TestResults.BenchmarkType.ValueType + run_mode: builtins.str + """Used for differentiating between continuous and debug builds. + Must be one of: + * cbuild: results from continuous build. + * presubmit: results from oneshot requests. + * culprit: results from culprit finder rerun. + """ + tf_version: builtins.str + """TensorFlow version this benchmark runs against. + This can be either set to full version or just the major version. + """ + @property + def entries(self) -> global___BenchmarkEntries: + """The list of tests or benchmarks in this run.""" + + @property + def build_configuration(self) -> global___BuildConfiguration: + """The configuration of the build (compiled opt? with cuda? 
any copts?)""" + + @property + def commit_id(self) -> global___CommitId: + """The commit id (git hash or changelist)""" + + @property + def machine_configuration(self) -> global___MachineConfiguration: + """Machine-specific parameters (Platform and CPU info)""" + + @property + def run_configuration(self) -> global___RunConfiguration: + """Run-specific parameters (arguments, etc)""" + + def __init__( + self, + *, + target: builtins.str | None = ..., + entries: global___BenchmarkEntries | None = ..., + build_configuration: global___BuildConfiguration | None = ..., + commit_id: global___CommitId | None = ..., + start_time: builtins.int | None = ..., + run_time: builtins.float | None = ..., + machine_configuration: global___MachineConfiguration | None = ..., + run_configuration: global___RunConfiguration | None = ..., + name: builtins.str | None = ..., + benchmark_type: global___TestResults.BenchmarkType.ValueType | None = ..., + run_mode: builtins.str | None = ..., + tf_version: builtins.str | None = ..., + ) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "build_configuration", + b"build_configuration", + "commit_id", + b"commit_id", + "entries", + b"entries", + "machine_configuration", + b"machine_configuration", + "run_configuration", + b"run_configuration", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "benchmark_type", + b"benchmark_type", + "build_configuration", + b"build_configuration", + "commit_id", + b"commit_id", + "entries", + b"entries", + "machine_configuration", + b"machine_configuration", + "name", + b"name", + "run_configuration", + b"run_configuration", + "run_mode", + b"run_mode", + "run_time", + b"run_time", + "start_time", + b"start_time", + "target", + b"target", + "tf_version", + b"tf_version", + ], + ) -> None: ... 
+ +global___TestResults = TestResults diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/xla_data_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/xla_data_pb2.pyi new file mode 100644 index 0000000000..de86f1c8f2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/xla_data_pb2.pyi @@ -0,0 +1,2681 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2017 The OpenXLA Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+============================================================================== +""" + +import builtins +import collections.abc +import sys +import typing + +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _PrimitiveType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _PrimitiveTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_PrimitiveType.ValueType], builtins.type +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + PRIMITIVE_TYPE_INVALID: _PrimitiveType.ValueType # 0 + """Invalid primitive type to serve as default.""" + PRED: _PrimitiveType.ValueType # 1 + """Predicates are two-state booleans.""" + S2: _PrimitiveType.ValueType # 26 + """Signed integral values of fixed width.""" + S4: _PrimitiveType.ValueType # 21 + S8: _PrimitiveType.ValueType # 2 + S16: _PrimitiveType.ValueType # 3 + S32: _PrimitiveType.ValueType # 4 + S64: _PrimitiveType.ValueType # 5 + U2: _PrimitiveType.ValueType # 27 + """Unsigned integral values of fixed width.""" + U4: _PrimitiveType.ValueType # 22 + U8: _PrimitiveType.ValueType # 6 + U16: _PrimitiveType.ValueType # 7 + U32: _PrimitiveType.ValueType # 8 + U64: _PrimitiveType.ValueType # 9 + F16: _PrimitiveType.ValueType # 10 + """Floating-point values of fixed width. + + Note: if f16s are not natively supported on the device, they will be + converted to f16 from f32 at arbirary points in the computation. + """ + F32: _PrimitiveType.ValueType # 11 + BF16: _PrimitiveType.ValueType # 16 + """Truncated 16 bit floating-point format. 
This is similar to IEEE's 16 bit + floating-point format, but uses 1 bit for the sign, 8 bits for the exponent + and 7 bits for the mantissa. + """ + F64: _PrimitiveType.ValueType # 12 + F8E5M2: _PrimitiveType.ValueType # 19 + """FP8 dtypes, as described in this paper: https://arxiv.org/abs/2209.05433 + + F8E5M2 has 5 exponent bits and 2 mantissa bits, and is similar to the + existing IEEE types. + + F8E4M3FN has 4 exponent bits and 3 mantissa bits. The "FN" means only + Finite and NaN values are supported. Unlike IEEE types, infinities are not + supported. NaN is represented when the exponent and mantissa bits are all + 1s. All other values are finite. + + F8E4M3B11FNUZ has 4 exponent bits and 3 mantissa bits and a bias of 11. The + "FNUZ" means only Finite and NaN values are supported; zero is unsigned. + Unlike IEEE types, infinities are not supported. NaN is represented when + the exponent and mantissa bits are all 0s with a sign bit of 1. All other + values are finite. + + Support for these dtypes is under development. They do not yet work + properly in most cases. + TODO(b/259609697): Fully support FP8. + """ + F8E4M3FN: _PrimitiveType.ValueType # 20 + F8E4M3B11FNUZ: _PrimitiveType.ValueType # 23 + F8E5M2FNUZ: _PrimitiveType.ValueType # 24 + """FP8 dtypes, as described in this paper: https://arxiv.org/abs/2206.02915 + + F8E5M2FNUZ has 5 exponent bits and 2 mantissa bits. + F8E4M3FNUZ has 4 exponent bits and 3 mantissa bits. + + The "FNUZ" means only Finite and NaN values are supported; zero is + unsigned. Unlike IEEE types, infinities are not supported. NaN is + represented when the exponent and mantissa bits are all 0s with a sign bit + of 1. All other values are finite. + + These differences mean there's an additional exponent value available. To + keep the same dynamic range as an IEEE-like FP8 type, the exponent is + biased one more than would be expected given the number of exponent bits + (8 for Float8E4M3FNUZ and 16 for Float8E5M2FNUZ). 
+ """ + F8E4M3FNUZ: _PrimitiveType.ValueType # 25 + C64: _PrimitiveType.ValueType # 15 + """Complex values of fixed width. + Paired F32 (real, imag), as in std::complex. + """ + C128: _PrimitiveType.ValueType # 18 + """Paired F64 (real, imag), as in std::complex.""" + TUPLE: _PrimitiveType.ValueType # 13 + """A tuple is a polymorphic sequence; e.g. a shape that holds different + sub-shapes. They are used for things like returning multiple values from a + computation; e.g. a computation that returns weights and biases may have a + signature that results in a tuple like (f32[784x2000], f32[2000]) + + If a shape proto has the tuple element type, it may not have any entries + in the dimensions field. + """ + OPAQUE_TYPE: _PrimitiveType.ValueType # 14 + """An opaque type used for passing context-specific data to a custom + operation. Shapes of this primitive type will have empty dimensions and + tuple_shapes fields. + + (OPAQUE would be a better name for this identifier, but that conflicts with + a macro defined in windows.h.) + """ + TOKEN: _PrimitiveType.ValueType # 17 + """A token type threaded between side-effecting operations. Shapes of this + primitive type will have empty dimensions and tuple_shapes fields. + """ + +class PrimitiveType(_PrimitiveType, metaclass=_PrimitiveTypeEnumTypeWrapper): + """Primitive types are the individual values that can be held in rectangular + multidimensional arrays. A description of the rectangular multidimensional + array dimensions / primitive type is given by Shape, below. 
+ + LINT.IfChange + """ + +PRIMITIVE_TYPE_INVALID: PrimitiveType.ValueType # 0 +"""Invalid primitive type to serve as default.""" +PRED: PrimitiveType.ValueType # 1 +"""Predicates are two-state booleans.""" +S2: PrimitiveType.ValueType # 26 +"""Signed integral values of fixed width.""" +S4: PrimitiveType.ValueType # 21 +S8: PrimitiveType.ValueType # 2 +S16: PrimitiveType.ValueType # 3 +S32: PrimitiveType.ValueType # 4 +S64: PrimitiveType.ValueType # 5 +U2: PrimitiveType.ValueType # 27 +"""Unsigned integral values of fixed width.""" +U4: PrimitiveType.ValueType # 22 +U8: PrimitiveType.ValueType # 6 +U16: PrimitiveType.ValueType # 7 +U32: PrimitiveType.ValueType # 8 +U64: PrimitiveType.ValueType # 9 +F16: PrimitiveType.ValueType # 10 +"""Floating-point values of fixed width. + +Note: if f16s are not natively supported on the device, they will be +converted to f16 from f32 at arbirary points in the computation. +""" +F32: PrimitiveType.ValueType # 11 +BF16: PrimitiveType.ValueType # 16 +"""Truncated 16 bit floating-point format. This is similar to IEEE's 16 bit +floating-point format, but uses 1 bit for the sign, 8 bits for the exponent +and 7 bits for the mantissa. +""" +F64: PrimitiveType.ValueType # 12 +F8E5M2: PrimitiveType.ValueType # 19 +"""FP8 dtypes, as described in this paper: https://arxiv.org/abs/2209.05433 + +F8E5M2 has 5 exponent bits and 2 mantissa bits, and is similar to the +existing IEEE types. + +F8E4M3FN has 4 exponent bits and 3 mantissa bits. The "FN" means only +Finite and NaN values are supported. Unlike IEEE types, infinities are not +supported. NaN is represented when the exponent and mantissa bits are all +1s. All other values are finite. + +F8E4M3B11FNUZ has 4 exponent bits and 3 mantissa bits and a bias of 11. The +"FNUZ" means only Finite and NaN values are supported; zero is unsigned. +Unlike IEEE types, infinities are not supported. NaN is represented when +the exponent and mantissa bits are all 0s with a sign bit of 1. 
All other +values are finite. + +Support for these dtypes is under development. They do not yet work +properly in most cases. +TODO(b/259609697): Fully support FP8. +""" +F8E4M3FN: PrimitiveType.ValueType # 20 +F8E4M3B11FNUZ: PrimitiveType.ValueType # 23 +F8E5M2FNUZ: PrimitiveType.ValueType # 24 +"""FP8 dtypes, as described in this paper: https://arxiv.org/abs/2206.02915 + +F8E5M2FNUZ has 5 exponent bits and 2 mantissa bits. +F8E4M3FNUZ has 4 exponent bits and 3 mantissa bits. + +The "FNUZ" means only Finite and NaN values are supported; zero is +unsigned. Unlike IEEE types, infinities are not supported. NaN is +represented when the exponent and mantissa bits are all 0s with a sign bit +of 1. All other values are finite. + +These differences mean there's an additional exponent value available. To +keep the same dynamic range as an IEEE-like FP8 type, the exponent is +biased one more than would be expected given the number of exponent bits +(8 for Float8E4M3FNUZ and 16 for Float8E5M2FNUZ). +""" +F8E4M3FNUZ: PrimitiveType.ValueType # 25 +C64: PrimitiveType.ValueType # 15 +"""Complex values of fixed width. +Paired F32 (real, imag), as in std::complex. +""" +C128: PrimitiveType.ValueType # 18 +"""Paired F64 (real, imag), as in std::complex.""" +TUPLE: PrimitiveType.ValueType # 13 +"""A tuple is a polymorphic sequence; e.g. a shape that holds different +sub-shapes. They are used for things like returning multiple values from a +computation; e.g. a computation that returns weights and biases may have a +signature that results in a tuple like (f32[784x2000], f32[2000]) + +If a shape proto has the tuple element type, it may not have any entries +in the dimensions field. +""" +OPAQUE_TYPE: PrimitiveType.ValueType # 14 +"""An opaque type used for passing context-specific data to a custom +operation. Shapes of this primitive type will have empty dimensions and +tuple_shapes fields. 
+ +(OPAQUE would be a better name for this identifier, but that conflicts with +a macro defined in windows.h.) +""" +TOKEN: PrimitiveType.ValueType # 17 +"""A token type threaded between side-effecting operations. Shapes of this +primitive type will have empty dimensions and tuple_shapes fields. +""" +global___PrimitiveType = PrimitiveType + +class _DimLevelType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _DimLevelTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DimLevelType.ValueType], builtins.type +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + DIM_DENSE: _DimLevelType.ValueType # 0 + """The corresponding dimension is Dense, every entry is stored.""" + DIM_COMPRESSED: _DimLevelType.ValueType # 1 + """The corresponding dimension is Compressed, only nonzeros are stored.""" + DIM_SINGLETON: _DimLevelType.ValueType # 2 + """The corresponding dimension contains a single coordinate, no sibling + elements for each parent. + """ + DIM_LOOSE_COMPRESSED: _DimLevelType.ValueType # 3 + """The corresponding dimension is Compressed, but with potential trailing + zeros, thus an extra upper bound (high) is used to exclude those zeros. + E.g., indices = [1, 2, 0, 0, 3, 4, 0, 0], position = [(0, 2), (4, 6)]. + """ + +class DimLevelType(_DimLevelType, metaclass=_DimLevelTypeEnumTypeWrapper): + """A DimLevelType indicates the encoding method for a dimension in an array. + The semantics of this field are identical to those of the MLIR SparseTensor + dialect. 
+ This should be kept in sync with the SparseTensor DimLevelType enum: + https://github.com/llvm/llvm-project/blob/5674a3c88088e668b684326c2194a6282e8270ff/mlir/include/mlir/Dialect/SparseTensor/IR/SparseTensorAttrDefs.td#L86 + """ + +DIM_DENSE: DimLevelType.ValueType # 0 +"""The corresponding dimension is Dense, every entry is stored.""" +DIM_COMPRESSED: DimLevelType.ValueType # 1 +"""The corresponding dimension is Compressed, only nonzeros are stored.""" +DIM_SINGLETON: DimLevelType.ValueType # 2 +"""The corresponding dimension contains a single coordinate, no sibling +elements for each parent. +""" +DIM_LOOSE_COMPRESSED: DimLevelType.ValueType # 3 +"""The corresponding dimension is Compressed, but with potential trailing +zeros, thus an extra upper bound (high) is used to exclude those zeros. +E.g., indices = [1, 2, 0, 0, 3, 4, 0, 0], position = [(0, 2), (4, 6)]. +""" +global___DimLevelType = DimLevelType + +class _ProfileType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _ProfileTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ProfileType.ValueType], builtins.type +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + INVALID: _ProfileType.ValueType # 0 + WINDOW: _ProfileType.ValueType # 1 + FLAG: _ProfileType.ValueType # 2 + INTEGER: _ProfileType.ValueType # 3 + +class ProfileType(_ProfileType, metaclass=_ProfileTypeEnumTypeWrapper): + """The type optimization profiles in use for Op-level optimizations.""" + +INVALID: ProfileType.ValueType # 0 +WINDOW: ProfileType.ValueType # 1 +FLAG: ProfileType.ValueType # 2 +INTEGER: ProfileType.ValueType # 3 +global___ProfileType = ProfileType + +class _ProfileSource: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _ProfileSourceEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ProfileSource.ValueType], builtins.type +): + 
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + PROFILE_SOURCE_UNKNOWN_SOURCE: _ProfileSource.ValueType # 0 + PROFILE_SOURCE_EMBEDDED: _ProfileSource.ValueType # 1 + PROFILE_SOURCE_REMOTE: _ProfileSource.ValueType # 2 + +class ProfileSource(_ProfileSource, metaclass=_ProfileSourceEnumTypeWrapper): + """The source of the optimization profile.""" + +PROFILE_SOURCE_UNKNOWN_SOURCE: ProfileSource.ValueType # 0 +PROFILE_SOURCE_EMBEDDED: ProfileSource.ValueType # 1 +PROFILE_SOURCE_REMOTE: ProfileSource.ValueType # 2 +global___ProfileSource = ProfileSource + +class _CompilationEvent: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _CompilationEventEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_CompilationEvent.ValueType], builtins.type +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + COMPILATION_EVENT_UNKNOWN_EVENT: _CompilationEvent.ValueType # 0 + COMPILATION_EVENT_FIRST_COMPILATION: _CompilationEvent.ValueType # 1 + COMPILATION_EVENT_RECOMPILATION: _CompilationEvent.ValueType # 2 + +class CompilationEvent(_CompilationEvent, metaclass=_CompilationEventEnumTypeWrapper): + """The compilation event that triggered the use of the profile.""" + +COMPILATION_EVENT_UNKNOWN_EVENT: CompilationEvent.ValueType # 0 +COMPILATION_EVENT_FIRST_COMPILATION: CompilationEvent.ValueType # 1 +COMPILATION_EVENT_RECOMPILATION: CompilationEvent.ValueType # 2 +global___CompilationEvent = CompilationEvent + +class _PaddingType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _PaddingTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_PaddingType.ValueType], builtins.type +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + PADDING_INVALID: _PaddingType.ValueType # 0 + PADDING_VALID: _PaddingType.ValueType # 1 + """Only valid portion of the base are covered.""" + PADDING_SAME: 
_PaddingType.ValueType # 2 + """Extra is added to produce same output size as the input.""" + +class PaddingType(_PaddingType, metaclass=_PaddingTypeEnumTypeWrapper): ... + +PADDING_INVALID: PaddingType.ValueType # 0 +PADDING_VALID: PaddingType.ValueType # 1 +"""Only valid portion of the base are covered.""" +PADDING_SAME: PaddingType.ValueType # 2 +"""Extra is added to produce same output size as the input.""" +global___PaddingType = PaddingType + +class _FftType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _FftTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FftType.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + FFT: _FftType.ValueType # 0 + """Forward FFT; complex in, complex out.""" + IFFT: _FftType.ValueType # 1 + """Inverse FFT; complex in, complex out.""" + RFFT: _FftType.ValueType # 2 + """Forward real FFT; real in, fft_length / 2 + 1 complex out""" + IRFFT: _FftType.ValueType # 3 + """Inverse real FFT; fft_length / 2 + 1 complex in,""" + +class FftType(_FftType, metaclass=_FftTypeEnumTypeWrapper): ... 
+ +FFT: FftType.ValueType # 0 +"""Forward FFT; complex in, complex out.""" +IFFT: FftType.ValueType # 1 +"""Inverse FFT; complex in, complex out.""" +RFFT: FftType.ValueType # 2 +"""Forward real FFT; real in, fft_length / 2 + 1 complex out""" +IRFFT: FftType.ValueType # 3 +"""Inverse real FFT; fft_length / 2 + 1 complex in,""" +global___FftType = FftType + +class _SparsityType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _SparsityTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_SparsityType.ValueType], builtins.type +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + SPARSITY_INVALID: _SparsityType.ValueType # 0 + SPARSITY_STRUCTURED_N_M: _SparsityType.ValueType # 1 + """Structured N:M sparsity.""" + +class SparsityType(_SparsityType, metaclass=_SparsityTypeEnumTypeWrapper): ... + +SPARSITY_INVALID: SparsityType.ValueType # 0 +SPARSITY_STRUCTURED_N_M: SparsityType.ValueType # 1 +"""Structured N:M sparsity.""" +global___SparsityType = SparsityType + +class _RandomDistribution: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _RandomDistributionEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_RandomDistribution.ValueType], builtins.type +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + RNG_INVALID: _RandomDistribution.ValueType # 0 + RNG_UNIFORM: _RandomDistribution.ValueType # 1 + """Creates a uniform-distribution-generated random number on the semi-open + interval [parameter[0], parameter[1]). + """ + RNG_NORMAL: _RandomDistribution.ValueType # 2 + """Creates a normal-distribution-generated random number with mean + parameter[0] and standard deviation parameter[1]. + """ + +class RandomDistribution(_RandomDistribution, metaclass=_RandomDistributionEnumTypeWrapper): ... 
+ +RNG_INVALID: RandomDistribution.ValueType # 0 +RNG_UNIFORM: RandomDistribution.ValueType # 1 +"""Creates a uniform-distribution-generated random number on the semi-open +interval [parameter[0], parameter[1]). +""" +RNG_NORMAL: RandomDistribution.ValueType # 2 +"""Creates a normal-distribution-generated random number with mean +parameter[0] and standard deviation parameter[1]. +""" +global___RandomDistribution = RandomDistribution + +class _RandomAlgorithm: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _RandomAlgorithmEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_RandomAlgorithm.ValueType], builtins.type +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + RNG_DEFAULT: _RandomAlgorithm.ValueType # 0 + """Backend dependent default algorithm.""" + RNG_THREE_FRY: _RandomAlgorithm.ValueType # 1 + RNG_PHILOX: _RandomAlgorithm.ValueType # 2 + """Next: 2""" + +class RandomAlgorithm(_RandomAlgorithm, metaclass=_RandomAlgorithmEnumTypeWrapper): ... + +RNG_DEFAULT: RandomAlgorithm.ValueType # 0 +"""Backend dependent default algorithm.""" +RNG_THREE_FRY: RandomAlgorithm.ValueType # 1 +RNG_PHILOX: RandomAlgorithm.ValueType # 2 +"""Next: 2""" +global___RandomAlgorithm = RandomAlgorithm + +@typing.final +class PaddingConfig(google.protobuf.message.Message): + """Describes the padding configuration for Pad operation. The padding amount on + both edges as well as between the elements are specified for each dimension. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class PaddingConfigDimension(google.protobuf.message.Message): + """Describes the padding configuration for a dimension.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + EDGE_PADDING_LOW_FIELD_NUMBER: builtins.int + EDGE_PADDING_HIGH_FIELD_NUMBER: builtins.int + INTERIOR_PADDING_FIELD_NUMBER: builtins.int + edge_padding_low: builtins.int + """Padding amount on the low-end (next to the index 0). May be negative.""" + edge_padding_high: builtins.int + """Padding amount on the high-end (next to the highest index). May be + negative. + """ + interior_padding: builtins.int + """Padding amount between the elements. May not be negative.""" + def __init__( + self, + *, + edge_padding_low: builtins.int | None = ..., + edge_padding_high: builtins.int | None = ..., + interior_padding: builtins.int | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "edge_padding_high", + b"edge_padding_high", + "edge_padding_low", + b"edge_padding_low", + "interior_padding", + b"interior_padding", + ], + ) -> None: ... + + DIMENSIONS_FIELD_NUMBER: builtins.int + @property + def dimensions( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PaddingConfig.PaddingConfigDimension]: + """The padding configuration for all dimensions.""" + + def __init__( + self, *, dimensions: collections.abc.Iterable[global___PaddingConfig.PaddingConfigDimension] | None = ... + ) -> None: ... + def ClearField(self, field_name: typing.Literal["dimensions", b"dimensions"]) -> None: ... + +global___PaddingConfig = PaddingConfig + +@typing.final +class TileProto(google.protobuf.message.Message): + """Describes a tile used in tiling-based layout. Refer to + g3doc/third_party/xla/docs/tiled_layout.md for details about tiling-based + layout. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DIMENSIONS_FIELD_NUMBER: builtins.int + @property + def dimensions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Number of elements in each dimension of the tile. It's ordered from the + most major dimension of the tile to the most minor dimension of the tile. + The dimensions correspond to a suffix of the dimensions of the shape being + tiled. + """ + + def __init__(self, *, dimensions: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["dimensions", b"dimensions"]) -> None: ... + +global___TileProto = TileProto + +@typing.final +class SplitConfigProto(google.protobuf.message.Message): + """Describes how data should be split between different memories.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DIMENSION_FIELD_NUMBER: builtins.int + SPLIT_INDICES_FIELD_NUMBER: builtins.int + dimension: builtins.int + """The dimension that is split.""" + @property + def split_indices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The indices where each split point occurs. For example, if the dimension + size is 1024, a split_indices value of {512} indicates a two-way split of + data through the middle. + """ + + def __init__( + self, *, dimension: builtins.int | None = ..., split_indices: collections.abc.Iterable[builtins.int] | None = ... + ) -> None: ... + def ClearField(self, field_name: typing.Literal["dimension", b"dimension", "split_indices", b"split_indices"]) -> None: ... + +global___SplitConfigProto = SplitConfigProto + +@typing.final +class LayoutProto(google.protobuf.message.Message): + """A layout describes how the array is placed in (1D) memory space. This + includes the minor-to-major ordering of dimensions within a shape. + + Clients must specify the layouts of input Literals to the + computation. 
Layouts specified in interior operations which take Shapes (for + example, Convert) are ignored. + + See the XLA documentation for more information on shapes and layouts. + + LINT.IfChange + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DIM_LEVEL_TYPES_FIELD_NUMBER: builtins.int + DIM_UNIQUE_FIELD_NUMBER: builtins.int + DIM_ORDERED_FIELD_NUMBER: builtins.int + MINOR_TO_MAJOR_FIELD_NUMBER: builtins.int + TILES_FIELD_NUMBER: builtins.int + TAIL_PADDING_ALIGNMENT_IN_ELEMENTS_FIELD_NUMBER: builtins.int + ELEMENT_SIZE_IN_BITS_FIELD_NUMBER: builtins.int + MEMORY_SPACE_FIELD_NUMBER: builtins.int + INDEX_PRIMITIVE_TYPE_FIELD_NUMBER: builtins.int + POINTER_PRIMITIVE_TYPE_FIELD_NUMBER: builtins.int + PHYSICAL_SHAPE_FIELD_NUMBER: builtins.int + DYNAMIC_SHAPE_METADATA_PREFIX_BYTES_FIELD_NUMBER: builtins.int + SPLIT_CONFIGS_FIELD_NUMBER: builtins.int + tail_padding_alignment_in_elements: builtins.int + """The shape is padded at the end to multiple of, in terms of number of + elements. This is useful when tiling does not bring the shape to certain + desired granules. Tiling effectively pads/reshapes/transposes the shape + to another shape. This field pads the total number of elements of that + new shape to a multiple of certain number of elements. This is useful such + as we want a layout which does not tile the data but still requires it to + be padded to certain number of elements. + """ + element_size_in_bits: builtins.int + """(Optional) Bit size of each element. When unspecified or being 0, default + to ShapeUtil::ByteSizeOfPrimitiveType. + """ + memory_space: builtins.int + """Memory space where this array resides. The integer field is interpreted in + a backend-specific manner. + """ + index_primitive_type: global___PrimitiveType.ValueType + """The integer types to be used for indices and pointers. These fields must + not be used unless the layout represents a sparse array. The PrimitiveType + must correspond to an unsigned integer (U8, U16, U32, or U64). 
+ If not provided, the compiler will use the largest unsigned integer + that is naturally supported by the target device (U32 or U64 in currently + supported devices). + """ + pointer_primitive_type: global___PrimitiveType.ValueType + dynamic_shape_metadata_prefix_bytes: builtins.int + """The dynamic shape metadata size in bytes in front of the shape data. The + field may be non-zero for a static shape whose associated buffer is for a + dynamic shape, e.g. a result of SliceToDynamic. + """ + @property + def dim_level_types( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___DimLevelType.ValueType]: + """The dimension level type list for this array, specifying the way in which + each array dimension is represented in memory. If this list is empty, the + array is assumed to be dense. + """ + + @property + def dim_unique(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: + """Whether each dimension is unique or ordered. Each of the following lists + must be empty, or have one entry for each entry of dim_level_types. If + either list is empty, all dimensions are assumed to be unique and ordered, + respectively. Entries in this list may not be false for some DimLevelType + values (such as DIM_DENSE in particular). + """ + + @property + def dim_ordered(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... + @property + def minor_to_major(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Sequence of dimension numbers, from minor (fastest varying index) to major + (slowest varying index). This field is required. + """ + + @property + def tiles(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TileProto]: + """A sequence of tiles, starting from the tile that's applied first to the + Shape. + + TODO(b/119839262): implement tiling in each backend or add Unimplemented + error. 
+ """ + + @property + def physical_shape(self) -> global___ShapeProto: + """The physical, on-device shape used to represent the shape this layout + belongs to. Only used for sparse arrays. + The layout(s) contained within the physical shape should not also contain + a physical shape. + """ + + @property + def split_configs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SplitConfigProto]: + """The split configurations which describe if/how the data is split between + different memories. + """ + + def __init__( + self, + *, + dim_level_types: collections.abc.Iterable[global___DimLevelType.ValueType] | None = ..., + dim_unique: collections.abc.Iterable[builtins.bool] | None = ..., + dim_ordered: collections.abc.Iterable[builtins.bool] | None = ..., + minor_to_major: collections.abc.Iterable[builtins.int] | None = ..., + tiles: collections.abc.Iterable[global___TileProto] | None = ..., + tail_padding_alignment_in_elements: builtins.int | None = ..., + element_size_in_bits: builtins.int | None = ..., + memory_space: builtins.int | None = ..., + index_primitive_type: global___PrimitiveType.ValueType | None = ..., + pointer_primitive_type: global___PrimitiveType.ValueType | None = ..., + physical_shape: global___ShapeProto | None = ..., + dynamic_shape_metadata_prefix_bytes: builtins.int | None = ..., + split_configs: collections.abc.Iterable[global___SplitConfigProto] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["physical_shape", b"physical_shape"]) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "dim_level_types", + b"dim_level_types", + "dim_ordered", + b"dim_ordered", + "dim_unique", + b"dim_unique", + "dynamic_shape_metadata_prefix_bytes", + b"dynamic_shape_metadata_prefix_bytes", + "element_size_in_bits", + b"element_size_in_bits", + "index_primitive_type", + b"index_primitive_type", + "memory_space", + b"memory_space", + "minor_to_major", + b"minor_to_major", + "physical_shape", + b"physical_shape", + "pointer_primitive_type", + b"pointer_primitive_type", + "split_configs", + b"split_configs", + "tail_padding_alignment_in_elements", + b"tail_padding_alignment_in_elements", + "tiles", + b"tiles", + ], + ) -> None: ... + +global___LayoutProto = LayoutProto + +@typing.final +class ShapeProto(google.protobuf.message.Message): + """A shape describes the number of dimensions in the array, the size of each + dimension, and the primitive component type. + + Tuples are a special case in that they have rank zero and have tuple_shapes + defined. + + See the XLA documentation for more information on shapes and layouts. + + LINT.IfChange + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ELEMENT_TYPE_FIELD_NUMBER: builtins.int + DIMENSIONS_FIELD_NUMBER: builtins.int + TUPLE_SHAPES_FIELD_NUMBER: builtins.int + LAYOUT_FIELD_NUMBER: builtins.int + IS_DYNAMIC_DIMENSION_FIELD_NUMBER: builtins.int + element_type: global___PrimitiveType.ValueType + """The element type for this shape.""" + @property + def dimensions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The size (number of elements) for each dimension, or an upper bound on the + size if the dimension is dynamic. In XLA, dimensions are numbered from 0 + to N-1 for an N-dimensional array. The first element of 'dimensions' is the + size of dimension 0, the second element is the size of dimension 1, and so + forth. Empty list indicates a scalar. 
+ + If the respective element in 'is_dimension_dynamic' is true then the value + in this field represents an upper bound on the size of the dimension. + """ + + @property + def tuple_shapes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ShapeProto]: + """For tuples only, the shapes of constituent shapes in the tuple sequence.""" + + @property + def layout(self) -> global___LayoutProto: + """The layout used to back this shape.""" + + @property + def is_dynamic_dimension(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: + """For arrays, this indicates whether or not each dimension is + dynamically-sized. The number of elements in this repeated field should be + zero (indicating that no dimensions are dynamic) or equal to the number of + elements in the 'dimensions' field. + """ + + def __init__( + self, + *, + element_type: global___PrimitiveType.ValueType | None = ..., + dimensions: collections.abc.Iterable[builtins.int] | None = ..., + tuple_shapes: collections.abc.Iterable[global___ShapeProto] | None = ..., + layout: global___LayoutProto | None = ..., + is_dynamic_dimension: collections.abc.Iterable[builtins.bool] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["layout", b"layout"]) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "dimensions", + b"dimensions", + "element_type", + b"element_type", + "is_dynamic_dimension", + b"is_dynamic_dimension", + "layout", + b"layout", + "tuple_shapes", + b"tuple_shapes", + ], + ) -> None: ... + +global___ShapeProto = ShapeProto + +@typing.final +class ProgramShapeProto(google.protobuf.message.Message): + """Shape of the parameters and output of a computation (like a traditional + function signature). 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PARAMETERS_FIELD_NUMBER: builtins.int + RESULT_FIELD_NUMBER: builtins.int + PARAMETER_NAMES_FIELD_NUMBER: builtins.int + @property + def parameters(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ShapeProto]: ... + @property + def result(self) -> global___ShapeProto: ... + @property + def parameter_names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + parameters: collections.abc.Iterable[global___ShapeProto] | None = ..., + result: global___ShapeProto | None = ..., + parameter_names: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["result", b"result"]) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["parameter_names", b"parameter_names", "parameters", b"parameters", "result", b"result"] + ) -> None: ... + +global___ProgramShapeProto = ProgramShapeProto + +@typing.final +class ComputationStats(google.protobuf.message.Message): + """Statistics of a computation.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FLOP_COUNT_FIELD_NUMBER: builtins.int + TRANSCENDENTAL_COUNT_FIELD_NUMBER: builtins.int + flop_count: builtins.float + """The number of floating point operations in the computation.""" + transcendental_count: builtins.float + """The number of transcendental operations (e.g., exp) in the computation.""" + def __init__(self, *, flop_count: builtins.float | None = ..., transcendental_count: builtins.float | None = ...) -> None: ... + def ClearField( + self, field_name: typing.Literal["flop_count", b"flop_count", "transcendental_count", b"transcendental_count"] + ) -> None: ... + +global___ComputationStats = ComputationStats + +@typing.final +class OpMetadata(google.protobuf.message.Message): + """Symbolization metadata for HLO Instructions. 
+ + This metadata is used for debugging XLA code generation, as well as + performance profiling of XLA-generated executables. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class ProfileInfo(google.protobuf.message.Message): + """Information about the optimization profile that this operation contains.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROFILE_TYPE_FIELD_NUMBER: builtins.int + RELATIVE_SPEEDUP_FIELD_NUMBER: builtins.int + PROFILE_SOURCE_FIELD_NUMBER: builtins.int + COMPILATION_EVENT_FIELD_NUMBER: builtins.int + relative_speedup: builtins.float + """Speedup of tuned config compared to default config. + TODO(b/203817882) Set the relative_speedup. + """ + profile_source: global___ProfileSource.ValueType + """The source of the optimization profiles that this operation contains.""" + compilation_event: global___CompilationEvent.ValueType + """The compilation event that triggered the use of the profiles.""" + @property + def profile_type( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___ProfileType.ValueType]: + """The type of optimization profiles that this operation contains.""" + + def __init__( + self, + *, + profile_type: collections.abc.Iterable[global___ProfileType.ValueType] | None = ..., + relative_speedup: builtins.float | None = ..., + profile_source: global___ProfileSource.ValueType | None = ..., + compilation_event: global___CompilationEvent.ValueType | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "compilation_event", + b"compilation_event", + "profile_source", + b"profile_source", + "profile_type", + b"profile_type", + "relative_speedup", + b"relative_speedup", + ], + ) -> None: ... 
+ + OP_TYPE_FIELD_NUMBER: builtins.int + OP_NAME_FIELD_NUMBER: builtins.int + SOURCE_FILE_FIELD_NUMBER: builtins.int + SOURCE_LINE_FIELD_NUMBER: builtins.int + PROFILE_TYPE_FIELD_NUMBER: builtins.int + SIZE_OF_GENERATED_CODE_IN_BYTES_FIELD_NUMBER: builtins.int + SIZE_OF_MEMORY_WORKING_SET_IN_BYTES_FIELD_NUMBER: builtins.int + PROFILE_INFO_FIELD_NUMBER: builtins.int + DEDUPLICATED_NAME_FIELD_NUMBER: builtins.int + PRESERVE_LAYOUT_FIELD_NUMBER: builtins.int + STACK_FRAME_ID_FIELD_NUMBER: builtins.int + SCHEDULING_NAME_FIELD_NUMBER: builtins.int + op_type: builtins.str + """The framework op name that generated this XLA op. + + Frameworks that build on top of XLA should mirror the names of their ops + back to users by specifying the op_type. In this way, even if the + framework's "ops" are implemented as multiple XLA HLO Ops, they can be + grouped appropriately. (e.g. if a SoftMax layer is emitted into XLA as + multiple ops, then each op should have the op_type be "SoftMax".) + """ + op_name: builtins.str + """The user-specified name of the op. + + This name is often unique within a computation. Note: some frameworks + add auto-generated names if the user does not provide one. + """ + source_file: builtins.str + """Indicate a file and line that this op is associated to in a user's program. + + e.g. it could be the file and line of user code that generated the op. + """ + source_line: builtins.int + size_of_generated_code_in_bytes: builtins.int + """The footprint of the generated code for the instruction.""" + size_of_memory_working_set_in_bytes: builtins.int + """The size of the working set, i.e., the amount of memory, used by the + instruction in a compiler-managed fast device memory. + """ + deduplicated_name: builtins.str + """Deduplicated HLO name for this op. In some cases, we can have multiple + instructions (e.g. fusions) that are considered duplicates. 
We want to + group them together under the same name so that we can group them together + during analysis (e.g. HLO Op Profile tool in Xprof). + E.g. If we have fusion.1, fusion.2, and fusion.3 marked as duplicates, + fusion.2 and fusion.3 will have deduplicated_name = fusion.1 + """ + preserve_layout: builtins.bool + """Whether to preserve the layout of the HLO op.""" + stack_frame_id: builtins.int + """1-based position of the frame in frames flat array. + Ids are 1-based to keep 0 value as representation of non-set property. + """ + scheduling_name: builtins.str + """Instruction name available upon scheduling.""" + @property + def profile_type(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___ProfileType.ValueType]: + """Deprecated, use [ProfileInfo][profile_type] instead.""" + + @property + def profile_info(self) -> global___OpMetadata.ProfileInfo: + """Profile information for the Op.""" + + def __init__( + self, + *, + op_type: builtins.str | None = ..., + op_name: builtins.str | None = ..., + source_file: builtins.str | None = ..., + source_line: builtins.int | None = ..., + profile_type: collections.abc.Iterable[global___ProfileType.ValueType] | None = ..., + size_of_generated_code_in_bytes: builtins.int | None = ..., + size_of_memory_working_set_in_bytes: builtins.int | None = ..., + profile_info: global___OpMetadata.ProfileInfo | None = ..., + deduplicated_name: builtins.str | None = ..., + preserve_layout: builtins.bool | None = ..., + stack_frame_id: builtins.int | None = ..., + scheduling_name: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["profile_info", b"profile_info"]) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "deduplicated_name", + b"deduplicated_name", + "op_name", + b"op_name", + "op_type", + b"op_type", + "preserve_layout", + b"preserve_layout", + "profile_info", + b"profile_info", + "profile_type", + b"profile_type", + "scheduling_name", + b"scheduling_name", + "size_of_generated_code_in_bytes", + b"size_of_generated_code_in_bytes", + "size_of_memory_working_set_in_bytes", + b"size_of_memory_working_set_in_bytes", + "source_file", + b"source_file", + "source_line", + b"source_line", + "stack_frame_id", + b"stack_frame_id", + ], + ) -> None: ... + +global___OpMetadata = OpMetadata + +@typing.final +class ExecutionProfile(google.protobuf.message.Message): + """Profile data from the execution of a computation.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + COMPILATION_CACHE_HIT_FIELD_NUMBER: builtins.int + COMPILE_TIME_MS_FIELD_NUMBER: builtins.int + COMPUTE_CYCLE_COUNT_FIELD_NUMBER: builtins.int + COMPUTE_TIME_NS_FIELD_NUMBER: builtins.int + COMPUTE_AND_TRANSFER_TIME_NS_FIELD_NUMBER: builtins.int + EXECUTABLE_SIZE_IN_BYTES_FIELD_NUMBER: builtins.int + PROFILE_CACHE_HIT_FIELD_NUMBER: builtins.int + WARMUP_RUN_EXECUTED_FIELD_NUMBER: builtins.int + compilation_cache_hit: builtins.bool + """Whether the executable was read from the compilation cache.""" + compile_time_ms: builtins.int + """The time in milliseconds spent to compile the computation. This only set if + the executable was not read from the compilation cache + (compilation_cache_hit == false). + """ + compute_cycle_count: builtins.int + """The number of cycles spent for the computation. This does not include the + time taken for the data transfers between the host and the device. This is + a target-dependent field and only used for debugging purposes. 
+ """ + compute_time_ns: builtins.int + """The time in nanoseconds spent for the computation, without data transfer.""" + compute_and_transfer_time_ns: builtins.int + """The time in nanoseconds spent for the entire computation, including the + result data transfer time. Current implementation does not spend any cycles + for the input data transfer since the memory is initialized with the proper + values before the execution. + """ + executable_size_in_bytes: builtins.int + """The size of the binary code in the executable.""" + profile_cache_hit: builtins.bool + """Whether this profile was drawn from a cache of profiles instead of from + execution on the hardware. + """ + warmup_run_executed: builtins.bool + """Whether a warm-up run of the computation was executed before the + measured execution. + """ + def __init__( + self, + *, + compilation_cache_hit: builtins.bool | None = ..., + compile_time_ms: builtins.int | None = ..., + compute_cycle_count: builtins.int | None = ..., + compute_time_ns: builtins.int | None = ..., + compute_and_transfer_time_ns: builtins.int | None = ..., + executable_size_in_bytes: builtins.int | None = ..., + profile_cache_hit: builtins.bool | None = ..., + warmup_run_executed: builtins.bool | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "compilation_cache_hit", + b"compilation_cache_hit", + "compile_time_ms", + b"compile_time_ms", + "compute_and_transfer_time_ns", + b"compute_and_transfer_time_ns", + "compute_cycle_count", + b"compute_cycle_count", + "compute_time_ns", + b"compute_time_ns", + "executable_size_in_bytes", + b"executable_size_in_bytes", + "profile_cache_hit", + b"profile_cache_hit", + "warmup_run_executed", + b"warmup_run_executed", + ], + ) -> None: ... + +global___ExecutionProfile = ExecutionProfile + +@typing.final +class ExecutionHandle(google.protobuf.message.Message): + """Handle given to a user that represents an execution that the user launched + asynchronously on the device. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + HANDLE_FIELD_NUMBER: builtins.int + handle: builtins.int + def __init__(self, *, handle: builtins.int | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["handle", b"handle"]) -> None: ... + +global___ExecutionHandle = ExecutionHandle + +@typing.final +class GlobalDataHandle(google.protobuf.message.Message): + """Handle given to a user that represents a globally accessible allocation. + Contrast this against a ComputationDataHandle, which is not globally + accessible, since it only exists within a specific computation. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + HANDLE_FIELD_NUMBER: builtins.int + handle: builtins.int + def __init__(self, *, handle: builtins.int | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["handle", b"handle"]) -> None: ... + +global___GlobalDataHandle = GlobalDataHandle + +@typing.final +class DeviceHandle(google.protobuf.message.Message): + """Handle given to a user that represents a replicated virtual device. Each + replicated device represents N physical devices for execution where N is the + number of replicas. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + HANDLE_FIELD_NUMBER: builtins.int + DEVICE_COUNT_FIELD_NUMBER: builtins.int + handle: builtins.int + device_count: builtins.int + """The number of model-parallel virtual devices that communicate via XLA + Send/Recv instructions. + """ + def __init__(self, *, handle: builtins.int | None = ..., device_count: builtins.int | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["device_count", b"device_count", "handle", b"handle"]) -> None: ... + +global___DeviceHandle = DeviceHandle + +@typing.final +class ChannelHandle(google.protobuf.message.Message): + """Handle given to a user to represent a channel between two computations + via a Send and Recv instruction pair. 
Channels are unbuffered, so Send + Send instructions will be blocked until the data is transferred. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _ChannelType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _ChannelTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ChannelHandle._ChannelType.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + CHANNEL_TYPE_INVALID: ChannelHandle._ChannelType.ValueType # 0 + """Invalid primitive type to serve as default.""" + DEVICE_TO_DEVICE: ChannelHandle._ChannelType.ValueType # 1 + """A channel for sending data between devices.""" + DEVICE_TO_HOST: ChannelHandle._ChannelType.ValueType # 2 + """A channel for sending data from the device to the host. Can only be used + with a Send operation. + """ + HOST_TO_DEVICE: ChannelHandle._ChannelType.ValueType # 3 + """A channel for sending data from the host to the device. Can only be used + with a Recv operation. + """ + + class ChannelType(_ChannelType, metaclass=_ChannelTypeEnumTypeWrapper): ... + CHANNEL_TYPE_INVALID: ChannelHandle.ChannelType.ValueType # 0 + """Invalid primitive type to serve as default.""" + DEVICE_TO_DEVICE: ChannelHandle.ChannelType.ValueType # 1 + """A channel for sending data between devices.""" + DEVICE_TO_HOST: ChannelHandle.ChannelType.ValueType # 2 + """A channel for sending data from the device to the host. Can only be used + with a Send operation. + """ + HOST_TO_DEVICE: ChannelHandle.ChannelType.ValueType # 3 + """A channel for sending data from the host to the device. Can only be used + with a Recv operation. + """ + + HANDLE_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + handle: builtins.int + type: global___ChannelHandle.ChannelType.ValueType + def __init__( + self, *, handle: builtins.int | None = ..., type: global___ChannelHandle.ChannelType.ValueType | None = ... + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["handle", b"handle", "type", b"type"]) -> None: ... + +global___ChannelHandle = ChannelHandle + +@typing.final +class DeviceAssignmentProto(google.protobuf.message.Message): + """DeviceAssignmentProto is a serialized form of DeviceAssignment class, which + represents the device ids assigned to a set of replicated computations. + See xla::DeviceAssignment class comment for more details. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class ComputationDevice(google.protobuf.message.Message): + """Each logical computation runs on replica_count physical devices. + ComputationDevice represents the device ids assinged to the replicas. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REPLICA_DEVICE_IDS_FIELD_NUMBER: builtins.int + @property + def replica_device_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__(self, *, replica_device_ids: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["replica_device_ids", b"replica_device_ids"]) -> None: ... + + REPLICA_COUNT_FIELD_NUMBER: builtins.int + COMPUTATION_COUNT_FIELD_NUMBER: builtins.int + COMPUTATION_DEVICES_FIELD_NUMBER: builtins.int + replica_count: builtins.int + computation_count: builtins.int + @property + def computation_devices( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___DeviceAssignmentProto.ComputationDevice + ]: ... + def __init__( + self, + *, + replica_count: builtins.int | None = ..., + computation_count: builtins.int | None = ..., + computation_devices: collections.abc.Iterable[global___DeviceAssignmentProto.ComputationDevice] | None = ..., + ) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "computation_count", + b"computation_count", + "computation_devices", + b"computation_devices", + "replica_count", + b"replica_count", + ], + ) -> None: ... + +global___DeviceAssignmentProto = DeviceAssignmentProto + +@typing.final +class LiteralProto(google.protobuf.message.Message): + """Literals are used when the server and client need to exchange materialized + data / results. Literals are also used to describe constants used in + computations. + + Transfers to/from the client are encoded in literal form, and the structure + of the repeated fields is implied by the shape. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SHAPE_FIELD_NUMBER: builtins.int + PREDS_FIELD_NUMBER: builtins.int + S2S_FIELD_NUMBER: builtins.int + S4S_FIELD_NUMBER: builtins.int + S8S_FIELD_NUMBER: builtins.int + U2S_FIELD_NUMBER: builtins.int + U4S_FIELD_NUMBER: builtins.int + U8S_FIELD_NUMBER: builtins.int + S32S_FIELD_NUMBER: builtins.int + S64S_FIELD_NUMBER: builtins.int + U32S_FIELD_NUMBER: builtins.int + U64S_FIELD_NUMBER: builtins.int + F32S_FIELD_NUMBER: builtins.int + F64S_FIELD_NUMBER: builtins.int + C64S_FIELD_NUMBER: builtins.int + C128S_FIELD_NUMBER: builtins.int + TUPLE_LITERALS_FIELD_NUMBER: builtins.int + F16S_FIELD_NUMBER: builtins.int + BF16S_FIELD_NUMBER: builtins.int + U16S_FIELD_NUMBER: builtins.int + S16S_FIELD_NUMBER: builtins.int + F8E5M2S_FIELD_NUMBER: builtins.int + F8E4M3FNS_FIELD_NUMBER: builtins.int + F8E4M3B11FNUZS_FIELD_NUMBER: builtins.int + F8E5M2FNUZS_FIELD_NUMBER: builtins.int + F8E4M3FNUZS_FIELD_NUMBER: builtins.int + SPARSE_INDICES_FIELD_NUMBER: builtins.int + s2s: builtins.bytes + s4s: builtins.bytes + s8s: builtins.bytes + u2s: builtins.bytes + u4s: builtins.bytes + u8s: builtins.bytes + f16s: builtins.bytes + """The F16s, BF16s, U16s and S16s are encoded in little endian byte order""" + bf16s: builtins.bytes + u16s: builtins.bytes + s16s: builtins.bytes + f8e5m2s: 
builtins.bytes + f8e4m3fns: builtins.bytes + f8e4m3b11fnuzs: builtins.bytes + f8e5m2fnuzs: builtins.bytes + f8e4m3fnuzs: builtins.bytes + @property + def shape(self) -> global___ShapeProto: ... + @property + def preds(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... + @property + def s32s(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def s64s(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def u32s(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def u64s(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def f32s(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ... + @property + def f64s(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ... + @property + def c64s(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: + """Stored as interleaved real, imag floats.""" + + @property + def c128s(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: + """Stored as interleaved real, imag doubles.""" + + @property + def tuple_literals(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___LiteralProto]: ... 
+ @property + def sparse_indices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Next = 28""" + + def __init__( + self, + *, + shape: global___ShapeProto | None = ..., + preds: collections.abc.Iterable[builtins.bool] | None = ..., + s2s: builtins.bytes | None = ..., + s4s: builtins.bytes | None = ..., + s8s: builtins.bytes | None = ..., + u2s: builtins.bytes | None = ..., + u4s: builtins.bytes | None = ..., + u8s: builtins.bytes | None = ..., + s32s: collections.abc.Iterable[builtins.int] | None = ..., + s64s: collections.abc.Iterable[builtins.int] | None = ..., + u32s: collections.abc.Iterable[builtins.int] | None = ..., + u64s: collections.abc.Iterable[builtins.int] | None = ..., + f32s: collections.abc.Iterable[builtins.float] | None = ..., + f64s: collections.abc.Iterable[builtins.float] | None = ..., + c64s: collections.abc.Iterable[builtins.float] | None = ..., + c128s: collections.abc.Iterable[builtins.float] | None = ..., + tuple_literals: collections.abc.Iterable[global___LiteralProto] | None = ..., + f16s: builtins.bytes | None = ..., + bf16s: builtins.bytes | None = ..., + u16s: builtins.bytes | None = ..., + s16s: builtins.bytes | None = ..., + f8e5m2s: builtins.bytes | None = ..., + f8e4m3fns: builtins.bytes | None = ..., + f8e4m3b11fnuzs: builtins.bytes | None = ..., + f8e5m2fnuzs: builtins.bytes | None = ..., + f8e4m3fnuzs: builtins.bytes | None = ..., + sparse_indices: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["shape", b"shape"]) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "bf16s", + b"bf16s", + "c128s", + b"c128s", + "c64s", + b"c64s", + "f16s", + b"f16s", + "f32s", + b"f32s", + "f64s", + b"f64s", + "f8e4m3b11fnuzs", + b"f8e4m3b11fnuzs", + "f8e4m3fns", + b"f8e4m3fns", + "f8e4m3fnuzs", + b"f8e4m3fnuzs", + "f8e5m2fnuzs", + b"f8e5m2fnuzs", + "f8e5m2s", + b"f8e5m2s", + "preds", + b"preds", + "s16s", + b"s16s", + "s2s", + b"s2s", + "s32s", + b"s32s", + "s4s", + b"s4s", + "s64s", + b"s64s", + "s8s", + b"s8s", + "shape", + b"shape", + "sparse_indices", + b"sparse_indices", + "tuple_literals", + b"tuple_literals", + "u16s", + b"u16s", + "u2s", + b"u2s", + "u32s", + b"u32s", + "u4s", + b"u4s", + "u64s", + b"u64s", + "u8s", + b"u8s", + ], + ) -> None: ... + +global___LiteralProto = LiteralProto + +@typing.final +class WindowDimension(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SIZE_FIELD_NUMBER: builtins.int + STRIDE_FIELD_NUMBER: builtins.int + PADDING_LOW_FIELD_NUMBER: builtins.int + PADDING_HIGH_FIELD_NUMBER: builtins.int + WINDOW_DILATION_FIELD_NUMBER: builtins.int + BASE_DILATION_FIELD_NUMBER: builtins.int + WINDOW_REVERSAL_FIELD_NUMBER: builtins.int + size: builtins.int + """The size of the window in this dimension. For a rectangle, this would be + the width or height. + """ + stride: builtins.int + """The stride at which the window moves across the base area in this + dimension. In other words, this is the spacing between different + positions of the window in this dimension. + """ + padding_low: builtins.int + """If positive, means the amount of padding to add to the base area at the low + end of this dimension; if negative, its negative means the number of + elements removed from the low end of this dimension. For example, in the + horizontal dimension of a rectangle, this would be the number of padding + values to pad on the left, given that indices increase when going right. + The actual padding value depends upon the context. 
Convolution pads with + zeros. ReduceWindow and SelectAndScatter pads with the reduce function's + init value. + """ + padding_high: builtins.int + """As padding_low, but on the high end of this dimension. For example, in the + horizontal dimension of a rectangle, this would be the number of values to + pad on the right, given that indices increase when going right. + """ + window_dilation: builtins.int + """Dilation factor of the sliding window in this dimension. A dilation factor + of 1 means no dilation. window_dilation - 1 no-op entries ("holes") are + implicitly placed between each kernel element. This value may not be less + than 1. See documentation for convolution. + """ + base_dilation: builtins.int + """Dilation factor of the base area in this dimension. A dilation factor of 1 + means no dilation. base_dilation - 1 no-op entries ("holes") are implicitly + placed between each base area element. This value may not be less than 1. + See documentation for convolution. + """ + window_reversal: builtins.bool + """Window reversal means that this dimension was logically reversed before the + operation. + """ + def __init__( + self, + *, + size: builtins.int | None = ..., + stride: builtins.int | None = ..., + padding_low: builtins.int | None = ..., + padding_high: builtins.int | None = ..., + window_dilation: builtins.int | None = ..., + base_dilation: builtins.int | None = ..., + window_reversal: builtins.bool | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "base_dilation", + b"base_dilation", + "padding_high", + b"padding_high", + "padding_low", + b"padding_low", + "size", + b"size", + "stride", + b"stride", + "window_dilation", + b"window_dilation", + "window_reversal", + b"window_reversal", + ], + ) -> None: ... + +global___WindowDimension = WindowDimension + +@typing.final +class Window(google.protobuf.message.Message): + """Describes the windowing in an operation such as convolution. 
+ + The window is moved across a base area and for each position of the + window a computation is performed. The field below describes the + window and the movement of the window across a base area. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DIMENSIONS_FIELD_NUMBER: builtins.int + @property + def dimensions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___WindowDimension]: ... + def __init__(self, *, dimensions: collections.abc.Iterable[global___WindowDimension] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["dimensions", b"dimensions"]) -> None: ... + +global___Window = Window + +@typing.final +class GatherDimensionNumbers(google.protobuf.message.Message): + """Describes the dimension numbers for a gather operation. + + See https://www.tensorflow.org/performance/xla/operation_semantics#gather for + more details. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + OFFSET_DIMS_FIELD_NUMBER: builtins.int + COLLAPSED_SLICE_DIMS_FIELD_NUMBER: builtins.int + START_INDEX_MAP_FIELD_NUMBER: builtins.int + INDEX_VECTOR_DIM_FIELD_NUMBER: builtins.int + OPERAND_BATCHING_DIMS_FIELD_NUMBER: builtins.int + START_INDICES_BATCHING_DIMS_FIELD_NUMBER: builtins.int + index_vector_dim: builtins.int + """The dimension in the start_indices input that contains the starting + indices. + """ + @property + def offset_dims(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """ "Window indices" is a term for a set of indices that index into the + interior of a dynamic-slice from the input tensor, the starting indices for + which were computed from output_gather_dims (see the operation semantic for + how this is defined) and the start_indices tensor. 
+ + The window indices for a specific output index Out is computed as: + + i = 0 + for (k : [0, input_tensor_shape.rank)) + window_indices[k] = + if k in collapsed_slice_dims + then 0 + else Out[offset_dims[i++]] + """ + + @property + def collapsed_slice_dims(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def start_index_map(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """This is interpreted as a map from i to start_index_map[i]. It + transforms the gather index looked up from the start_indices tensor into + the starting index in the input space. + """ + + @property + def operand_batching_dims(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """This is the batch dimensions in the operand.""" + + @property + def start_indices_batching_dims(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """This is the batch dimensions in the index, and it should be the same size + as operand_batching_dims. + """ + + def __init__( + self, + *, + offset_dims: collections.abc.Iterable[builtins.int] | None = ..., + collapsed_slice_dims: collections.abc.Iterable[builtins.int] | None = ..., + start_index_map: collections.abc.Iterable[builtins.int] | None = ..., + index_vector_dim: builtins.int | None = ..., + operand_batching_dims: collections.abc.Iterable[builtins.int] | None = ..., + start_indices_batching_dims: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "collapsed_slice_dims", + b"collapsed_slice_dims", + "index_vector_dim", + b"index_vector_dim", + "offset_dims", + b"offset_dims", + "operand_batching_dims", + b"operand_batching_dims", + "start_index_map", + b"start_index_map", + "start_indices_batching_dims", + b"start_indices_batching_dims", + ], + ) -> None: ... 
+ +global___GatherDimensionNumbers = GatherDimensionNumbers + +@typing.final +class ScatterDimensionNumbers(google.protobuf.message.Message): + """Describes the dimension numbers for a scatter operation. + + All the fields are similar to the corresponding fields in + GatherDimensionNumbers. Differences are noted below. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UPDATE_WINDOW_DIMS_FIELD_NUMBER: builtins.int + INSERTED_WINDOW_DIMS_FIELD_NUMBER: builtins.int + SCATTER_DIMS_TO_OPERAND_DIMS_FIELD_NUMBER: builtins.int + INDEX_VECTOR_DIM_FIELD_NUMBER: builtins.int + INPUT_BATCHING_DIMS_FIELD_NUMBER: builtins.int + SCATTER_INDICES_BATCHING_DIMS_FIELD_NUMBER: builtins.int + index_vector_dim: builtins.int + @property + def update_window_dims(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The set of dimensions in the updates shape that are window dimensions.""" + + @property + def inserted_window_dims(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The set of window dimensions that must be inserted into the updates shape.""" + + @property + def scatter_dims_to_operand_dims(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... 
+ @property + def input_batching_dims(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """This is the batch dimension in the input.""" + + @property + def scatter_indices_batching_dims(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """This is the batch dimension in the index.""" + + def __init__( + self, + *, + update_window_dims: collections.abc.Iterable[builtins.int] | None = ..., + inserted_window_dims: collections.abc.Iterable[builtins.int] | None = ..., + scatter_dims_to_operand_dims: collections.abc.Iterable[builtins.int] | None = ..., + index_vector_dim: builtins.int | None = ..., + input_batching_dims: collections.abc.Iterable[builtins.int] | None = ..., + scatter_indices_batching_dims: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "index_vector_dim", + b"index_vector_dim", + "input_batching_dims", + b"input_batching_dims", + "inserted_window_dims", + b"inserted_window_dims", + "scatter_dims_to_operand_dims", + b"scatter_dims_to_operand_dims", + "scatter_indices_batching_dims", + b"scatter_indices_batching_dims", + "update_window_dims", + b"update_window_dims", + ], + ) -> None: ... 
+ +global___ScatterDimensionNumbers = ScatterDimensionNumbers + +@typing.final +class ConvolutionDimensionNumbers(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INPUT_BATCH_DIMENSION_FIELD_NUMBER: builtins.int + INPUT_FEATURE_DIMENSION_FIELD_NUMBER: builtins.int + INPUT_SPATIAL_DIMENSIONS_FIELD_NUMBER: builtins.int + KERNEL_INPUT_FEATURE_DIMENSION_FIELD_NUMBER: builtins.int + KERNEL_OUTPUT_FEATURE_DIMENSION_FIELD_NUMBER: builtins.int + KERNEL_SPATIAL_DIMENSIONS_FIELD_NUMBER: builtins.int + OUTPUT_BATCH_DIMENSION_FIELD_NUMBER: builtins.int + OUTPUT_FEATURE_DIMENSION_FIELD_NUMBER: builtins.int + OUTPUT_SPATIAL_DIMENSIONS_FIELD_NUMBER: builtins.int + input_batch_dimension: builtins.int + """The number of the dimension that represents batch in the input.""" + input_feature_dimension: builtins.int + """The number of the dimension that represents features in the input.""" + kernel_input_feature_dimension: builtins.int + """The number of the dimension that represents input features in the + convolutional kernel (rhs). + """ + kernel_output_feature_dimension: builtins.int + """The number of the dimension that represents output features in + the convolutional kernel (rhs). + """ + output_batch_dimension: builtins.int + """The number of the dimension that represents batch in the output.""" + output_feature_dimension: builtins.int + """The number of the dimension that represents features in the output.""" + @property + def input_spatial_dimensions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The dimension numbers for the spatial dimensions that the window + moves through in the input. + """ + + @property + def kernel_spatial_dimensions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The dimension numbers for the spatial dimensions that the window + moves through in the kernel (rhs). 
window.strides(0) is the + stride in the kernel_spatial_dimensions(0) dimension. + """ + + @property + def output_spatial_dimensions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The dimension numbers for the spatial dimensions that the window + moves through in the output. + """ + + def __init__( + self, + *, + input_batch_dimension: builtins.int | None = ..., + input_feature_dimension: builtins.int | None = ..., + input_spatial_dimensions: collections.abc.Iterable[builtins.int] | None = ..., + kernel_input_feature_dimension: builtins.int | None = ..., + kernel_output_feature_dimension: builtins.int | None = ..., + kernel_spatial_dimensions: collections.abc.Iterable[builtins.int] | None = ..., + output_batch_dimension: builtins.int | None = ..., + output_feature_dimension: builtins.int | None = ..., + output_spatial_dimensions: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "input_batch_dimension", + b"input_batch_dimension", + "input_feature_dimension", + b"input_feature_dimension", + "input_spatial_dimensions", + b"input_spatial_dimensions", + "kernel_input_feature_dimension", + b"kernel_input_feature_dimension", + "kernel_output_feature_dimension", + b"kernel_output_feature_dimension", + "kernel_spatial_dimensions", + b"kernel_spatial_dimensions", + "output_batch_dimension", + b"output_batch_dimension", + "output_feature_dimension", + b"output_feature_dimension", + "output_spatial_dimensions", + b"output_spatial_dimensions", + ], + ) -> None: ... 
+ +global___ConvolutionDimensionNumbers = ConvolutionDimensionNumbers + +@typing.final +class DotDimensionNumbers(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LHS_CONTRACTING_DIMENSIONS_FIELD_NUMBER: builtins.int + RHS_CONTRACTING_DIMENSIONS_FIELD_NUMBER: builtins.int + LHS_BATCH_DIMENSIONS_FIELD_NUMBER: builtins.int + RHS_BATCH_DIMENSIONS_FIELD_NUMBER: builtins.int + @property + def lhs_contracting_dimensions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The dimension numbers that represent the 'lhs' contracting dimensions.""" + + @property + def rhs_contracting_dimensions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The dimension numbers that represent the 'rhs' contracting dimensions.""" + + @property + def lhs_batch_dimensions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The dimension numbers that represent the 'lhs' batch dimensions.""" + + @property + def rhs_batch_dimensions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The dimension numbers that represent the 'rhs' batch dimensions.""" + + def __init__( + self, + *, + lhs_contracting_dimensions: collections.abc.Iterable[builtins.int] | None = ..., + rhs_contracting_dimensions: collections.abc.Iterable[builtins.int] | None = ..., + lhs_batch_dimensions: collections.abc.Iterable[builtins.int] | None = ..., + rhs_batch_dimensions: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "lhs_batch_dimensions", + b"lhs_batch_dimensions", + "lhs_contracting_dimensions", + b"lhs_contracting_dimensions", + "rhs_batch_dimensions", + b"rhs_batch_dimensions", + "rhs_contracting_dimensions", + b"rhs_contracting_dimensions", + ], + ) -> None: ... 
+ +global___DotDimensionNumbers = DotDimensionNumbers + +@typing.final +class SparsityDescriptor(google.protobuf.message.Message): + """Contains sparsity metadata for a sparse dot operation. + The only supported type atm is structured 2:4 sparsity, which is natively + supported on NVidia GPUs. + Restrictions: + - only one operand of the dot operation may be sparse; + - only the contracting dimension may be sparse. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TYPE_FIELD_NUMBER: builtins.int + INDEX_FIELD_NUMBER: builtins.int + DIMENSION_FIELD_NUMBER: builtins.int + N_FIELD_NUMBER: builtins.int + M_FIELD_NUMBER: builtins.int + type: global___SparsityType.ValueType + index: builtins.int + """Sparse operand index (0 or 1).""" + dimension: builtins.int + """Sparse dimension number.""" + n: builtins.int + """Structured N:M sparsity (N < M).""" + m: builtins.int + def __init__( + self, + *, + type: global___SparsityType.ValueType | None = ..., + index: builtins.int | None = ..., + dimension: builtins.int | None = ..., + n: builtins.int | None = ..., + m: builtins.int | None = ..., + ) -> None: ... + def ClearField( + self, field_name: typing.Literal["dimension", b"dimension", "index", b"index", "m", b"m", "n", b"n", "type", b"type"] + ) -> None: ... 
+ +global___SparsityDescriptor = SparsityDescriptor + +@typing.final +class TriangularSolveOptions(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _Transpose: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _TransposeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TriangularSolveOptions._Transpose.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + TRANSPOSE_INVALID: TriangularSolveOptions._Transpose.ValueType # 0 + NO_TRANSPOSE: TriangularSolveOptions._Transpose.ValueType # 1 + """Don't transpose 'a'.""" + TRANSPOSE: TriangularSolveOptions._Transpose.ValueType # 2 + """Transpose 'a'.""" + ADJOINT: TriangularSolveOptions._Transpose.ValueType # 3 + """Complex conjugate and transpose 'a'.""" + + class Transpose(_Transpose, metaclass=_TransposeEnumTypeWrapper): + """Should we transpose or use the adjoint of 'a'?""" + + TRANSPOSE_INVALID: TriangularSolveOptions.Transpose.ValueType # 0 + NO_TRANSPOSE: TriangularSolveOptions.Transpose.ValueType # 1 + """Don't transpose 'a'.""" + TRANSPOSE: TriangularSolveOptions.Transpose.ValueType # 2 + """Transpose 'a'.""" + ADJOINT: TriangularSolveOptions.Transpose.ValueType # 3 + """Complex conjugate and transpose 'a'.""" + + LEFT_SIDE_FIELD_NUMBER: builtins.int + LOWER_FIELD_NUMBER: builtins.int + UNIT_DIAGONAL_FIELD_NUMBER: builtins.int + TRANSPOSE_A_FIELD_NUMBER: builtins.int + left_side: builtins.bool + """If true, solves ax = b. If false, solves xa = b.""" + lower: builtins.bool + """If true, 'a' is lower triangular. 
If false, 'a' is upper triangular.""" + unit_diagonal: builtins.bool + """If true, the diagonal elements of 'a' are assumed to be 1 and not accessed.""" + transpose_a: global___TriangularSolveOptions.Transpose.ValueType + def __init__( + self, + *, + left_side: builtins.bool | None = ..., + lower: builtins.bool | None = ..., + unit_diagonal: builtins.bool | None = ..., + transpose_a: global___TriangularSolveOptions.Transpose.ValueType | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "left_side", b"left_side", "lower", b"lower", "transpose_a", b"transpose_a", "unit_diagonal", b"unit_diagonal" + ], + ) -> None: ... + +global___TriangularSolveOptions = TriangularSolveOptions + +@typing.final +class CholeskyOptions(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LOWER_FIELD_NUMBER: builtins.int + lower: builtins.bool + """If true, uses the lower triangle of `a`. If false, uses the upper triangle + of `a`. + """ + def __init__(self, *, lower: builtins.bool | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["lower", b"lower"]) -> None: ... + +global___CholeskyOptions = CholeskyOptions + +@typing.final +class SortOptions(google.protobuf.message.Message): + """Attributes of the sort custom call (cub::DeviceRadixSort).""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DESCENDING_FIELD_NUMBER: builtins.int + descending: builtins.bool + def __init__(self, *, descending: builtins.bool | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["descending", b"descending"]) -> None: ... + +global___SortOptions = SortOptions + +@typing.final +class FrontendAttributes(google.protobuf.message.Message): + """Generic map of attributes used to pass hints / configuration options from + the Python frontend to the XLA backend. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class MapEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__(self, *, key: builtins.str | None = ..., value: builtins.str | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + MAP_FIELD_NUMBER: builtins.int + @property + def map(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__(self, *, map: collections.abc.Mapping[builtins.str, builtins.str] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["map", b"map"]) -> None: ... + +global___FrontendAttributes = FrontendAttributes + +@typing.final +class Statistic(google.protobuf.message.Message): + """Represents a single statistic to track.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STAT_NAME_FIELD_NUMBER: builtins.int + STAT_VAL_FIELD_NUMBER: builtins.int + stat_name: builtins.str + """Must be a single word consisting of any alphanumeric characters""" + stat_val: builtins.float + """Must be within a range of [0, 100], in order for the graph dumper to + properly render the statistic onto the graph. + """ + def __init__(self, *, stat_name: builtins.str | None = ..., stat_val: builtins.float | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["stat_name", b"stat_name", "stat_val", b"stat_val"]) -> None: ... + +global___Statistic = Statistic + +@typing.final +class StatisticsViz(google.protobuf.message.Message): + """Represents the information needed to visualize propagation statistics when + rendering an HLO graph. This includes an array of statistics as well as the + index of the statistic to render. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STAT_INDEX_TO_VISUALIZE_FIELD_NUMBER: builtins.int + STATISTICS_FIELD_NUMBER: builtins.int + stat_index_to_visualize: builtins.int + @property + def statistics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statistic]: ... + def __init__( + self, + *, + stat_index_to_visualize: builtins.int | None = ..., + statistics: collections.abc.Iterable[global___Statistic] | None = ..., + ) -> None: ... + def ClearField( + self, field_name: typing.Literal["stat_index_to_visualize", b"stat_index_to_visualize", "statistics", b"statistics"] + ) -> None: ... + +global___StatisticsViz = StatisticsViz + +@typing.final +class OpSharding(google.protobuf.message.Message): + """LINT.IfChange""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _Type: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _TypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OpSharding._Type.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + REPLICATED: OpSharding._Type.ValueType # 0 + """This sharding is replicated across all devices (implies maximal, + all other fields are unused). + """ + MAXIMAL: OpSharding._Type.ValueType # 1 + """This sharding is maximal - one device runs the entire operation.""" + TUPLE: OpSharding._Type.ValueType # 2 + """This sharding is a tuple - only the tuple_shardings field is valid.""" + OTHER: OpSharding._Type.ValueType # 3 + """None of the above; tile_shape and tile_assignment are both used.""" + MANUAL: OpSharding._Type.ValueType # 4 + """This op is manually sharded: the shapes are already partitioned and the + partitioner should not change this op. + """ + UNKNOWN: OpSharding._Type.ValueType # 5 + """This sharding is a placeholder sharding with lowest precedence, it can be + overwriten by any other shardings. 
+ """ + + class Type(_Type, metaclass=_TypeEnumTypeWrapper): ... + REPLICATED: OpSharding.Type.ValueType # 0 + """This sharding is replicated across all devices (implies maximal, + all other fields are unused). + """ + MAXIMAL: OpSharding.Type.ValueType # 1 + """This sharding is maximal - one device runs the entire operation.""" + TUPLE: OpSharding.Type.ValueType # 2 + """This sharding is a tuple - only the tuple_shardings field is valid.""" + OTHER: OpSharding.Type.ValueType # 3 + """None of the above; tile_shape and tile_assignment are both used.""" + MANUAL: OpSharding.Type.ValueType # 4 + """This op is manually sharded: the shapes are already partitioned and the + partitioner should not change this op. + """ + UNKNOWN: OpSharding.Type.ValueType # 5 + """This sharding is a placeholder sharding with lowest precedence, it can be + overwriten by any other shardings. + """ + + class _ShardGroupType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _ShardGroupTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OpSharding._ShardGroupType.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + AS: OpSharding._ShardGroupType.ValueType # 0 + """This op will be sharded exactly the same as the other op. (hard + restriction) + """ + LIKE: OpSharding._ShardGroupType.ValueType # 1 + """This op will try to allow sharding propagation within the same group even + there is no data dependencies among them, but there is no guarantee that + the final shardings within the same group will be exactly the same. (soft + restriction) + """ + + class ShardGroupType(_ShardGroupType, metaclass=_ShardGroupTypeEnumTypeWrapper): + """Used to decide whether this op is to be sharded like some other ops, or to + which other ops will be sharded like. + """ + + AS: OpSharding.ShardGroupType.ValueType # 0 + """This op will be sharded exactly the same as the other op. 
(hard + restriction) + """ + LIKE: OpSharding.ShardGroupType.ValueType # 1 + """This op will try to allow sharding propagation within the same group even + there is no data dependencies among them, but there is no guarantee that + the final shardings within the same group will be exactly the same. (soft + restriction) + """ + + TYPE_FIELD_NUMBER: builtins.int + TILE_SHAPE_FIELD_NUMBER: builtins.int + TILE_ASSIGNMENT_DIMENSIONS_FIELD_NUMBER: builtins.int + TILE_ASSIGNMENT_DEVICES_FIELD_NUMBER: builtins.int + TUPLE_SHARDINGS_FIELD_NUMBER: builtins.int + REPLICATE_ON_LAST_TILE_DIM_FIELD_NUMBER: builtins.int + METADATA_FIELD_NUMBER: builtins.int + LAST_TILE_DIMS_FIELD_NUMBER: builtins.int + IOTA_RESHAPE_DIMS_FIELD_NUMBER: builtins.int + IOTA_TRANSPOSE_PERM_FIELD_NUMBER: builtins.int + IS_SHARD_GROUP_FIELD_NUMBER: builtins.int + SHARD_GROUP_ID_FIELD_NUMBER: builtins.int + SHARD_GROUP_TYPE_FIELD_NUMBER: builtins.int + type: global___OpSharding.Type.ValueType + replicate_on_last_tile_dim: builtins.bool + """Only used for OTHER type. If true, data is sharded according to other + dimensions of tile_assignment(), but replicated across devices along the + last dimension. (Experimental) + """ + is_shard_group: builtins.bool + """This field decides whether this op is in a shard group.""" + shard_group_id: builtins.int + """This field is used to store the unique id of the shard group.""" + shard_group_type: global___OpSharding.ShardGroupType.ValueType + @property + def tile_shape(self) -> global___ShapeProto: + """The shape of the sharded tile.""" + + @property + def tile_assignment_dimensions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The shape of the tile assignment tensor - this must be the same rank as + tile_shape and the product of its dimensions must equal + tile_assignment_devices.size(). 
+ """ + + @property + def tile_assignment_devices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Flattened list of device IDs. The order of flattening is the same as used + by IndexUtil::MultiToLinearIndex(tile_assignment_shape). + Only one of tile_assignment_devices and iota_dimensions shall be non-empty. + """ + + @property + def tuple_shardings(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___OpSharding]: + """If type == TUPLE, the sub-shardings, one per leaf node in the tuple shape, + in pre-order. The tuple shape could be nested; here we store just a + flattened list of all leaves in the tuple shape. Note that the tuple shape + is not stored here; shardings do not store the shapes to which they are + applied, this is inferred from the instruction this sharding gets attached + to. + """ + + @property + def metadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___OpMetadata]: + """This field is used to track the source of this sharding, usually derived + from instructions. Multple metadata may be populated if sharding is + combined with other shardings. Metadata are to not be populated when + type == TUPLE and instead metadata should be set on individual tuple + elements. + """ + + @property + def last_tile_dims( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___OpSharding.Type.ValueType]: + """This field is used to represented the sharding type of each subgroup. + For example, sharding={devices=[2,2,2,2]0,1,2,...,15 last_tile_dims={ + replicate, manual, unreduced}} means that each of the last 3 dimensions + in [2,2,2,2] represents a subgrouping in replicate, manual, + unreduced sharding type respectively. + """ + + @property + def iota_reshape_dims(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Dimensions used to reshape the 1D iota array of device IDs. 
+ Only one of tile_assignment_devices and iota_reshape_dims shall be + non-empty. + """ + + @property + def iota_transpose_perm(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Dimension permutations to transposed the iota array reshaped to + iota_reshape_dims. This must have the same size as iota_reshape_dims. + """ + + def __init__( + self, + *, + type: global___OpSharding.Type.ValueType | None = ..., + tile_shape: global___ShapeProto | None = ..., + tile_assignment_dimensions: collections.abc.Iterable[builtins.int] | None = ..., + tile_assignment_devices: collections.abc.Iterable[builtins.int] | None = ..., + tuple_shardings: collections.abc.Iterable[global___OpSharding] | None = ..., + replicate_on_last_tile_dim: builtins.bool | None = ..., + metadata: collections.abc.Iterable[global___OpMetadata] | None = ..., + last_tile_dims: collections.abc.Iterable[global___OpSharding.Type.ValueType] | None = ..., + iota_reshape_dims: collections.abc.Iterable[builtins.int] | None = ..., + iota_transpose_perm: collections.abc.Iterable[builtins.int] | None = ..., + is_shard_group: builtins.bool | None = ..., + shard_group_id: builtins.int | None = ..., + shard_group_type: global___OpSharding.ShardGroupType.ValueType | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["tile_shape", b"tile_shape"]) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "iota_reshape_dims", + b"iota_reshape_dims", + "iota_transpose_perm", + b"iota_transpose_perm", + "is_shard_group", + b"is_shard_group", + "last_tile_dims", + b"last_tile_dims", + "metadata", + b"metadata", + "replicate_on_last_tile_dim", + b"replicate_on_last_tile_dim", + "shard_group_id", + b"shard_group_id", + "shard_group_type", + b"shard_group_type", + "tile_assignment_devices", + b"tile_assignment_devices", + "tile_assignment_dimensions", + b"tile_assignment_dimensions", + "tile_shape", + b"tile_shape", + "tuple_shardings", + b"tuple_shardings", + "type", + b"type", + ], + ) -> None: ... + +global___OpSharding = OpSharding + +@typing.final +class ReplicaGroup(google.protobuf.message.Message): + """Describes the replica groups in a cross replica op (e.g., all-reduce and + all-to-all). + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REPLICA_IDS_FIELD_NUMBER: builtins.int + @property + def replica_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The ids of the replicas that belongs to the same group. The ordering of the + ids matters in some ops (e.g., all-to-all). + """ + + def __init__(self, *, replica_ids: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["replica_ids", b"replica_ids"]) -> None: ... + +global___ReplicaGroup = ReplicaGroup + +@typing.final +class IotaReplicaGroupListProto(google.protobuf.message.Message): + """Represents a list of replica groups (a list of list of devices) with + reshaping and transposing an iota array (iota tile assignment). Can be used + to represent certain common patterns of device lists in a compact, scalable + format. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NUM_REPLICA_GROUPS_FIELD_NUMBER: builtins.int + NUM_DEVICES_PER_GROUP_FIELD_NUMBER: builtins.int + IOTA_RESHAPE_DIMS_FIELD_NUMBER: builtins.int + IOTA_TRANSPOSE_PERM_FIELD_NUMBER: builtins.int + num_replica_groups: builtins.int + """Number of replica groups.""" + num_devices_per_group: builtins.int + """Number of devices per group.""" + @property + def iota_reshape_dims(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The dimensions used to reshape the 1D iota array of device IDs.""" + + @property + def iota_transpose_perm(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The dimension permutations to transposed the iota array reshaped to + iota_reshape_dims. This must have the same size as iota_reshape_dims. + """ + + def __init__( + self, + *, + num_replica_groups: builtins.int | None = ..., + num_devices_per_group: builtins.int | None = ..., + iota_reshape_dims: collections.abc.Iterable[builtins.int] | None = ..., + iota_transpose_perm: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "iota_reshape_dims", + b"iota_reshape_dims", + "iota_transpose_perm", + b"iota_transpose_perm", + "num_devices_per_group", + b"num_devices_per_group", + "num_replica_groups", + b"num_replica_groups", + ], + ) -> None: ... + +global___IotaReplicaGroupListProto = IotaReplicaGroupListProto + +@typing.final +class CollectiveDeviceListProto(google.protobuf.message.Message): + """Represents a series of devices participating in a collective operation (e.g., + all-reduce and all-to-all). While this directly translates to a list of + replica groups, it may be used to represent these lists in a compact form. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REPLICA_GROUPS_FIELD_NUMBER: builtins.int + IOTA_REPLICA_GROUP_LIST_FIELD_NUMBER: builtins.int + @property + def replica_groups(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ReplicaGroup]: + """ReplicaGroupV1: List of replica groups. Legacy way of representing device + lists. + """ + + @property + def iota_replica_group_list(self) -> global___IotaReplicaGroupListProto: + """ReplicaGroupV2: Represents a list of replica groups with reshaping and + transposing an iota array. + """ + + def __init__( + self, + *, + replica_groups: collections.abc.Iterable[global___ReplicaGroup] | None = ..., + iota_replica_group_list: global___IotaReplicaGroupListProto | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["iota_replica_group_list", b"iota_replica_group_list"]) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal["iota_replica_group_list", b"iota_replica_group_list", "replica_groups", b"replica_groups"], + ) -> None: ... + +global___CollectiveDeviceListProto = CollectiveDeviceListProto + +@typing.final +class SourceTarget(google.protobuf.message.Message): + """Describes the source target pair in the collective permute op.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SOURCE_FIELD_NUMBER: builtins.int + TARGET_FIELD_NUMBER: builtins.int + source: builtins.int + target: builtins.int + def __init__(self, *, source: builtins.int | None = ..., target: builtins.int | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["source", b"source", "target", b"target"]) -> None: ... + +global___SourceTarget = SourceTarget + +@typing.final +class PrecisionConfig(google.protobuf.message.Message): + """Used to indicate the precision configuration. It has backend specific + meaning. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _Precision: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _PrecisionEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[PrecisionConfig._Precision.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + DEFAULT: PrecisionConfig._Precision.ValueType # 0 + HIGH: PrecisionConfig._Precision.ValueType # 1 + HIGHEST: PrecisionConfig._Precision.ValueType # 2 + PACKED_NIBBLE: PrecisionConfig._Precision.ValueType # 3 + """Each U8/S8 value in a tensor actually represents 2 nibble values.""" + + class Precision(_Precision, metaclass=_PrecisionEnumTypeWrapper): ... + DEFAULT: PrecisionConfig.Precision.ValueType # 0 + HIGH: PrecisionConfig.Precision.ValueType # 1 + HIGHEST: PrecisionConfig.Precision.ValueType # 2 + PACKED_NIBBLE: PrecisionConfig.Precision.ValueType # 3 + """Each U8/S8 value in a tensor actually represents 2 nibble values.""" + + class _Algorithm: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _AlgorithmEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[PrecisionConfig._Algorithm.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + ALG_UNSET: PrecisionConfig._Algorithm.ValueType # 0 + """If the algorithm is `ALG_UNSET`, we will decide the algorithm based on + the operand_precision values (for now). + """ + ALG_DOT_ANY_F8_ANY_F8_F32: PrecisionConfig._Algorithm.ValueType # 1 + """The storage type can be any 8-bit floating point type.""" + ALG_DOT_ANY_F8_ANY_F8_F32_FAST_ACCUM: PrecisionConfig._Algorithm.ValueType # 2 + """The storage type can be any 8-bit floating point type. Intermediate + results will not periodically be promoted to a higher precision. This + corresponds to CUBLASLT_MATMUL_DESC_FAST_ACCUM. 
Triton's + maxNumImpreciseAcc=32 setting may be similar. + """ + ALG_DOT_F16_F16_F16: PrecisionConfig._Algorithm.ValueType # 3 + ALG_DOT_F16_F16_F32: PrecisionConfig._Algorithm.ValueType # 4 + ALG_DOT_BF16_BF16_BF16: PrecisionConfig._Algorithm.ValueType # 5 + ALG_DOT_BF16_BF16_F32: PrecisionConfig._Algorithm.ValueType # 6 + ALG_DOT_BF16_BF16_F32_X3: PrecisionConfig._Algorithm.ValueType # 7 + """An algorithm which uses 3 BF16_BF16_F32 matmuls to achieve better + precision. + """ + ALG_DOT_BF16_BF16_F32_X6: PrecisionConfig._Algorithm.ValueType # 8 + """An algorithm which uses 6 BF16_BF16_F32 matmuls to achieve better + precision (similar to F32). + """ + ALG_DOT_TF32_TF32_F32: PrecisionConfig._Algorithm.ValueType # 9 + ALG_DOT_TF32_TF32_F32_X3: PrecisionConfig._Algorithm.ValueType # 10 + """An algorithm which uses 3 TF32_TF32_F32 matmuls to achieve better + precision (similar to F32). + """ + ALG_DOT_F32_F32_F32: PrecisionConfig._Algorithm.ValueType # 11 + ALG_DOT_F64_F64_F64: PrecisionConfig._Algorithm.ValueType # 12 + + class Algorithm(_Algorithm, metaclass=_AlgorithmEnumTypeWrapper): + """The algorithm used to evaluate the instruction. + + The naming convention for the dot instruction is + ALG_DOT_{A_TYPE}_{B_TYPE}_{ACCUM_TYPE}[_X{NUM_OPS}] where A_TYPE, B_TYPE + and ACCUM_TYPE correspond to the types in the "primitive dot operations" + (such as TensorCore operations) and NUM_OPS is the number of such + operations used per "primitive tile". When the NUM_OPS + field is skipped, it is assumed to be 1. The types mentioned in the name + are independent of the storage types. + + In general ATYPE and BTYPE are the precisions that the LHS and RHS of the + operation are rounded to and ACCUMTYPE is the accumulation type. If a + backend does not support the given algorithm, an error is raised. The + Algorithm enum is intended to eventually replace the Precision enum. 
+ """ + + ALG_UNSET: PrecisionConfig.Algorithm.ValueType # 0 + """If the algorithm is `ALG_UNSET`, we will decide the algorithm based on + the operand_precision values (for now). + """ + ALG_DOT_ANY_F8_ANY_F8_F32: PrecisionConfig.Algorithm.ValueType # 1 + """The storage type can be any 8-bit floating point type.""" + ALG_DOT_ANY_F8_ANY_F8_F32_FAST_ACCUM: PrecisionConfig.Algorithm.ValueType # 2 + """The storage type can be any 8-bit floating point type. Intermediate + results will not periodically be promoted to a higher precision. This + corresponds to CUBLASLT_MATMUL_DESC_FAST_ACCUM. Triton's + maxNumImpreciseAcc=32 setting may be similar. + """ + ALG_DOT_F16_F16_F16: PrecisionConfig.Algorithm.ValueType # 3 + ALG_DOT_F16_F16_F32: PrecisionConfig.Algorithm.ValueType # 4 + ALG_DOT_BF16_BF16_BF16: PrecisionConfig.Algorithm.ValueType # 5 + ALG_DOT_BF16_BF16_F32: PrecisionConfig.Algorithm.ValueType # 6 + ALG_DOT_BF16_BF16_F32_X3: PrecisionConfig.Algorithm.ValueType # 7 + """An algorithm which uses 3 BF16_BF16_F32 matmuls to achieve better + precision. + """ + ALG_DOT_BF16_BF16_F32_X6: PrecisionConfig.Algorithm.ValueType # 8 + """An algorithm which uses 6 BF16_BF16_F32 matmuls to achieve better + precision (similar to F32). + """ + ALG_DOT_TF32_TF32_F32: PrecisionConfig.Algorithm.ValueType # 9 + ALG_DOT_TF32_TF32_F32_X3: PrecisionConfig.Algorithm.ValueType # 10 + """An algorithm which uses 3 TF32_TF32_F32 matmuls to achieve better + precision (similar to F32). + """ + ALG_DOT_F32_F32_F32: PrecisionConfig.Algorithm.ValueType # 11 + ALG_DOT_F64_F64_F64: PrecisionConfig.Algorithm.ValueType # 12 + + OPERAND_PRECISION_FIELD_NUMBER: builtins.int + ALGORITHM_FIELD_NUMBER: builtins.int + algorithm: global___PrecisionConfig.Algorithm.ValueType + """Currently doesn't do anything, but we plan to support it for dot and + possibly more instructions. + + TODO(b/316147294): Support this on GPU and add this to StableHLO as well. 
+ + If this is set, then `operand_precision` should be set to DEFAULT and it + will be ignored. + """ + @property + def operand_precision( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___PrecisionConfig.Precision.ValueType]: ... + def __init__( + self, + *, + operand_precision: collections.abc.Iterable[global___PrecisionConfig.Precision.ValueType] | None = ..., + algorithm: global___PrecisionConfig.Algorithm.ValueType | None = ..., + ) -> None: ... + def ClearField( + self, field_name: typing.Literal["algorithm", b"algorithm", "operand_precision", b"operand_precision"] + ) -> None: ... + +global___PrecisionConfig = PrecisionConfig + +@typing.final +class ParameterReplication(google.protobuf.message.Message): + """Describes whether all data-parallelism replicas will receive the same + parameter data at each buffer. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REPLICATED_AT_LEAF_BUFFERS_FIELD_NUMBER: builtins.int + @property + def replicated_at_leaf_buffers(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: + """A list of boolean values for the flattened leaf buffers. Each value + indicates whether the corresponding leaf buffer is replicated. + + If this field is empty, it means no buffer is replicated. Otherwise, the + number of elements in this field must match the number of leaf buffers in + the HLO instruction's shape. + """ + + def __init__(self, *, replicated_at_leaf_buffers: collections.abc.Iterable[builtins.bool] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["replicated_at_leaf_buffers", b"replicated_at_leaf_buffers"]) -> None: ... + +global___ParameterReplication = ParameterReplication + +@typing.final +class WhileLoopBackendConfig(google.protobuf.message.Message): + """A backend-config for kWhile loops that stores the loop's trip count, if it is + known. 
+ + This is useful for backends that can implement a `for i in 0..N` loop more + efficiently than a `while` loop. For example, on GPUs, we can implement a + `for i in 0..N` loop by enqueueing the kernels for the loop body N times, + whereas implementing a `while` loop requires a host-device sync on each + iteration. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class KnownTripCount(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + N_FIELD_NUMBER: builtins.int + n: builtins.int + def __init__(self, *, n: builtins.int | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["n", b"n"]) -> None: ... + + KNOWN_TRIP_COUNT_FIELD_NUMBER: builtins.int + @property + def known_trip_count(self) -> global___WhileLoopBackendConfig.KnownTripCount: + """This indirection lets us distinguish between known-trip-count == 0 and + unknown-trip-count. + """ + + def __init__(self, *, known_trip_count: global___WhileLoopBackendConfig.KnownTripCount | None = ...) -> None: ... + def HasField(self, field_name: typing.Literal["known_trip_count", b"known_trip_count"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["known_trip_count", b"known_trip_count"]) -> None: ... + +global___WhileLoopBackendConfig = WhileLoopBackendConfig + +@typing.final +class OutputOperandAliasing(google.protobuf.message.Message): + """Specifies a pair of output/operand buffers that alias each other for + kCustomCall and kFusion + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + OUTPUT_SHAPE_INDEX_FIELD_NUMBER: builtins.int + OPERAND_INDEX_FIELD_NUMBER: builtins.int + OPERAND_SHAPE_INDEX_FIELD_NUMBER: builtins.int + operand_index: builtins.int + @property + def output_shape_index(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... 
+ @property + def operand_shape_index(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__( + self, + *, + output_shape_index: collections.abc.Iterable[builtins.int] | None = ..., + operand_index: builtins.int | None = ..., + operand_shape_index: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "operand_index", + b"operand_index", + "operand_shape_index", + b"operand_shape_index", + "output_shape_index", + b"output_shape_index", + ], + ) -> None: ... + +global___OutputOperandAliasing = OutputOperandAliasing + +@typing.final +class OriginalArrayProto(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEAF_SHAPE_INDEX_FIELD_NUMBER: builtins.int + INSTRUCTION_NAME_FIELD_NUMBER: builtins.int + SHAPE_INDEX_FIELD_NUMBER: builtins.int + instruction_name: builtins.str + @property + def leaf_shape_index(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def shape_index(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__( + self, + *, + leaf_shape_index: collections.abc.Iterable[builtins.int] | None = ..., + instruction_name: builtins.str | None = ..., + shape_index: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "instruction_name", b"instruction_name", "leaf_shape_index", b"leaf_shape_index", "shape_index", b"shape_index" + ], + ) -> None: ... + +global___OriginalArrayProto = OriginalArrayProto + +@typing.final +class OriginalValueProto(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEAVES_FIELD_NUMBER: builtins.int + @property + def leaves(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___OriginalArrayProto]: ... 
+ def __init__(self, *, leaves: collections.abc.Iterable[global___OriginalArrayProto] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["leaves", b"leaves"]) -> None: ... + +global___OriginalValueProto = OriginalValueProto diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/xla_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/xla_pb2.pyi new file mode 100644 index 0000000000..17820b9e12 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/compiler/xla/xla_pb2.pyi @@ -0,0 +1,2558 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2017 The OpenXLA Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+============================================================================== +""" + +import builtins +import collections.abc +import sys +import typing + +import google.protobuf.any_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import tensorflow.compiler.xla.service.hlo_pb2 +import tensorflow.compiler.xla.xla_data_pb2 + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class CompilationEnvironmentsProto(google.protobuf.message.Message): + """Proto version of `xla::CompilationEnvironments`.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENVIRONMENTS_FIELD_NUMBER: builtins.int + @property + def environments( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.any_pb2.Any]: ... + def __init__(self, *, environments: collections.abc.Iterable[google.protobuf.any_pb2.Any] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["environments", b"environments"]) -> None: ... + +global___CompilationEnvironmentsProto = CompilationEnvironmentsProto + +@typing.final +class DebugOptions(google.protobuf.message.Message): + """Debugging options for XLA. These options may change at any time - there are + no guarantees about backward or forward compatibility for these fields. + + Debug options naming and organization: + + 1. Backend-agnostic options: `xla_$flag_name` - go first, and sorted + alphabetically by the flag name. + + 2. Backend-specific options: `xla_$backend_$flag_name` - must be in the + corresponding backend section, and sorted alphabetically by the flag name. + --------------------------------------------------------------------------// + XLA backend-agnostic options. 
+ --------------------------------------------------------------------------// + go/keep-sorted start + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _ShapeChecks: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _ShapeChecksEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._ShapeChecks.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + IGNORE: DebugOptions._ShapeChecks.ValueType # 0 + """Do not insert any shape checks for dynamically shaped operations; output + buffers might contain garbage data if shapes don't match. + """ + RUNTIME: DebugOptions._ShapeChecks.ValueType # 1 + """Check shapes at runtime, will insert an extra synchronization if shapes + cannot be proven correct at compile time. + """ + COMPILE_TIME: DebugOptions._ShapeChecks.ValueType # 2 + """Will refuse to compile any program where shape correctness can not be + established at compile time. + """ + + class ShapeChecks(_ShapeChecks, metaclass=_ShapeChecksEnumTypeWrapper): ... + IGNORE: DebugOptions.ShapeChecks.ValueType # 0 + """Do not insert any shape checks for dynamically shaped operations; output + buffers might contain garbage data if shapes don't match. + """ + RUNTIME: DebugOptions.ShapeChecks.ValueType # 1 + """Check shapes at runtime, will insert an extra synchronization if shapes + cannot be proven correct at compile time. + """ + COMPILE_TIME: DebugOptions.ShapeChecks.ValueType # 2 + """Will refuse to compile any program where shape correctness can not be + established at compile time. 
+ """ + + class _StepMarkerLocation: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _StepMarkerLocationEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._StepMarkerLocation.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + STEP_MARK_AT_ENTRY: DebugOptions._StepMarkerLocation.ValueType # 0 + """Generate a step marker at the program entry. This handles the case where + each step is done by one or multiple program execution(s). Only the first + program will be tagged for generating a step marker at the program entry. + This is the default. + """ + STEP_MARK_AT_TOP_LEVEL_WHILE_LOOP: DebugOptions._StepMarkerLocation.ValueType # 1 + """Generate a step marker at each iteration of the top level while loop, + which is assumed to be a training loop. + """ + STEP_MARK_AT_SECOND_LEVEL_WHILE_LOOP: DebugOptions._StepMarkerLocation.ValueType # 3 + """Generate a step marker at each iteration of the second level while loops, + which is assumed to be a training or eval loop. + """ + STEP_MARK_NONE: DebugOptions._StepMarkerLocation.ValueType # 2 + """No step marker generated.""" + + class StepMarkerLocation(_StepMarkerLocation, metaclass=_StepMarkerLocationEnumTypeWrapper): ... + STEP_MARK_AT_ENTRY: DebugOptions.StepMarkerLocation.ValueType # 0 + """Generate a step marker at the program entry. This handles the case where + each step is done by one or multiple program execution(s). Only the first + program will be tagged for generating a step marker at the program entry. + This is the default. + """ + STEP_MARK_AT_TOP_LEVEL_WHILE_LOOP: DebugOptions.StepMarkerLocation.ValueType # 1 + """Generate a step marker at each iteration of the top level while loop, + which is assumed to be a training loop. 
+ """ + STEP_MARK_AT_SECOND_LEVEL_WHILE_LOOP: DebugOptions.StepMarkerLocation.ValueType # 3 + """Generate a step marker at each iteration of the second level while loops, + which is assumed to be a training or eval loop. + """ + STEP_MARK_NONE: DebugOptions.StepMarkerLocation.ValueType # 2 + """No step marker generated.""" + + class _CollectiveOpType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _CollectiveOpTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._CollectiveOpType.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + NOOP: DebugOptions._CollectiveOpType.ValueType # 0 + ALLREDUCE: DebugOptions._CollectiveOpType.ValueType # 1 + ALLGATHER: DebugOptions._CollectiveOpType.ValueType # 2 + REDUCESCATTER: DebugOptions._CollectiveOpType.ValueType # 3 + COLLECTIVEBROADCAST: DebugOptions._CollectiveOpType.ValueType # 4 + ALLTOALL: DebugOptions._CollectiveOpType.ValueType # 5 + COLLECTIVEPERMUTE: DebugOptions._CollectiveOpType.ValueType # 6 + + class CollectiveOpType(_CollectiveOpType, metaclass=_CollectiveOpTypeEnumTypeWrapper): + """Enum to define all collective ops + that xla supports. 
+ """ + + NOOP: DebugOptions.CollectiveOpType.ValueType # 0 + ALLREDUCE: DebugOptions.CollectiveOpType.ValueType # 1 + ALLGATHER: DebugOptions.CollectiveOpType.ValueType # 2 + REDUCESCATTER: DebugOptions.CollectiveOpType.ValueType # 3 + COLLECTIVEBROADCAST: DebugOptions.CollectiveOpType.ValueType # 4 + ALLTOALL: DebugOptions.CollectiveOpType.ValueType # 5 + COLLECTIVEPERMUTE: DebugOptions.CollectiveOpType.ValueType # 6 + + class _CommandBufferCmdType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _CommandBufferCmdTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._CommandBufferCmdType.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + INVALID: DebugOptions._CommandBufferCmdType.ValueType # 0 + FUSION: DebugOptions._CommandBufferCmdType.ValueType # 1 + CUBLAS: DebugOptions._CommandBufferCmdType.ValueType # 2 + CUDNN: DebugOptions._CommandBufferCmdType.ValueType # 3 + COLLECTIVES: DebugOptions._CommandBufferCmdType.ValueType # 4 + CONDITIONALS: DebugOptions._CommandBufferCmdType.ValueType # 5 + CUSTOM_CALL: DebugOptions._CommandBufferCmdType.ValueType # 6 + CUBLASLT: DebugOptions._CommandBufferCmdType.ValueType # 7 + + class CommandBufferCmdType(_CommandBufferCmdType, metaclass=_CommandBufferCmdTypeEnumTypeWrapper): + """Commands are categorized into 5 types: + FUSION represents regular fusion kernels. + CUBLAS/CUBLASLT, CUDNN, and COLLECTIVES represent library calls. + CONDITIONALS represents control flow. 
+ """ + + INVALID: DebugOptions.CommandBufferCmdType.ValueType # 0 + FUSION: DebugOptions.CommandBufferCmdType.ValueType # 1 + CUBLAS: DebugOptions.CommandBufferCmdType.ValueType # 2 + CUDNN: DebugOptions.CommandBufferCmdType.ValueType # 3 + COLLECTIVES: DebugOptions.CommandBufferCmdType.ValueType # 4 + CONDITIONALS: DebugOptions.CommandBufferCmdType.ValueType # 5 + CUSTOM_CALL: DebugOptions.CommandBufferCmdType.ValueType # 6 + CUBLASLT: DebugOptions.CommandBufferCmdType.ValueType # 7 + + class _PartitioningAlgorithm: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _PartitioningAlgorithmEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._PartitioningAlgorithm.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + PARTITIONING_ALGORITHM_NOOP: DebugOptions._PartitioningAlgorithm.ValueType # 0 + PARTITIONING_ALGORITHM_EXP0: DebugOptions._PartitioningAlgorithm.ValueType # 1 + PARTITIONING_ALGORITHM_EXP1: DebugOptions._PartitioningAlgorithm.ValueType # 2 + PARTITIONING_ALGORITHM_EXP2: DebugOptions._PartitioningAlgorithm.ValueType # 3 + + class PartitioningAlgorithm(_PartitioningAlgorithm, metaclass=_PartitioningAlgorithmEnumTypeWrapper): ... 
+ PARTITIONING_ALGORITHM_NOOP: DebugOptions.PartitioningAlgorithm.ValueType # 0 + PARTITIONING_ALGORITHM_EXP0: DebugOptions.PartitioningAlgorithm.ValueType # 1 + PARTITIONING_ALGORITHM_EXP1: DebugOptions.PartitioningAlgorithm.ValueType # 2 + PARTITIONING_ALGORITHM_EXP2: DebugOptions.PartitioningAlgorithm.ValueType # 3 + + class _WhileLoopUnrolling: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _WhileLoopUnrollingEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._WhileLoopUnrolling.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + WHILE_LOOP_UNROLLING_NO_UNROLL: DebugOptions._WhileLoopUnrolling.ValueType # 0 + WHILE_LOOP_UNROLLING_DOUBLE_BUFFER: DebugOptions._WhileLoopUnrolling.ValueType # 1 + """Has the same effect as setting + `xla_gpu_enable_while_loop_double_buffering`. + """ + WHILE_LOOP_UNROLLING_FULL_UNROLL: DebugOptions._WhileLoopUnrolling.ValueType # 2 + """Enables full loop unrolling using the same strategy as `DOUBLE_BUFFER`.""" + + class WhileLoopUnrolling(_WhileLoopUnrolling, metaclass=_WhileLoopUnrollingEnumTypeWrapper): ... + WHILE_LOOP_UNROLLING_NO_UNROLL: DebugOptions.WhileLoopUnrolling.ValueType # 0 + WHILE_LOOP_UNROLLING_DOUBLE_BUFFER: DebugOptions.WhileLoopUnrolling.ValueType # 1 + """Has the same effect as setting + `xla_gpu_enable_while_loop_double_buffering`. 
+ """ + WHILE_LOOP_UNROLLING_FULL_UNROLL: DebugOptions.WhileLoopUnrolling.ValueType # 2 + """Enables full loop unrolling using the same strategy as `DOUBLE_BUFFER`.""" + + class _AutotuneCacheMode: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _AutotuneCacheModeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DebugOptions._AutotuneCacheMode.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + AUTOTUNE_CACHE_MODE_UNSPECIFIED: DebugOptions._AutotuneCacheMode.ValueType # 0 + AUTOTUNE_CACHE_MODE_UPDATE: DebugOptions._AutotuneCacheMode.ValueType # 1 + """If the cache exists per fusion autotuner loads it and terminates, + otherwise runs autotuner and dumps the result. + """ + AUTOTUNE_CACHE_MODE_READ: DebugOptions._AutotuneCacheMode.ValueType # 2 + """Sets readonly access to the cache for the per fusion autotuner. Same as + above, but doesn't dump anything. + """ + + class AutotuneCacheMode(_AutotuneCacheMode, metaclass=_AutotuneCacheModeEnumTypeWrapper): ... + AUTOTUNE_CACHE_MODE_UNSPECIFIED: DebugOptions.AutotuneCacheMode.ValueType # 0 + AUTOTUNE_CACHE_MODE_UPDATE: DebugOptions.AutotuneCacheMode.ValueType # 1 + """If the cache exists per fusion autotuner loads it and terminates, + otherwise runs autotuner and dumps the result. + """ + AUTOTUNE_CACHE_MODE_READ: DebugOptions.AutotuneCacheMode.ValueType # 2 + """Sets readonly access to the cache for the per fusion autotuner. Same as + above, but doesn't dump anything. + """ + + @typing.final + class XlaBackendExtraOptionsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__(self, *, key: builtins.str | None = ..., value: builtins.str | None = ...) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + XLA_CPU_ENABLE_CONCURRENCY_OPTIMIZED_SCHEDULER_FIELD_NUMBER: builtins.int + XLA_CPU_ENABLE_FAST_MATH_FIELD_NUMBER: builtins.int + XLA_CPU_ENABLE_FAST_MIN_MAX_FIELD_NUMBER: builtins.int + XLA_CPU_FAST_MATH_HONOR_DIVISION_FIELD_NUMBER: builtins.int + XLA_CPU_FAST_MATH_HONOR_FUNCTIONS_FIELD_NUMBER: builtins.int + XLA_CPU_FAST_MATH_HONOR_INFS_FIELD_NUMBER: builtins.int + XLA_CPU_FAST_MATH_HONOR_NANS_FIELD_NUMBER: builtins.int + XLA_CPU_USE_THUNK_RUNTIME_FIELD_NUMBER: builtins.int + XLA_CPU_PARALLEL_CODEGEN_SPLIT_COUNT_FIELD_NUMBER: builtins.int + XLA_CPU_PREFER_VECTOR_WIDTH_FIELD_NUMBER: builtins.int + XLA_GPU_EXPERIMENTAL_AUTOTUNE_CACHE_MODE_FIELD_NUMBER: builtins.int + XLA_GPU_EXPERIMENTAL_DISABLE_BINARY_LIBRARIES_FIELD_NUMBER: builtins.int + XLA_GPU_EXPERIMENTAL_ENABLE_TRITON_SOFTMAX_PRIORITY_FUSION_FIELD_NUMBER: builtins.int + XLA_GPU_UNSUPPORTED_ENABLE_TRITON_GEMM_FIELD_NUMBER: builtins.int + XLA_HLO_GRAPH_ADDRESSES_FIELD_NUMBER: builtins.int + XLA_HLO_PROFILE_FIELD_NUMBER: builtins.int + XLA_DISABLE_HLO_PASSES_FIELD_NUMBER: builtins.int + XLA_ENABLE_HLO_PASSES_ONLY_FIELD_NUMBER: builtins.int + XLA_DISABLE_ALL_HLO_PASSES_FIELD_NUMBER: builtins.int + XLA_BACKEND_OPTIMIZATION_LEVEL_FIELD_NUMBER: builtins.int + XLA_EMBED_IR_IN_EXECUTABLE_FIELD_NUMBER: builtins.int + XLA_ELIMINATE_HLO_IMPLICIT_BROADCAST_FIELD_NUMBER: builtins.int + XLA_CPU_MULTI_THREAD_EIGEN_FIELD_NUMBER: builtins.int + XLA_GPU_CUDA_DATA_DIR_FIELD_NUMBER: builtins.int + XLA_GPU_FTZ_FIELD_NUMBER: builtins.int + XLA_LLVM_ENABLE_ALIAS_SCOPE_METADATA_FIELD_NUMBER: builtins.int + XLA_LLVM_ENABLE_NOALIAS_METADATA_FIELD_NUMBER: builtins.int + XLA_LLVM_ENABLE_INVARIANT_LOAD_METADATA_FIELD_NUMBER: builtins.int + XLA_LLVM_DISABLE_EXPENSIVE_PASSES_FIELD_NUMBER: builtins.int + XLA_TEST_ALL_OUTPUT_LAYOUTS_FIELD_NUMBER: builtins.int + XLA_TEST_ALL_INPUT_LAYOUTS_FIELD_NUMBER: builtins.int + 
XLA_HLO_GRAPH_SHARDING_COLOR_FIELD_NUMBER: builtins.int + XLA_CPU_USE_MKL_DNN_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_FAST_MIN_MAX_FIELD_NUMBER: builtins.int + XLA_ALLOW_EXCESS_PRECISION_FIELD_NUMBER: builtins.int + XLA_GPU_CRASH_ON_VERIFICATION_FAILURES_FIELD_NUMBER: builtins.int + XLA_GPU_AUTOTUNE_LEVEL_FIELD_NUMBER: builtins.int + XLA_FORCE_HOST_PLATFORM_DEVICE_COUNT_FIELD_NUMBER: builtins.int + XLA_GPU_DISABLE_GPUASM_OPTIMIZATIONS_FIELD_NUMBER: builtins.int + XLA_GPU_SHAPE_CHECKS_FIELD_NUMBER: builtins.int + XLA_HLO_EVALUATOR_USE_FAST_PATH_FIELD_NUMBER: builtins.int + XLA_ALLOW_SCALAR_INDEX_DYNAMIC_OPS_FIELD_NUMBER: builtins.int + XLA_STEP_MARKER_LOCATION_FIELD_NUMBER: builtins.int + XLA_DUMP_TO_FIELD_NUMBER: builtins.int + XLA_DUMP_HLO_MODULE_RE_FIELD_NUMBER: builtins.int + XLA_DUMP_HLO_PASS_RE_FIELD_NUMBER: builtins.int + XLA_DUMP_HLO_AS_TEXT_FIELD_NUMBER: builtins.int + XLA_DUMP_HLO_AS_PROTO_FIELD_NUMBER: builtins.int + XLA_DUMP_HLO_AS_DOT_FIELD_NUMBER: builtins.int + XLA_DUMP_HLO_AS_URL_FIELD_NUMBER: builtins.int + XLA_DUMP_HLO_AS_HTML_FIELD_NUMBER: builtins.int + XLA_DUMP_FUSION_VISUALIZATION_FIELD_NUMBER: builtins.int + XLA_DUMP_HLO_SNAPSHOTS_FIELD_NUMBER: builtins.int + XLA_DUMP_INCLUDE_TIMESTAMP_FIELD_NUMBER: builtins.int + XLA_DUMP_MAX_HLO_MODULES_FIELD_NUMBER: builtins.int + XLA_DUMP_MODULE_METADATA_FIELD_NUMBER: builtins.int + XLA_DUMP_COMPRESS_PROTOS_FIELD_NUMBER: builtins.int + XLA_DUMP_HLO_AS_LONG_TEXT_FIELD_NUMBER: builtins.int + XLA_GPU_FORCE_CONV_NCHW_FIELD_NUMBER: builtins.int + XLA_GPU_FORCE_CONV_NHWC_FIELD_NUMBER: builtins.int + XLA_GPU_PTX_FILE_FIELD_NUMBER: builtins.int + XLA_GPU_DUMP_LLVMIR_FIELD_NUMBER: builtins.int + XLA_DUMP_ENABLE_MLIR_PRETTY_FORM_FIELD_NUMBER: builtins.int + XLA_GPU_ALGORITHM_DENYLIST_PATH_FIELD_NUMBER: builtins.int + XLA_TPU_DETECT_NAN_FIELD_NUMBER: builtins.int + XLA_TPU_DETECT_INF_FIELD_NUMBER: builtins.int + XLA_CPU_ENABLE_XPROF_TRACEME_FIELD_NUMBER: builtins.int + 
XLA_GPU_UNSAFE_FALLBACK_TO_DRIVER_ON_PTXAS_NOT_FOUND_FIELD_NUMBER: builtins.int + XLA_GPU_ASM_EXTRA_FLAGS_FIELD_NUMBER: builtins.int + XLA_MULTIHEAP_SIZE_CONSTRAINT_PER_HEAP_FIELD_NUMBER: builtins.int + XLA_DETAILED_LOGGING_FIELD_NUMBER: builtins.int + XLA_ENABLE_DUMPING_FIELD_NUMBER: builtins.int + XLA_GPU_FORCE_COMPILATION_PARALLELISM_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_LLVM_MODULE_COMPILATION_PARALLELISM_FIELD_NUMBER: builtins.int + XLA_GPU_DETERMINISTIC_OPS_FIELD_NUMBER: builtins.int + XLA_GPU_LLVM_IR_FILE_FIELD_NUMBER: builtins.int + XLA_GPU_DISABLE_ASYNC_COLLECTIVES_FIELD_NUMBER: builtins.int + XLA_GPU_ALL_REDUCE_COMBINE_THRESHOLD_BYTES_FIELD_NUMBER: builtins.int + XLA_GPU_ALL_GATHER_COMBINE_THRESHOLD_BYTES_FIELD_NUMBER: builtins.int + XLA_GPU_REDUCE_SCATTER_COMBINE_THRESHOLD_BYTES_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_ALL_GATHER_COMBINE_BY_DIM_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_REDUCE_SCATTER_COMBINE_BY_DIM_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_REASSOCIATION_FOR_CONVERTED_AR_FIELD_NUMBER: builtins.int + XLA_GPU_ALL_REDUCE_BLUECONNECT_NUM_DEVICES_PER_HOST_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_WHILE_LOOP_REDUCE_SCATTER_CODE_MOTION_FIELD_NUMBER: builtins.int + XLA_GPU_COLLECTIVE_INFLATION_FACTOR_FIELD_NUMBER: builtins.int + XLA_LLVM_FORCE_INLINE_BEFORE_SPLIT_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_CUDNN_FRONTEND_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_CUDNN_FMHA_FIELD_NUMBER: builtins.int + XLA_GPU_FUSED_ATTENTION_USE_CUDNN_RNG_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_CUDNN_LAYER_NORM_FIELD_NUMBER: builtins.int + XLA_DUMP_DISABLE_METADATA_FIELD_NUMBER: builtins.int + XLA_DUMP_HLO_PIPELINE_RE_FIELD_NUMBER: builtins.int + XLA_GPU_STRICT_CONV_ALGORITHM_PICKER_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_CUSTOM_FUSIONS_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_CUSTOM_FUSIONS_RE_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_DYNAMIC_SLICE_FUSION_FIELD_NUMBER: builtins.int + 
XLA_GPU_NCCL_TERMINATION_TIMEOUT_SECONDS_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_SHARED_CONSTANTS_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_CUBLASLT_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_COMMAND_BUFFER_FIELD_NUMBER: builtins.int + XLA_GPU_GRAPH_MIN_GRAPH_SIZE_FIELD_NUMBER: builtins.int + XLA_GPU_GRAPH_ENABLE_CONCURRENT_REGION_FIELD_NUMBER: builtins.int + XLA_GPU_REDZONE_SCRATCH_MAX_MEGABYTES_FIELD_NUMBER: builtins.int + XLA_GPU_REDZONE_PADDING_BYTES_FIELD_NUMBER: builtins.int + XLA_CPU_USE_ACL_FIELD_NUMBER: builtins.int + XLA_CPU_STRICT_DOT_CONV_MATH_FIELD_NUMBER: builtins.int + XLA_GPU_USE_RUNTIME_FUSION_FIELD_NUMBER: builtins.int + XLA_DUMP_LATENCY_HIDING_SCHEDULE_FIELD_NUMBER: builtins.int + XLA_CPU_ENABLE_MLIR_TILING_AND_FUSION_FIELD_NUMBER: builtins.int + XLA_CPU_ENABLE_CUSTOM_MATMUL_TILING_FIELD_NUMBER: builtins.int + XLA_CPU_MATMUL_TILING_M_DIM_FIELD_NUMBER: builtins.int + XLA_CPU_MATMUL_TILING_N_DIM_FIELD_NUMBER: builtins.int + XLA_CPU_MATMUL_TILING_K_DIM_FIELD_NUMBER: builtins.int + XLA_CPU_ENABLE_MLIR_FUSION_OUTLINING_FIELD_NUMBER: builtins.int + XLA_CPU_ENABLE_EXPERIMENTAL_DEALLOCATION_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_LATENCY_HIDING_SCHEDULER_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_HIGHEST_PRIORITY_ASYNC_STREAM_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_ANALYTICAL_LATENCY_ESTIMATOR_FIELD_NUMBER: builtins.int + XLA_GPU_LHS_ENABLE_GPU_ASYNC_TRACKER_FIELD_NUMBER: builtins.int + XLA_GPU_PGLE_PROFILE_FILE_OR_DIRECTORY_PATH_FIELD_NUMBER: builtins.int + XLA_GPU_MEMORY_LIMIT_SLOP_FACTOR_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_PIPELINED_COLLECTIVES_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_PIPELINED_ALL_REDUCE_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_PIPELINED_ALL_GATHER_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_PIPELINED_REDUCE_SCATTER_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_PIPELINED_P2P_FIELD_NUMBER: builtins.int + XLA_GPU_RUN_POST_LAYOUT_COLLECTIVE_PIPELINER_FIELD_NUMBER: builtins.int + 
XLA_GPU_COLLECTIVE_PERMUTE_DECOMPOSER_THRESHOLD_FIELD_NUMBER: builtins.int + XLA_PARTITIONING_ALGORITHM_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_TRITON_GEMM_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_CUDNN_INT8X32_CONVOLUTION_REORDERING_FIELD_NUMBER: builtins.int + XLA_GPU_TRITON_GEMM_ANY_FIELD_NUMBER: builtins.int + XLA_GPU_EXHAUSTIVE_TILING_SEARCH_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_PRIORITY_FUSION_FIELD_NUMBER: builtins.int + XLA_GPU_DUMP_AUTOTUNE_RESULTS_TO_FIELD_NUMBER: builtins.int + XLA_GPU_LOAD_AUTOTUNE_RESULTS_FROM_FIELD_NUMBER: builtins.int + XLA_GPU_TARGET_CONFIG_FILENAME_FIELD_NUMBER: builtins.int + XLA_GPU_AUTO_SPMD_PARTITIONING_MEMORY_BUDGET_GB_FIELD_NUMBER: builtins.int + XLA_GPU_AUTO_SPMD_PARTITIONING_MEMORY_BUDGET_RATIO_FIELD_NUMBER: builtins.int + XLA_GPU_TRITON_GEMM_DISABLE_REDUCED_PRECISION_REDUCTION_FIELD_NUMBER: builtins.int + XLA_GPU_TRITON_FUSION_LEVEL_FIELD_NUMBER: builtins.int + XLA_GPU_DUMP_AUTOTUNED_GEMM_FUSIONS_FIELD_NUMBER: builtins.int + XLA_GPU_OVERRIDE_GEMM_AUTOTUNER_FIELD_NUMBER: builtins.int + XLA_GPU_COPY_INSERTION_USE_REGION_ANALYSIS_FIELD_NUMBER: builtins.int + XLA_GPU_COLLECT_COST_MODEL_STATS_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_SPLIT_K_AUTOTUNING_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_REDUCTION_EPILOGUE_FUSION_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_NCCL_CLIQUE_OPTIMIZATION_FIELD_NUMBER: builtins.int + XLA_GPU_MOCK_CUSTOM_CALLS_FIELD_NUMBER: builtins.int + XLA_GPU_CUBLAS_FALLBACK_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_WHILE_LOOP_DOUBLE_BUFFERING_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_WHILE_LOOP_UNROLLING_FIELD_NUMBER: builtins.int + XLA_GPU_ENSURE_MINOR_DOT_CONTRACTION_DIMS_FIELD_NUMBER: builtins.int + XLA_GPU_FILTER_KERNELS_SPILLING_REGISTERS_ON_AUTOTUNING_FIELD_NUMBER: builtins.int + XLA_DEBUG_BUFFER_ASSIGNMENT_SHOW_MAX_FIELD_NUMBER: builtins.int + XLA_GPU_LLVM_VERIFICATION_LEVEL_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_CUB_RADIX_SORT_FIELD_NUMBER: builtins.int + 
XLA_GPU_THRESHOLD_FOR_WINDOWED_EINSUM_MIB_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_TRITON_HOPPER_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_NCCL_USER_BUFFERS_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_NCCL_COMM_SPLITTING_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_NCCL_PER_STREAM_COMMS_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_LIBNVPTXCOMPILER_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_DOT_STRENGTH_REDUCTION_FIELD_NUMBER: builtins.int + XLA_GPU_MULTI_STREAMED_WINDOWED_EINSUM_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_BF16_6WAY_GEMM_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_BF16_3WAY_GEMM_FIELD_NUMBER: builtins.int + XLA_GPU_NCCL_COLLECTIVE_MAX_NCHANNELS_FIELD_NUMBER: builtins.int + XLA_GPU_NCCL_P2P_MAX_NCHANNELS_FIELD_NUMBER: builtins.int + XLA_GPU_MLIR_EMITTER_LEVEL_FIELD_NUMBER: builtins.int + XLA_GPU_GEMM_REWRITE_SIZE_THRESHOLD_FIELD_NUMBER: builtins.int + XLA_GPU_REQUIRE_COMPLETE_AOT_AUTOTUNE_RESULTS_FIELD_NUMBER: builtins.int + XLA_GPU_CUDNN_GEMM_FUSION_LEVEL_FIELD_NUMBER: builtins.int + XLA_GPU_USE_MEMCPY_LOCAL_P2P_FIELD_NUMBER: builtins.int + XLA_GPU_AUTOTUNE_MAX_SOLUTIONS_FIELD_NUMBER: builtins.int + XLA_DUMP_LARGE_CONSTANTS_FIELD_NUMBER: builtins.int + XLA_GPU_VERIFY_TRITON_FUSION_NUMERICS_FIELD_NUMBER: builtins.int + XLA_GPU_DUMP_AUTOTUNE_LOGS_TO_FIELD_NUMBER: builtins.int + XLA_REDUCE_WINDOW_REWRITE_BASE_LENGTH_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_HOST_MEMORY_OFFLOADING_FIELD_NUMBER: builtins.int + XLA_GPU_EXCLUDE_NONDETERMINISTIC_OPS_FIELD_NUMBER: builtins.int + XLA_GPU_NCCL_TERMINATE_ON_ERROR_FIELD_NUMBER: builtins.int + XLA_GPU_SHARD_AUTOTUNING_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_APPROX_COSTLY_COLLECTIVES_FIELD_NUMBER: builtins.int + XLA_GPU_KERNEL_CACHE_FILE_FIELD_NUMBER: builtins.int + XLA_GPU_UNSAFE_PIPELINED_LOOP_ANNOTATOR_FIELD_NUMBER: builtins.int + XLA_GPU_PER_FUSION_AUTOTUNE_CACHE_DIR_FIELD_NUMBER: builtins.int + XLA_CMD_BUFFER_TRACE_CACHE_SIZE_FIELD_NUMBER: builtins.int + 
XLA_GPU_TEMP_BUFFER_USE_SEPARATE_COLOR_FIELD_NUMBER: builtins.int + LEGACY_COMMAND_BUFFER_CUSTOM_CALL_TARGETS_FIELD_NUMBER: builtins.int + XLA_SYNTAX_SUGAR_ASYNC_OPS_FIELD_NUMBER: builtins.int + XLA_GPU_AUTOTUNE_GEMM_RTOL_FIELD_NUMBER: builtins.int + XLA_ENABLE_COMMAND_BUFFERS_DURING_PROFILING_FIELD_NUMBER: builtins.int + XLA_GPU_CUDNN_GEMM_MAX_PLANS_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_LIBNVJITLINK_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_TRITON_GEMM_INT4_FIELD_NUMBER: builtins.int + XLA_GPU_ASYNC_DOT_FIELD_NUMBER: builtins.int + XLA_GPU_ENABLE_PGLE_ACCURACY_CHECKER_FIELD_NUMBER: builtins.int + XLA_GPU_EXECUTABLE_WARN_STUCK_TIMEOUT_SECONDS_FIELD_NUMBER: builtins.int + XLA_GPU_EXECUTABLE_TERMINATE_TIMEOUT_SECONDS_FIELD_NUMBER: builtins.int + XLA_EXPERIMENTAL_IGNORE_CHANNEL_ID_FIELD_NUMBER: builtins.int + XLA_BACKEND_EXTRA_OPTIONS_FIELD_NUMBER: builtins.int + xla_cpu_enable_concurrency_optimized_scheduler: builtins.bool + """--------------------------------------------------------------------------// + XLA:CPU options. + --------------------------------------------------------------------------// + + go/keep-sorted start newline_separated=yes + + When true, XLA:CPU uses HLO module scheduler that is optimized for + extracting concurrency at the cost of extra memory: we extend the live + ranges of temporaries to allow XLA runtime to schedule independent + operations in parallel on separate threads. + """ + xla_cpu_enable_fast_math: builtins.bool + """When true, "unsafe" mathematical optimizations are enabled. These + transformations include but are not limited to: + + - Reducing the precision of operations (e.g. using an approximate sin + function, or transforming x/y into x * (1/y)). + - Assuming that operations never produce or consume NaN or +/- Inf (this + behavior can be adjusted using xla_cpu_fast_math_allow_{nans|infs}). + - Assuming that +0 and -0 are indistinguishable. 
+ """ + xla_cpu_enable_fast_min_max: builtins.bool + """When false we lower the Minimum and Maximum hlos in the CPU backend such + that Min(NotNaN, NaN) = Min(NaN, NotNaN) = NaN. In other words, if flag + this is false we always propagate NaNs through Min and Max. + + Note, this does not correspond to the exact same behavior as the gpu flag + below! + """ + xla_cpu_fast_math_honor_division: builtins.bool + """When xla_cpu_enable_fast_math is true then this controls whether we forbid + to use the reciprocal of an argument instead of division. Ignored when + xla_cpu_enable_fast_math is false. + """ + xla_cpu_fast_math_honor_functions: builtins.bool + """When xla_cpu_enable_fast_math is true then this controls whether we forbid + to approximate calculations for functions. Ignored when + xla_cpu_enable_fast_math is false. + """ + xla_cpu_fast_math_honor_infs: builtins.bool + """When xla_cpu_enable_fast_math is true then this controls whether we allow + operations to produce infinites. Ignored when xla_cpu_enable_fast_math is + false. + """ + xla_cpu_fast_math_honor_nans: builtins.bool + """When xla_cpu_enable_fast_math is true then this controls whether we allow + operations to produce NaNs. Ignored when xla_cpu_enable_fast_math is + false. + """ + xla_cpu_use_thunk_runtime: builtins.bool + """When true, XLA:CPU uses the thunk runtime to execute compiled program.""" + xla_cpu_parallel_codegen_split_count: builtins.int + """The number of parts to split the LLVM module into before codegen. This + allows XLA to compile all parts in parallel, and resolve kernel symbols + from different dynamic libraries. + """ + xla_cpu_prefer_vector_width: builtins.int + """A `prefer-vector-width` value that is passed to the LLVM backend. Default + value is `256` (AVX2 on x86 platforms). + """ + xla_gpu_experimental_autotune_cache_mode: global___DebugOptions.AutotuneCacheMode.ValueType + """--------------------------------------------------------------------------// + XLA:GPU options. 
+ --------------------------------------------------------------------------// + go/keep-sorted start newline_separated=yes skip_lines=1 + + Specifies the behavior of per kernel autotuning cache. + """ + xla_gpu_experimental_disable_binary_libraries: builtins.bool + """Experimentally disables binary libraries in GPU compiler passes.""" + xla_gpu_experimental_enable_triton_softmax_priority_fusion: builtins.bool + """Gates the experimental feature coupling the Triton Softmax pattern matcher + with priority fusion. + """ + xla_gpu_unsupported_enable_triton_gemm: builtins.bool + """Internal debug/testing flag to switch Triton GEMM fusions on or off.""" + xla_hlo_graph_addresses: builtins.bool + """--------------------------------------------------------------------------// + A bag of XLA options that have to be categorized. + --------------------------------------------------------------------------// + + Show addresses of HLO ops in graph dump. + """ + xla_hlo_profile: builtins.bool + """Instrument the computation to collect per-HLO cycle counts.""" + xla_disable_all_hlo_passes: builtins.bool + """Disables all HLO passes. Notes that some passes are necessary for + correctness and the invariants that must be satisfied by "fully optimized" + HLO are different for different devices and may change over time. The only + "guarantee", such as it is, is that if you compile XLA and dump the + optimized HLO for some graph, you should be able to run it again on the + same device with the same build of XLA. + """ + xla_backend_optimization_level: builtins.int + """Numerical optimization level for the XLA compiler backend; the specific + interpretation of this value is left to the backends. + """ + xla_embed_ir_in_executable: builtins.bool + """Embed the compiler IR as a string in the executable.""" + xla_eliminate_hlo_implicit_broadcast: builtins.bool + """Eliminate implicit broadcasts when lowering user computations to HLO + instructions; use explicit broadcast instead. 
+ """ + xla_cpu_multi_thread_eigen: builtins.bool + """When generating calls to Eigen in the CPU backend, use multi-threaded Eigen + mode. + """ + xla_gpu_cuda_data_dir: builtins.str + """Path to directory with cuda/ptx tools and libraries.""" + xla_gpu_ftz: builtins.bool + """Enable flush-to-zero semantics in the GPU backend.""" + xla_llvm_enable_alias_scope_metadata: builtins.bool + """If true, in LLVM-based backends, emit !alias.scope metadata in + generated IR. + """ + xla_llvm_enable_noalias_metadata: builtins.bool + """If true, in LLVM-based backends, emit !noalias metadata in the + generated IR. + """ + xla_llvm_enable_invariant_load_metadata: builtins.bool + """If true, in LLVM-based backends, emit !invariant.load metadata in + the generated IR. + """ + xla_llvm_disable_expensive_passes: builtins.bool + """If true, a set of expensive LLVM optimization passes will not be run.""" + xla_test_all_output_layouts: builtins.bool + """This is used by ClientLibraryTestBase::ComputeAndCompare*. If true, the + computation will run n! times with all permunations of layouts for the + output shape in rank n. For example, with a 3D shape, all permutations of + the set {0, 1, 2} are tried. + """ + xla_test_all_input_layouts: builtins.bool + """This is used by ClientLibraryTestBase::ComputeAndCompare*. If true, the + computation will run for all permunations of layouts of all input + arguments. For example, with 2 input arguments in 2D and 4D shapes, the + computation will run 2! * 4! times. + """ + xla_hlo_graph_sharding_color: builtins.bool + """Assign colors based on sharding information when generating the Graphviz + HLO graph. + """ + xla_cpu_use_mkl_dnn: builtins.bool + """Generate calls to MKL-DNN in the CPU backend.""" + xla_gpu_enable_fast_min_max: builtins.bool + """When true we lower the Minimum and Maximum hlos in the GPU backend such + that Min(NotNaN, NaN) = Min(NaN, NotNaN) = NotNaN. 
In other words, if flag + this is true we don't propagate NaNs through Min and Max. + + Note, this does not correspond to the exact same behavior as the cpu flag + above! + """ + xla_allow_excess_precision: builtins.bool + """Allows xla to increase the output precision of floating point operations + and all floating-point conversions to be simplified, including those + that affect the numerics. The `FloatNormalization` pass inserts many + `f32 -> bf16 -> f32` conversion pairs. These are not removed by the + `AlgebraicSimplifier`, as that will only simplify conversions that are + no-ops, e.g. `bf16 -> f32 -> bf16`. Removing these improves accuracy. + """ + xla_gpu_crash_on_verification_failures: builtins.bool + """Crashes the program when any kind of verification fails, instead of just + logging the failures. One example is cross checking of convolution results + among different algorithms. + """ + xla_gpu_autotune_level: builtins.int + """0: Disable gemm and convolution autotuning. + 1: Enable autotuning, but disable correctness checking. + 2: Also set output buffers to random numbers during autotuning. + 3: Also reset output buffers to random numbers after autotuning each + algorithm. + 4+: Also check for correct outputs and for out-of-bounds reads/writes. + + Default: 4. + """ + xla_force_host_platform_device_count: builtins.int + """Force the host platform to pretend that there are these many host + "devices". All these devices are backed by the same threadpool. Defaults + to 1. + + Setting this to anything other than 1 can increase overhead from context + switching but we let the user override this behavior to help run tests on + the host that run models in parallel across multiple devices. 
+ """ + xla_gpu_disable_gpuasm_optimizations: builtins.bool + """If set to true XLA:GPU invokes `ptxas` with -O0 (default is -O3).""" + xla_gpu_shape_checks: global___DebugOptions.ShapeChecks.ValueType + xla_hlo_evaluator_use_fast_path: builtins.bool + """Enable fast math with eigen in the HLO evaluator.""" + xla_allow_scalar_index_dynamic_ops: builtins.bool + """Temporary option to allow support for both the R1 and the scalar index + versions of DynamicSlice and DynamicUpdateSlice. Only used for testing. + """ + xla_step_marker_location: global___DebugOptions.StepMarkerLocation.ValueType + """Option to emit a target-specific marker to indicate the start of a training + step. The location of the marker (if any) is determined by the option + value. + """ + xla_dump_to: builtins.str + """ + BEGIN flags controlling dumping HLO modules for debugging. + + When dumping is enabled, HLO modules dumped at the very beginning and end + of compilation, and optionally also during the pass pipeline. + + In general, if you set one of these flags, we will try to infer reasonable + defaults for the others. For example: + + * Setting --xla_dump_to=/tmp/foo without specifying a format + with --xla_dump_hlo_as_* will turn on --xla_dump_hlo_as_text. + + * Setting --xla_dump_hlo_as_text without specifying --xla_dump_to will + dump to stdout. + + Directory to dump into. + """ + xla_dump_hlo_module_re: builtins.str + """If specified, will only dump modules which match this regexp.""" + xla_dump_hlo_pass_re: builtins.str + """If this flag is specified, will also dump HLO before and after passes that + match this regular expression. Set to .* to dump before/after all passes. + """ + xla_dump_hlo_as_text: builtins.bool + """Specifies the format that HLO is dumped in. Multiple of these may be + specified. 
+ """ + xla_dump_hlo_as_proto: builtins.bool + xla_dump_hlo_as_dot: builtins.bool + xla_dump_hlo_as_url: builtins.bool + xla_dump_hlo_as_html: builtins.bool + """Dump HLO graphs as an HTML (DOT -> SVG inlined in HTML)""" + xla_dump_fusion_visualization: builtins.bool + """Dump the visualization of the fusion progress.""" + xla_dump_hlo_snapshots: builtins.bool + """If true, every time an HLO module is run, we will dump an HloSnapshot + (essentially, a serialized module plus its inputs) to the --xla_dump_to + directory. + """ + xla_dump_include_timestamp: builtins.bool + """Include a timestamp in the dumped filenames.""" + xla_dump_max_hlo_modules: builtins.int + """Max number of hlo module dumps in a directory. Set to < 0 for unbounded.""" + xla_dump_module_metadata: builtins.bool + """Dump HloModuleMetadata as a text proto for each HLO module.""" + xla_dump_compress_protos: builtins.bool + """GZip-compress protos dumped via --xla_dump_hlo_as_proto.""" + xla_dump_hlo_as_long_text: builtins.bool + """Dump HLO in long text format. Ignored unless xla_dump_hlo_as_text is true.""" + xla_gpu_force_conv_nchw: builtins.bool + """ + END flags controlling dumping HLO modules. + + Overrides for XLA GPU's convolution layout heuristic. 
+ """ + xla_gpu_force_conv_nhwc: builtins.bool + xla_gpu_dump_llvmir: builtins.bool + """Whether to dump llvm ir when compiling to ptx.""" + xla_dump_enable_mlir_pretty_form: builtins.bool + """Whether to dump mlir using pretty print form.""" + xla_gpu_algorithm_denylist_path: builtins.str + """Denylist for cuDNN convolutions.""" + xla_tpu_detect_nan: builtins.bool + """Debug options that trigger execution errors when NaN or Inf are detected.""" + xla_tpu_detect_inf: builtins.bool + xla_cpu_enable_xprof_traceme: builtins.bool + """True if TraceMe annotations are enabled for XLA:CPU.""" + xla_gpu_unsafe_fallback_to_driver_on_ptxas_not_found: builtins.bool + """It is usually preferable to not fallback to the driver; it can consume more + memory, or have bugs. + """ + xla_gpu_asm_extra_flags: builtins.str + """Extra parameters to pass the GPU assembler.""" + xla_multiheap_size_constraint_per_heap: builtins.int + """Per-heap size constraint. New heaps will be created if per-heap max size is + reached. + """ + xla_detailed_logging: builtins.bool + """Enable detailed logging into vlog. If this is disabled, no + compilation summary will be printed in the end of computation. + """ + xla_enable_dumping: builtins.bool + """Enable HLO dumping. If this is disabled, no HLO modules will be dumped.""" + xla_gpu_force_compilation_parallelism: builtins.int + """Overrides normal multi-threaded compilation setting to use this many + threads. Setting to 0 (the default value) means no enforcement. + """ + xla_gpu_enable_llvm_module_compilation_parallelism: builtins.bool + xla_gpu_deterministic_ops: builtins.bool + """Guarantees run-to-run determinism. + This flag implies --xla_gpu_exclude_nondeterministic_ops and in addition + disables autotuning. 
+ """ + xla_gpu_all_reduce_combine_threshold_bytes: builtins.int + """Size threshold (in bytes) for the GPU collective combiners.""" + xla_gpu_all_gather_combine_threshold_bytes: builtins.int + xla_gpu_reduce_scatter_combine_threshold_bytes: builtins.int + xla_gpu_enable_all_gather_combine_by_dim: builtins.bool + """Combine all-gather/scatter-reduce ops with the same dimension or + irrespective of their dimension. + """ + xla_gpu_enable_reduce_scatter_combine_by_dim: builtins.bool + xla_gpu_enable_reassociation_for_converted_ar: builtins.bool + """Enable allreduce reassociation on allreduces that are converted to a wider + type. The resulting allreduce will be promoted to a wider-typed allreduce. + """ + xla_gpu_all_reduce_blueconnect_num_devices_per_host: builtins.int + """Number of devices per host for first stage of BlueConnect decomposition + pass. The pass will attempt to decompose all-reduces ops into a + ReduceScatter-AllReduce-AllGather sequence, with the initial ReduceScatter + being performed over all of the devices in the same host. Set to < 1 to + disable all-reduce decomposition. + """ + xla_gpu_enable_while_loop_reduce_scatter_code_motion: builtins.bool + """Enable hoisting of reduce-scatter out of while loops.""" + xla_gpu_collective_inflation_factor: builtins.int + """Inflate collective cost by running each collective multiple times.""" + xla_llvm_force_inline_before_split: builtins.bool + """Whether to force inline before llvm module split to get a more balanced + splits for parallel compilation. 
+ """ + xla_gpu_enable_cudnn_frontend: builtins.bool + """Whether to use the cuDNN frontend API for convolutions when possible.""" + xla_gpu_enable_cudnn_fmha: builtins.bool + xla_gpu_fused_attention_use_cudnn_rng: builtins.bool + xla_gpu_enable_cudnn_layer_norm: builtins.bool + """Rewrite layer norm patterns into cuDNN library calls.""" + xla_dump_disable_metadata: builtins.bool + """Disable dumping metadata in HLO dumps.""" + xla_dump_hlo_pipeline_re: builtins.str + """If this flag is specified, will only dump HLO before and after passes in + the pass pipeline that matches this regular expression. Default empty value + enables dumping in all pipelines. + """ + xla_gpu_strict_conv_algorithm_picker: builtins.bool + """If true, abort immediately when conv algorithm picker fails, rather than + logging a warning and proceeding with fallback. + """ + xla_gpu_enable_custom_fusions: builtins.bool + """If true, XLA will try to pattern match subgraphs of HLO operations into + custom fusions registered in the current process (pre-compiled hand written + kernels, e.g. various GEMM fusions writtent in CUTLASS). + """ + xla_gpu_enable_custom_fusions_re: builtins.str + """A regular expression enabling only a subset of custom fusions. Enabled only + if `xla_gpu_enable_custom_fusion` set to true. + """ + xla_gpu_enable_dynamic_slice_fusion: builtins.bool + """Enables address computation fusion to optimize dynamic-slice and + dynamic-update-slice operations around library calls. + """ + xla_gpu_nccl_termination_timeout_seconds: builtins.int + """Timeout in seconds before terminating jobs that are stuck in a NCCL + Rendezvous. Negative value disables the timeout and will not terminate. + """ + xla_gpu_enable_shared_constants: builtins.bool + """Enables shared constants for XLA/GPU. This allows large constants to be + shared among multiple GPU executables. 
+ """ + xla_gpu_enable_cublaslt: builtins.bool + """Whether to use cuBLASLt for GEMMs on GPUs.""" + xla_gpu_graph_min_graph_size: builtins.int + """This number determines how many moved instructions like fusion kernels are + required for a region to be captured as a function to be launched as a GPU + graph. + """ + xla_gpu_graph_enable_concurrent_region: builtins.bool + """Identify concurrent regions in GPU graphs and execute them concurrently.""" + xla_gpu_redzone_scratch_max_megabytes: builtins.int + """Size threshold (in megabytes) for the GPU redzone scratch allocator.""" + xla_gpu_redzone_padding_bytes: builtins.int + """Amount of padding the redzone allocator will put on one side of each buffer + it allocates. (So the buffer's total size will be increased by 2x this + value.) + + Higher values make it more likely that we'll catch an out-of-bounds read or + write. Smaller values consume less memory during autotuning. Note that a + fused cudnn conv has up to 6 total buffers (4 inputs, 1 output, and 1 + scratch), so this can be multiplied by quite a lot. + """ + xla_cpu_use_acl: builtins.bool + """Generate calls to Arm Compute Library in the CPU backend.""" + xla_cpu_strict_dot_conv_math: builtins.bool + """By default, XLA:CPU will run fp16 dot/conv as fp32, as this is generally + (much) faster on our hardware. Set this flag to disable this behavior. + """ + xla_gpu_use_runtime_fusion: builtins.bool + """An option to enable using cuDNN runtime compiled fusion kernels which is + available and recommended for Ampere+ GPUs. + """ + xla_dump_latency_hiding_schedule: builtins.bool + xla_cpu_enable_mlir_tiling_and_fusion: builtins.bool + """By default, MLIR lowering will use Linalg elementwise fusion. If this flag + is enabled, the pipeline will use tiling, fusion, peeling, vectorization + instead. 
+ """ + xla_cpu_enable_custom_matmul_tiling: builtins.bool + """XLA:CPU-Next tiling parameters for matmul.""" + xla_cpu_matmul_tiling_m_dim: builtins.int + xla_cpu_matmul_tiling_n_dim: builtins.int + xla_cpu_matmul_tiling_k_dim: builtins.int + xla_cpu_enable_mlir_fusion_outlining: builtins.bool + xla_cpu_enable_experimental_deallocation: builtins.bool + """If set, use the experimental deallocation pass from mlir-hlo.""" + xla_gpu_enable_latency_hiding_scheduler: builtins.bool + xla_gpu_enable_highest_priority_async_stream: builtins.bool + xla_gpu_enable_analytical_latency_estimator: builtins.bool + xla_gpu_lhs_enable_gpu_async_tracker: builtins.bool + xla_gpu_pgle_profile_file_or_directory_path: builtins.str + xla_gpu_memory_limit_slop_factor: builtins.int + xla_gpu_enable_pipelined_collectives: builtins.bool + xla_gpu_enable_pipelined_all_reduce: builtins.bool + xla_gpu_enable_pipelined_all_gather: builtins.bool + xla_gpu_enable_pipelined_reduce_scatter: builtins.bool + xla_gpu_enable_pipelined_p2p: builtins.bool + xla_gpu_run_post_layout_collective_pipeliner: builtins.bool + xla_gpu_collective_permute_decomposer_threshold: builtins.int + """The minimum data size in bytes to trigger collective-permute-decomposer + transformation. + """ + xla_partitioning_algorithm: global___DebugOptions.PartitioningAlgorithm.ValueType + """The partitioning algorithm to be used in the PartitionAssignment pass.""" + xla_gpu_enable_triton_gemm: builtins.bool + xla_gpu_enable_cudnn_int8x32_convolution_reordering: builtins.bool + xla_gpu_triton_gemm_any: builtins.bool + """Creates triton fusion for all supported gemms. + To make sure only triton gemm is chosen by the autotuner run with + `xla_gpu_cublas_fallback` set to false. + """ + xla_gpu_exhaustive_tiling_search: builtins.bool + xla_gpu_enable_priority_fusion: builtins.bool + xla_gpu_dump_autotune_results_to: builtins.str + """File to write autotune results to. 
It will be a binary file unless the name + ends with .txt or .textproto. Warning: The results are written at every + compilation, possibly multiple times per process. This only works on CUDA. + """ + xla_gpu_load_autotune_results_from: builtins.str + """File to load autotune results from. It will be considered a binary file + unless the name ends with .txt or .textproto. At most one loading will + happen during the lifetime of one process, even if the first one is + unsuccessful or different file paths are passed here. This only works on + CUDA. + """ + xla_gpu_target_config_filename: builtins.str + """Description of the target platform in GpuTargetConfigProto format; if + provided, deviceless compilation is assumed, and the current device is + ignored. + """ + xla_gpu_auto_spmd_partitioning_memory_budget_gb: builtins.int + """Memory budget in GB per device for AutoSharding.""" + xla_gpu_auto_spmd_partitioning_memory_budget_ratio: builtins.float + """See the definition of the + xla_gpu_auto_spmd_partitioning_memory_budget_ratio flag for the meaning of + this field. + """ + xla_gpu_triton_gemm_disable_reduced_precision_reduction: builtins.bool + xla_gpu_triton_fusion_level: builtins.int + xla_gpu_dump_autotuned_gemm_fusions: builtins.bool + xla_gpu_override_gemm_autotuner: builtins.str + xla_gpu_copy_insertion_use_region_analysis: builtins.bool + xla_gpu_collect_cost_model_stats: builtins.bool + """If true, each fusion instruction will have a cost model runtime estimate in + backend config after compilation. 
+ """ + xla_gpu_enable_split_k_autotuning: builtins.bool + xla_gpu_enable_reduction_epilogue_fusion: builtins.bool + """Whether reduction epilogue fusion is enabled in fusion passes.""" + xla_gpu_enable_nccl_clique_optimization: builtins.bool + """Allow early return when acquiring NCCL cliques.""" + xla_gpu_mock_custom_calls: builtins.bool + """Replace custom calls with noop operations.""" + xla_gpu_cublas_fallback: builtins.bool + """Allow Triton GEMM autotuning to fall back to cuBLAS when that is + faster. + """ + xla_gpu_enable_while_loop_double_buffering: builtins.bool + """Enable double buffering for loops.""" + xla_gpu_enable_while_loop_unrolling: global___DebugOptions.WhileLoopUnrolling.ValueType + """Determine the while loop unrolling scheme.""" + xla_gpu_ensure_minor_dot_contraction_dims: builtins.bool + """Change the layout of the second triton dot operand to be column major. + Only works for (bf16 x bf16) -> bf16. + """ + xla_gpu_filter_kernels_spilling_registers_on_autotuning: builtins.bool + """Filter out kernels that spill registers during autotuning.""" + xla_debug_buffer_assignment_show_max: builtins.int + """Maximum number of buffers to print when debugging buffer assignment.""" + xla_gpu_llvm_verification_level: builtins.int + xla_gpu_enable_cub_radix_sort: builtins.bool + """Enable radix sort using CUB.""" + xla_gpu_threshold_for_windowed_einsum_mib: builtins.int + """Threshold to enable windowed einsum (collective matmul) in MB.""" + xla_gpu_enable_triton_hopper: builtins.bool + """Enables currently disabled features within Triton for Hopper.""" + xla_gpu_enable_nccl_user_buffers: builtins.bool + """Enable NCCL user buffers.""" + xla_gpu_enable_nccl_comm_splitting: builtins.bool + """Enable NCCL communicator splitting.""" + xla_gpu_enable_nccl_per_stream_comms: builtins.bool + """Enable NCCL per stream communicators.""" + xla_gpu_enable_libnvptxcompiler: builtins.bool + """If enabled, uses the libnvptxcompiler library to compile PTX to cuBIN.""" 
+ xla_gpu_enable_dot_strength_reduction: builtins.bool + xla_gpu_multi_streamed_windowed_einsum: builtins.bool + """Whether to use multiple compute streams to run windowed einsum.""" + xla_gpu_enable_bf16_6way_gemm: builtins.bool + """If enabled, uses bf16_6way gemm to compute F32 gemm.""" + xla_gpu_enable_bf16_3way_gemm: builtins.bool + """If enabled, uses bf16_3way gemm to compute F32 gemm.""" + xla_gpu_nccl_collective_max_nchannels: builtins.int + """Specify the maximum number of channels(SMs) NCCL + will use for collective operations. + """ + xla_gpu_nccl_p2p_max_nchannels: builtins.int + """Specify the maximum number of channels(SMs) NCCL + will use for p2p operations. + """ + xla_gpu_mlir_emitter_level: builtins.int + """Choose the level of mlir emitters that are enabled. + Current levels: + 0: Disabled. + 1: Loop emitter + 2: + Loop-like emitters + 3: + Transpose + 4: + Reduce + """ + xla_gpu_gemm_rewrite_size_threshold: builtins.int + """Threshold to rewrite matmul to cuBLAS or Triton (minimum combined number of + elements of both matrices in non-batch dimensions to be considered for a + rewrite). + """ + xla_gpu_require_complete_aot_autotune_results: builtins.bool + """If true, will require complete AOT autotuning results; in the case of + missing AOT result, the model will not be compiled or executed, a + `NotFound` error will be returned. + """ + xla_gpu_cudnn_gemm_fusion_level: builtins.int + """Let GEMM fusion autotuning probe cuDNN as a backend. + Current levels: + 0: Disabled. + 1: Fusions of GEMM, elementwise, transpose/reshape operations. + 2: + Broadcasts, slicing. + 3: + Nontrivial noncontracting dimension reshapes/transposes. + """ + xla_gpu_use_memcpy_local_p2p: builtins.bool + """This instructs the runtime whether to use + memcpy for p2p communication when source and + target are located within a node(nvlink). + """ + xla_gpu_autotune_max_solutions: builtins.int + """If non-zero, limits the number of solutions to be used by GEMM autotuner. 
+ This might be useful if underlying math library returns too many GEMM + solutions. + """ + xla_dump_large_constants: builtins.bool + """If true, large constants will be printed out when dumping HLOs.""" + xla_gpu_verify_triton_fusion_numerics: builtins.bool + """If true, will verify that the numerical results of Triton fusions match + the results of regular emitters. + """ + xla_gpu_dump_autotune_logs_to: builtins.str + """File to write autotune logs to. It will stored in txt format.""" + xla_reduce_window_rewrite_base_length: builtins.int + """Base length to rewrite the reduce window to, no rewrite if set to 0.""" + xla_gpu_enable_host_memory_offloading: builtins.bool + """If true, will enable host memory offloading on a device.""" + xla_gpu_exclude_nondeterministic_ops: builtins.bool + """Excludes non-deterministic ops from compiled executables. + Unlike --xla_gpu_deterministic_ops does not disable autotuning - the + compilation itself can be non-deterministic. + At present, the HLO op SelectAndScatter does not have a + deterministic XLA:GPU implementation. + Compilation errors out if SelectAndScatter is encountered. + Scatter ops can non-deterministic by default; these get converted to + a deterministic implementation. + """ + xla_gpu_nccl_terminate_on_error: builtins.bool + """If true, Nccl errors will terminate the process.""" + xla_gpu_shard_autotuning: builtins.bool + xla_gpu_enable_approx_costly_collectives: builtins.bool + xla_gpu_kernel_cache_file: builtins.str + xla_gpu_unsafe_pipelined_loop_annotator: builtins.bool + """Recognises rotate-right patterns (slice, slice, concat) within a while + loop and labels the while loop as a pipelined while loop. This is an + unsafe flag. 
+ """ + xla_gpu_per_fusion_autotune_cache_dir: builtins.str + xla_cmd_buffer_trace_cache_size: builtins.int + """The command buffer trace cache size, increasing the cache size may + sometimes reduces the chances of doing command buffer tracing for + updating command buffer instance. + """ + xla_gpu_temp_buffer_use_separate_color: builtins.bool + """Enable this flag will use a separate memory space color for + temp buffer, and then will use separate memory allocator to allocate it, + as there is no other memory allocation interference, + it will allocate temp buffer to some fix address on every iteration, + which is good for cuda-graph perf. + """ + xla_syntax_sugar_async_ops: builtins.bool + """This flag is used for controlling HLO dumping and NVTX marker. If turned + on, both HLO dumping and NVTX marker will use syntactic sugar wrappers + as op names, while the actual op names will be shown if turned off. + + Here is an example HLO excerpt with the flag off: + + async_computation { + param_0 = f32[1,4,8]{1,0,2} parameter(0) + ROOT all-to-all.3.1 = f32[1,4,8]{1,0,2} all-to-all(param_0), + replica_groups={{0,1,2,3,4,5,6,7}}, dimensions={2} + } + ... + + all-to-all-start = + ((f32[1,4,8]{1,0,2}), f32[1,4,8]{1,0,2}) async-start(bitcast.24.0), + calls=async_computation, backend_config={...} + all-to-all-done = f32[1,4,8]{1,0,2} async-done(all-to-all-start) + + and with the flag on: + + all-to-all-start = ((f32[1,4,8]{1,0,2}), f32[1,4,8]{1,0,2}) + all-to-all-start(bitcast.24.0), + replica_groups={{0,1,2,3,4,5,6,7}}, dimensions={2}, + backend_config={...} + all-to-all-done = f32[1,4,8]{1,0,2} all-to-all-done(all-to-all-start) + """ + xla_gpu_autotune_gemm_rtol: builtins.float + """Relative precision for comparing different GEMM solutions""" + xla_enable_command_buffers_during_profiling: builtins.bool + """Allow launching command buffers while profiling active. + When disabled, execute in op-by-op mode. + TODO(b/355487968): Remove this option when validation complete. 
+ """ + xla_gpu_cudnn_gemm_max_plans: builtins.int + """Limit for the number of kernel configurations (plans) to use during + autotuning of cuDNN GEMM fusions. The more - the slower the autotuning + but potentially higher the performance. + """ + xla_gpu_enable_libnvjitlink: builtins.bool + """If enabled, uses the libnvjitlink library for PTX compilation and linking""" + xla_gpu_enable_triton_gemm_int4: builtins.bool + """If enabled, generates triton gemm kernels for int4 inputs.""" + xla_gpu_async_dot: builtins.bool + """If true, XLA will wrap `dot` operations into async computations in an + effort to parallelize matrix operations. + """ + xla_gpu_enable_pgle_accuracy_checker: builtins.bool + """Enables strict PGLE checking. If an FDO profile is specified and latency + hiding scheduler encounters missing instructions in the profile + compilation will halt. + """ + xla_gpu_executable_warn_stuck_timeout_seconds: builtins.int + """Timeouts for RendezvousSingle stuck warning and termination.""" + xla_gpu_executable_terminate_timeout_seconds: builtins.int + xla_experimental_ignore_channel_id: builtins.bool + """Whether to ignore channel ids(including verifier channel id checks) + for collectives in the given HLO. + """ + @property + def xla_disable_hlo_passes(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """List of HLO passes to disable/enable. These names must exactly match the + pass names as specified by the HloPassInterface::name() method. + + At least one of xla_disable_hlo_passes and xla_enable_hlo_passes_only must + be empty. + """ + + @property + def xla_enable_hlo_passes_only(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... 
+ @property + def xla_gpu_ptx_file(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """Paths to files with ptx code.""" + + @property + def xla_gpu_llvm_ir_file(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """Paths to files with LLVM code.""" + + @property + def xla_gpu_disable_async_collectives( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___DebugOptions.CollectiveOpType.ValueType]: ... + @property + def xla_gpu_enable_command_buffer( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___DebugOptions.CommandBufferCmdType.ValueType]: + """Determine the types of commands that are recorded into command buffers.""" + + @property + def legacy_command_buffer_custom_call_targets( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """Custom call targets with legacy registry API (non FFI API), + that support recording to command buffer custom command, + i.e., custom call target supports cuda-graph capturing for CUDA devices. + This flag is read if CUSTOM_CALL command type is recorded into + command buffer. + """ + + @property + def xla_backend_extra_options(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """Next id: 331 + + Extra options to pass to the compilation backend (e.g. LLVM); specific + interpretation of these values is left to the backend. 
+ """ + + def __init__( + self, + *, + xla_cpu_enable_concurrency_optimized_scheduler: builtins.bool | None = ..., + xla_cpu_enable_fast_math: builtins.bool | None = ..., + xla_cpu_enable_fast_min_max: builtins.bool | None = ..., + xla_cpu_fast_math_honor_division: builtins.bool | None = ..., + xla_cpu_fast_math_honor_functions: builtins.bool | None = ..., + xla_cpu_fast_math_honor_infs: builtins.bool | None = ..., + xla_cpu_fast_math_honor_nans: builtins.bool | None = ..., + xla_cpu_use_thunk_runtime: builtins.bool | None = ..., + xla_cpu_parallel_codegen_split_count: builtins.int | None = ..., + xla_cpu_prefer_vector_width: builtins.int | None = ..., + xla_gpu_experimental_autotune_cache_mode: global___DebugOptions.AutotuneCacheMode.ValueType | None = ..., + xla_gpu_experimental_disable_binary_libraries: builtins.bool | None = ..., + xla_gpu_experimental_enable_triton_softmax_priority_fusion: builtins.bool | None = ..., + xla_gpu_unsupported_enable_triton_gemm: builtins.bool | None = ..., + xla_hlo_graph_addresses: builtins.bool | None = ..., + xla_hlo_profile: builtins.bool | None = ..., + xla_disable_hlo_passes: collections.abc.Iterable[builtins.str] | None = ..., + xla_enable_hlo_passes_only: collections.abc.Iterable[builtins.str] | None = ..., + xla_disable_all_hlo_passes: builtins.bool | None = ..., + xla_backend_optimization_level: builtins.int | None = ..., + xla_embed_ir_in_executable: builtins.bool | None = ..., + xla_eliminate_hlo_implicit_broadcast: builtins.bool | None = ..., + xla_cpu_multi_thread_eigen: builtins.bool | None = ..., + xla_gpu_cuda_data_dir: builtins.str | None = ..., + xla_gpu_ftz: builtins.bool | None = ..., + xla_llvm_enable_alias_scope_metadata: builtins.bool | None = ..., + xla_llvm_enable_noalias_metadata: builtins.bool | None = ..., + xla_llvm_enable_invariant_load_metadata: builtins.bool | None = ..., + xla_llvm_disable_expensive_passes: builtins.bool | None = ..., + xla_test_all_output_layouts: builtins.bool | None = ..., + 
xla_test_all_input_layouts: builtins.bool | None = ..., + xla_hlo_graph_sharding_color: builtins.bool | None = ..., + xla_cpu_use_mkl_dnn: builtins.bool | None = ..., + xla_gpu_enable_fast_min_max: builtins.bool | None = ..., + xla_allow_excess_precision: builtins.bool | None = ..., + xla_gpu_crash_on_verification_failures: builtins.bool | None = ..., + xla_gpu_autotune_level: builtins.int | None = ..., + xla_force_host_platform_device_count: builtins.int | None = ..., + xla_gpu_disable_gpuasm_optimizations: builtins.bool | None = ..., + xla_gpu_shape_checks: global___DebugOptions.ShapeChecks.ValueType | None = ..., + xla_hlo_evaluator_use_fast_path: builtins.bool | None = ..., + xla_allow_scalar_index_dynamic_ops: builtins.bool | None = ..., + xla_step_marker_location: global___DebugOptions.StepMarkerLocation.ValueType | None = ..., + xla_dump_to: builtins.str | None = ..., + xla_dump_hlo_module_re: builtins.str | None = ..., + xla_dump_hlo_pass_re: builtins.str | None = ..., + xla_dump_hlo_as_text: builtins.bool | None = ..., + xla_dump_hlo_as_proto: builtins.bool | None = ..., + xla_dump_hlo_as_dot: builtins.bool | None = ..., + xla_dump_hlo_as_url: builtins.bool | None = ..., + xla_dump_hlo_as_html: builtins.bool | None = ..., + xla_dump_fusion_visualization: builtins.bool | None = ..., + xla_dump_hlo_snapshots: builtins.bool | None = ..., + xla_dump_include_timestamp: builtins.bool | None = ..., + xla_dump_max_hlo_modules: builtins.int | None = ..., + xla_dump_module_metadata: builtins.bool | None = ..., + xla_dump_compress_protos: builtins.bool | None = ..., + xla_dump_hlo_as_long_text: builtins.bool | None = ..., + xla_gpu_force_conv_nchw: builtins.bool | None = ..., + xla_gpu_force_conv_nhwc: builtins.bool | None = ..., + xla_gpu_ptx_file: collections.abc.Iterable[builtins.str] | None = ..., + xla_gpu_dump_llvmir: builtins.bool | None = ..., + xla_dump_enable_mlir_pretty_form: builtins.bool | None = ..., + xla_gpu_algorithm_denylist_path: builtins.str | 
None = ..., + xla_tpu_detect_nan: builtins.bool | None = ..., + xla_tpu_detect_inf: builtins.bool | None = ..., + xla_cpu_enable_xprof_traceme: builtins.bool | None = ..., + xla_gpu_unsafe_fallback_to_driver_on_ptxas_not_found: builtins.bool | None = ..., + xla_gpu_asm_extra_flags: builtins.str | None = ..., + xla_multiheap_size_constraint_per_heap: builtins.int | None = ..., + xla_detailed_logging: builtins.bool | None = ..., + xla_enable_dumping: builtins.bool | None = ..., + xla_gpu_force_compilation_parallelism: builtins.int | None = ..., + xla_gpu_enable_llvm_module_compilation_parallelism: builtins.bool | None = ..., + xla_gpu_deterministic_ops: builtins.bool | None = ..., + xla_gpu_llvm_ir_file: collections.abc.Iterable[builtins.str] | None = ..., + xla_gpu_disable_async_collectives: ( + collections.abc.Iterable[global___DebugOptions.CollectiveOpType.ValueType] | None + ) = ..., + xla_gpu_all_reduce_combine_threshold_bytes: builtins.int | None = ..., + xla_gpu_all_gather_combine_threshold_bytes: builtins.int | None = ..., + xla_gpu_reduce_scatter_combine_threshold_bytes: builtins.int | None = ..., + xla_gpu_enable_all_gather_combine_by_dim: builtins.bool | None = ..., + xla_gpu_enable_reduce_scatter_combine_by_dim: builtins.bool | None = ..., + xla_gpu_enable_reassociation_for_converted_ar: builtins.bool | None = ..., + xla_gpu_all_reduce_blueconnect_num_devices_per_host: builtins.int | None = ..., + xla_gpu_enable_while_loop_reduce_scatter_code_motion: builtins.bool | None = ..., + xla_gpu_collective_inflation_factor: builtins.int | None = ..., + xla_llvm_force_inline_before_split: builtins.bool | None = ..., + xla_gpu_enable_cudnn_frontend: builtins.bool | None = ..., + xla_gpu_enable_cudnn_fmha: builtins.bool | None = ..., + xla_gpu_fused_attention_use_cudnn_rng: builtins.bool | None = ..., + xla_gpu_enable_cudnn_layer_norm: builtins.bool | None = ..., + xla_dump_disable_metadata: builtins.bool | None = ..., + xla_dump_hlo_pipeline_re: builtins.str | None 
= ..., + xla_gpu_strict_conv_algorithm_picker: builtins.bool | None = ..., + xla_gpu_enable_custom_fusions: builtins.bool | None = ..., + xla_gpu_enable_custom_fusions_re: builtins.str | None = ..., + xla_gpu_enable_dynamic_slice_fusion: builtins.bool | None = ..., + xla_gpu_nccl_termination_timeout_seconds: builtins.int | None = ..., + xla_gpu_enable_shared_constants: builtins.bool | None = ..., + xla_gpu_enable_cublaslt: builtins.bool | None = ..., + xla_gpu_enable_command_buffer: ( + collections.abc.Iterable[global___DebugOptions.CommandBufferCmdType.ValueType] | None + ) = ..., + xla_gpu_graph_min_graph_size: builtins.int | None = ..., + xla_gpu_graph_enable_concurrent_region: builtins.bool | None = ..., + xla_gpu_redzone_scratch_max_megabytes: builtins.int | None = ..., + xla_gpu_redzone_padding_bytes: builtins.int | None = ..., + xla_cpu_use_acl: builtins.bool | None = ..., + xla_cpu_strict_dot_conv_math: builtins.bool | None = ..., + xla_gpu_use_runtime_fusion: builtins.bool | None = ..., + xla_dump_latency_hiding_schedule: builtins.bool | None = ..., + xla_cpu_enable_mlir_tiling_and_fusion: builtins.bool | None = ..., + xla_cpu_enable_custom_matmul_tiling: builtins.bool | None = ..., + xla_cpu_matmul_tiling_m_dim: builtins.int | None = ..., + xla_cpu_matmul_tiling_n_dim: builtins.int | None = ..., + xla_cpu_matmul_tiling_k_dim: builtins.int | None = ..., + xla_cpu_enable_mlir_fusion_outlining: builtins.bool | None = ..., + xla_cpu_enable_experimental_deallocation: builtins.bool | None = ..., + xla_gpu_enable_latency_hiding_scheduler: builtins.bool | None = ..., + xla_gpu_enable_highest_priority_async_stream: builtins.bool | None = ..., + xla_gpu_enable_analytical_latency_estimator: builtins.bool | None = ..., + xla_gpu_lhs_enable_gpu_async_tracker: builtins.bool | None = ..., + xla_gpu_pgle_profile_file_or_directory_path: builtins.str | None = ..., + xla_gpu_memory_limit_slop_factor: builtins.int | None = ..., + xla_gpu_enable_pipelined_collectives: 
builtins.bool | None = ..., + xla_gpu_enable_pipelined_all_reduce: builtins.bool | None = ..., + xla_gpu_enable_pipelined_all_gather: builtins.bool | None = ..., + xla_gpu_enable_pipelined_reduce_scatter: builtins.bool | None = ..., + xla_gpu_enable_pipelined_p2p: builtins.bool | None = ..., + xla_gpu_run_post_layout_collective_pipeliner: builtins.bool | None = ..., + xla_gpu_collective_permute_decomposer_threshold: builtins.int | None = ..., + xla_partitioning_algorithm: global___DebugOptions.PartitioningAlgorithm.ValueType | None = ..., + xla_gpu_enable_triton_gemm: builtins.bool | None = ..., + xla_gpu_enable_cudnn_int8x32_convolution_reordering: builtins.bool | None = ..., + xla_gpu_triton_gemm_any: builtins.bool | None = ..., + xla_gpu_exhaustive_tiling_search: builtins.bool | None = ..., + xla_gpu_enable_priority_fusion: builtins.bool | None = ..., + xla_gpu_dump_autotune_results_to: builtins.str | None = ..., + xla_gpu_load_autotune_results_from: builtins.str | None = ..., + xla_gpu_target_config_filename: builtins.str | None = ..., + xla_gpu_auto_spmd_partitioning_memory_budget_gb: builtins.int | None = ..., + xla_gpu_auto_spmd_partitioning_memory_budget_ratio: builtins.float | None = ..., + xla_gpu_triton_gemm_disable_reduced_precision_reduction: builtins.bool | None = ..., + xla_gpu_triton_fusion_level: builtins.int | None = ..., + xla_gpu_dump_autotuned_gemm_fusions: builtins.bool | None = ..., + xla_gpu_override_gemm_autotuner: builtins.str | None = ..., + xla_gpu_copy_insertion_use_region_analysis: builtins.bool | None = ..., + xla_gpu_collect_cost_model_stats: builtins.bool | None = ..., + xla_gpu_enable_split_k_autotuning: builtins.bool | None = ..., + xla_gpu_enable_reduction_epilogue_fusion: builtins.bool | None = ..., + xla_gpu_enable_nccl_clique_optimization: builtins.bool | None = ..., + xla_gpu_mock_custom_calls: builtins.bool | None = ..., + xla_gpu_cublas_fallback: builtins.bool | None = ..., + xla_gpu_enable_while_loop_double_buffering: 
builtins.bool | None = ..., + xla_gpu_enable_while_loop_unrolling: global___DebugOptions.WhileLoopUnrolling.ValueType | None = ..., + xla_gpu_ensure_minor_dot_contraction_dims: builtins.bool | None = ..., + xla_gpu_filter_kernels_spilling_registers_on_autotuning: builtins.bool | None = ..., + xla_debug_buffer_assignment_show_max: builtins.int | None = ..., + xla_gpu_llvm_verification_level: builtins.int | None = ..., + xla_gpu_enable_cub_radix_sort: builtins.bool | None = ..., + xla_gpu_threshold_for_windowed_einsum_mib: builtins.int | None = ..., + xla_gpu_enable_triton_hopper: builtins.bool | None = ..., + xla_gpu_enable_nccl_user_buffers: builtins.bool | None = ..., + xla_gpu_enable_nccl_comm_splitting: builtins.bool | None = ..., + xla_gpu_enable_nccl_per_stream_comms: builtins.bool | None = ..., + xla_gpu_enable_libnvptxcompiler: builtins.bool | None = ..., + xla_gpu_enable_dot_strength_reduction: builtins.bool | None = ..., + xla_gpu_multi_streamed_windowed_einsum: builtins.bool | None = ..., + xla_gpu_enable_bf16_6way_gemm: builtins.bool | None = ..., + xla_gpu_enable_bf16_3way_gemm: builtins.bool | None = ..., + xla_gpu_nccl_collective_max_nchannels: builtins.int | None = ..., + xla_gpu_nccl_p2p_max_nchannels: builtins.int | None = ..., + xla_gpu_mlir_emitter_level: builtins.int | None = ..., + xla_gpu_gemm_rewrite_size_threshold: builtins.int | None = ..., + xla_gpu_require_complete_aot_autotune_results: builtins.bool | None = ..., + xla_gpu_cudnn_gemm_fusion_level: builtins.int | None = ..., + xla_gpu_use_memcpy_local_p2p: builtins.bool | None = ..., + xla_gpu_autotune_max_solutions: builtins.int | None = ..., + xla_dump_large_constants: builtins.bool | None = ..., + xla_gpu_verify_triton_fusion_numerics: builtins.bool | None = ..., + xla_gpu_dump_autotune_logs_to: builtins.str | None = ..., + xla_reduce_window_rewrite_base_length: builtins.int | None = ..., + xla_gpu_enable_host_memory_offloading: builtins.bool | None = ..., + 
xla_gpu_exclude_nondeterministic_ops: builtins.bool | None = ..., + xla_gpu_nccl_terminate_on_error: builtins.bool | None = ..., + xla_gpu_shard_autotuning: builtins.bool | None = ..., + xla_gpu_enable_approx_costly_collectives: builtins.bool | None = ..., + xla_gpu_kernel_cache_file: builtins.str | None = ..., + xla_gpu_unsafe_pipelined_loop_annotator: builtins.bool | None = ..., + xla_gpu_per_fusion_autotune_cache_dir: builtins.str | None = ..., + xla_cmd_buffer_trace_cache_size: builtins.int | None = ..., + xla_gpu_temp_buffer_use_separate_color: builtins.bool | None = ..., + legacy_command_buffer_custom_call_targets: collections.abc.Iterable[builtins.str] | None = ..., + xla_syntax_sugar_async_ops: builtins.bool | None = ..., + xla_gpu_autotune_gemm_rtol: builtins.float | None = ..., + xla_enable_command_buffers_during_profiling: builtins.bool | None = ..., + xla_gpu_cudnn_gemm_max_plans: builtins.int | None = ..., + xla_gpu_enable_libnvjitlink: builtins.bool | None = ..., + xla_gpu_enable_triton_gemm_int4: builtins.bool | None = ..., + xla_gpu_async_dot: builtins.bool | None = ..., + xla_gpu_enable_pgle_accuracy_checker: builtins.bool | None = ..., + xla_gpu_executable_warn_stuck_timeout_seconds: builtins.int | None = ..., + xla_gpu_executable_terminate_timeout_seconds: builtins.int | None = ..., + xla_experimental_ignore_channel_id: builtins.bool | None = ..., + xla_backend_extra_options: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "legacy_command_buffer_custom_call_targets", + b"legacy_command_buffer_custom_call_targets", + "xla_allow_excess_precision", + b"xla_allow_excess_precision", + "xla_allow_scalar_index_dynamic_ops", + b"xla_allow_scalar_index_dynamic_ops", + "xla_backend_extra_options", + b"xla_backend_extra_options", + "xla_backend_optimization_level", + b"xla_backend_optimization_level", + "xla_cmd_buffer_trace_cache_size", + b"xla_cmd_buffer_trace_cache_size", + "xla_cpu_enable_concurrency_optimized_scheduler", + b"xla_cpu_enable_concurrency_optimized_scheduler", + "xla_cpu_enable_custom_matmul_tiling", + b"xla_cpu_enable_custom_matmul_tiling", + "xla_cpu_enable_experimental_deallocation", + b"xla_cpu_enable_experimental_deallocation", + "xla_cpu_enable_fast_math", + b"xla_cpu_enable_fast_math", + "xla_cpu_enable_fast_min_max", + b"xla_cpu_enable_fast_min_max", + "xla_cpu_enable_mlir_fusion_outlining", + b"xla_cpu_enable_mlir_fusion_outlining", + "xla_cpu_enable_mlir_tiling_and_fusion", + b"xla_cpu_enable_mlir_tiling_and_fusion", + "xla_cpu_enable_xprof_traceme", + b"xla_cpu_enable_xprof_traceme", + "xla_cpu_fast_math_honor_division", + b"xla_cpu_fast_math_honor_division", + "xla_cpu_fast_math_honor_functions", + b"xla_cpu_fast_math_honor_functions", + "xla_cpu_fast_math_honor_infs", + b"xla_cpu_fast_math_honor_infs", + "xla_cpu_fast_math_honor_nans", + b"xla_cpu_fast_math_honor_nans", + "xla_cpu_matmul_tiling_k_dim", + b"xla_cpu_matmul_tiling_k_dim", + "xla_cpu_matmul_tiling_m_dim", + b"xla_cpu_matmul_tiling_m_dim", + "xla_cpu_matmul_tiling_n_dim", + b"xla_cpu_matmul_tiling_n_dim", + "xla_cpu_multi_thread_eigen", + b"xla_cpu_multi_thread_eigen", + "xla_cpu_parallel_codegen_split_count", + b"xla_cpu_parallel_codegen_split_count", + "xla_cpu_prefer_vector_width", + b"xla_cpu_prefer_vector_width", + "xla_cpu_strict_dot_conv_math", + b"xla_cpu_strict_dot_conv_math", + "xla_cpu_use_acl", + b"xla_cpu_use_acl", + 
"xla_cpu_use_mkl_dnn", + b"xla_cpu_use_mkl_dnn", + "xla_cpu_use_thunk_runtime", + b"xla_cpu_use_thunk_runtime", + "xla_debug_buffer_assignment_show_max", + b"xla_debug_buffer_assignment_show_max", + "xla_detailed_logging", + b"xla_detailed_logging", + "xla_disable_all_hlo_passes", + b"xla_disable_all_hlo_passes", + "xla_disable_hlo_passes", + b"xla_disable_hlo_passes", + "xla_dump_compress_protos", + b"xla_dump_compress_protos", + "xla_dump_disable_metadata", + b"xla_dump_disable_metadata", + "xla_dump_enable_mlir_pretty_form", + b"xla_dump_enable_mlir_pretty_form", + "xla_dump_fusion_visualization", + b"xla_dump_fusion_visualization", + "xla_dump_hlo_as_dot", + b"xla_dump_hlo_as_dot", + "xla_dump_hlo_as_html", + b"xla_dump_hlo_as_html", + "xla_dump_hlo_as_long_text", + b"xla_dump_hlo_as_long_text", + "xla_dump_hlo_as_proto", + b"xla_dump_hlo_as_proto", + "xla_dump_hlo_as_text", + b"xla_dump_hlo_as_text", + "xla_dump_hlo_as_url", + b"xla_dump_hlo_as_url", + "xla_dump_hlo_module_re", + b"xla_dump_hlo_module_re", + "xla_dump_hlo_pass_re", + b"xla_dump_hlo_pass_re", + "xla_dump_hlo_pipeline_re", + b"xla_dump_hlo_pipeline_re", + "xla_dump_hlo_snapshots", + b"xla_dump_hlo_snapshots", + "xla_dump_include_timestamp", + b"xla_dump_include_timestamp", + "xla_dump_large_constants", + b"xla_dump_large_constants", + "xla_dump_latency_hiding_schedule", + b"xla_dump_latency_hiding_schedule", + "xla_dump_max_hlo_modules", + b"xla_dump_max_hlo_modules", + "xla_dump_module_metadata", + b"xla_dump_module_metadata", + "xla_dump_to", + b"xla_dump_to", + "xla_eliminate_hlo_implicit_broadcast", + b"xla_eliminate_hlo_implicit_broadcast", + "xla_embed_ir_in_executable", + b"xla_embed_ir_in_executable", + "xla_enable_command_buffers_during_profiling", + b"xla_enable_command_buffers_during_profiling", + "xla_enable_dumping", + b"xla_enable_dumping", + "xla_enable_hlo_passes_only", + b"xla_enable_hlo_passes_only", + "xla_experimental_ignore_channel_id", + 
b"xla_experimental_ignore_channel_id", + "xla_force_host_platform_device_count", + b"xla_force_host_platform_device_count", + "xla_gpu_algorithm_denylist_path", + b"xla_gpu_algorithm_denylist_path", + "xla_gpu_all_gather_combine_threshold_bytes", + b"xla_gpu_all_gather_combine_threshold_bytes", + "xla_gpu_all_reduce_blueconnect_num_devices_per_host", + b"xla_gpu_all_reduce_blueconnect_num_devices_per_host", + "xla_gpu_all_reduce_combine_threshold_bytes", + b"xla_gpu_all_reduce_combine_threshold_bytes", + "xla_gpu_asm_extra_flags", + b"xla_gpu_asm_extra_flags", + "xla_gpu_async_dot", + b"xla_gpu_async_dot", + "xla_gpu_auto_spmd_partitioning_memory_budget_gb", + b"xla_gpu_auto_spmd_partitioning_memory_budget_gb", + "xla_gpu_auto_spmd_partitioning_memory_budget_ratio", + b"xla_gpu_auto_spmd_partitioning_memory_budget_ratio", + "xla_gpu_autotune_gemm_rtol", + b"xla_gpu_autotune_gemm_rtol", + "xla_gpu_autotune_level", + b"xla_gpu_autotune_level", + "xla_gpu_autotune_max_solutions", + b"xla_gpu_autotune_max_solutions", + "xla_gpu_collect_cost_model_stats", + b"xla_gpu_collect_cost_model_stats", + "xla_gpu_collective_inflation_factor", + b"xla_gpu_collective_inflation_factor", + "xla_gpu_collective_permute_decomposer_threshold", + b"xla_gpu_collective_permute_decomposer_threshold", + "xla_gpu_copy_insertion_use_region_analysis", + b"xla_gpu_copy_insertion_use_region_analysis", + "xla_gpu_crash_on_verification_failures", + b"xla_gpu_crash_on_verification_failures", + "xla_gpu_cublas_fallback", + b"xla_gpu_cublas_fallback", + "xla_gpu_cuda_data_dir", + b"xla_gpu_cuda_data_dir", + "xla_gpu_cudnn_gemm_fusion_level", + b"xla_gpu_cudnn_gemm_fusion_level", + "xla_gpu_cudnn_gemm_max_plans", + b"xla_gpu_cudnn_gemm_max_plans", + "xla_gpu_deterministic_ops", + b"xla_gpu_deterministic_ops", + "xla_gpu_disable_async_collectives", + b"xla_gpu_disable_async_collectives", + "xla_gpu_disable_gpuasm_optimizations", + b"xla_gpu_disable_gpuasm_optimizations", + 
"xla_gpu_dump_autotune_logs_to", + b"xla_gpu_dump_autotune_logs_to", + "xla_gpu_dump_autotune_results_to", + b"xla_gpu_dump_autotune_results_to", + "xla_gpu_dump_autotuned_gemm_fusions", + b"xla_gpu_dump_autotuned_gemm_fusions", + "xla_gpu_dump_llvmir", + b"xla_gpu_dump_llvmir", + "xla_gpu_enable_all_gather_combine_by_dim", + b"xla_gpu_enable_all_gather_combine_by_dim", + "xla_gpu_enable_analytical_latency_estimator", + b"xla_gpu_enable_analytical_latency_estimator", + "xla_gpu_enable_approx_costly_collectives", + b"xla_gpu_enable_approx_costly_collectives", + "xla_gpu_enable_bf16_3way_gemm", + b"xla_gpu_enable_bf16_3way_gemm", + "xla_gpu_enable_bf16_6way_gemm", + b"xla_gpu_enable_bf16_6way_gemm", + "xla_gpu_enable_command_buffer", + b"xla_gpu_enable_command_buffer", + "xla_gpu_enable_cub_radix_sort", + b"xla_gpu_enable_cub_radix_sort", + "xla_gpu_enable_cublaslt", + b"xla_gpu_enable_cublaslt", + "xla_gpu_enable_cudnn_fmha", + b"xla_gpu_enable_cudnn_fmha", + "xla_gpu_enable_cudnn_frontend", + b"xla_gpu_enable_cudnn_frontend", + "xla_gpu_enable_cudnn_int8x32_convolution_reordering", + b"xla_gpu_enable_cudnn_int8x32_convolution_reordering", + "xla_gpu_enable_cudnn_layer_norm", + b"xla_gpu_enable_cudnn_layer_norm", + "xla_gpu_enable_custom_fusions", + b"xla_gpu_enable_custom_fusions", + "xla_gpu_enable_custom_fusions_re", + b"xla_gpu_enable_custom_fusions_re", + "xla_gpu_enable_dot_strength_reduction", + b"xla_gpu_enable_dot_strength_reduction", + "xla_gpu_enable_dynamic_slice_fusion", + b"xla_gpu_enable_dynamic_slice_fusion", + "xla_gpu_enable_fast_min_max", + b"xla_gpu_enable_fast_min_max", + "xla_gpu_enable_highest_priority_async_stream", + b"xla_gpu_enable_highest_priority_async_stream", + "xla_gpu_enable_host_memory_offloading", + b"xla_gpu_enable_host_memory_offloading", + "xla_gpu_enable_latency_hiding_scheduler", + b"xla_gpu_enable_latency_hiding_scheduler", + "xla_gpu_enable_libnvjitlink", + b"xla_gpu_enable_libnvjitlink", + "xla_gpu_enable_libnvptxcompiler", 
+ b"xla_gpu_enable_libnvptxcompiler", + "xla_gpu_enable_llvm_module_compilation_parallelism", + b"xla_gpu_enable_llvm_module_compilation_parallelism", + "xla_gpu_enable_nccl_clique_optimization", + b"xla_gpu_enable_nccl_clique_optimization", + "xla_gpu_enable_nccl_comm_splitting", + b"xla_gpu_enable_nccl_comm_splitting", + "xla_gpu_enable_nccl_per_stream_comms", + b"xla_gpu_enable_nccl_per_stream_comms", + "xla_gpu_enable_nccl_user_buffers", + b"xla_gpu_enable_nccl_user_buffers", + "xla_gpu_enable_pgle_accuracy_checker", + b"xla_gpu_enable_pgle_accuracy_checker", + "xla_gpu_enable_pipelined_all_gather", + b"xla_gpu_enable_pipelined_all_gather", + "xla_gpu_enable_pipelined_all_reduce", + b"xla_gpu_enable_pipelined_all_reduce", + "xla_gpu_enable_pipelined_collectives", + b"xla_gpu_enable_pipelined_collectives", + "xla_gpu_enable_pipelined_p2p", + b"xla_gpu_enable_pipelined_p2p", + "xla_gpu_enable_pipelined_reduce_scatter", + b"xla_gpu_enable_pipelined_reduce_scatter", + "xla_gpu_enable_priority_fusion", + b"xla_gpu_enable_priority_fusion", + "xla_gpu_enable_reassociation_for_converted_ar", + b"xla_gpu_enable_reassociation_for_converted_ar", + "xla_gpu_enable_reduce_scatter_combine_by_dim", + b"xla_gpu_enable_reduce_scatter_combine_by_dim", + "xla_gpu_enable_reduction_epilogue_fusion", + b"xla_gpu_enable_reduction_epilogue_fusion", + "xla_gpu_enable_shared_constants", + b"xla_gpu_enable_shared_constants", + "xla_gpu_enable_split_k_autotuning", + b"xla_gpu_enable_split_k_autotuning", + "xla_gpu_enable_triton_gemm", + b"xla_gpu_enable_triton_gemm", + "xla_gpu_enable_triton_gemm_int4", + b"xla_gpu_enable_triton_gemm_int4", + "xla_gpu_enable_triton_hopper", + b"xla_gpu_enable_triton_hopper", + "xla_gpu_enable_while_loop_double_buffering", + b"xla_gpu_enable_while_loop_double_buffering", + "xla_gpu_enable_while_loop_reduce_scatter_code_motion", + b"xla_gpu_enable_while_loop_reduce_scatter_code_motion", + "xla_gpu_enable_while_loop_unrolling", + 
b"xla_gpu_enable_while_loop_unrolling", + "xla_gpu_ensure_minor_dot_contraction_dims", + b"xla_gpu_ensure_minor_dot_contraction_dims", + "xla_gpu_exclude_nondeterministic_ops", + b"xla_gpu_exclude_nondeterministic_ops", + "xla_gpu_executable_terminate_timeout_seconds", + b"xla_gpu_executable_terminate_timeout_seconds", + "xla_gpu_executable_warn_stuck_timeout_seconds", + b"xla_gpu_executable_warn_stuck_timeout_seconds", + "xla_gpu_exhaustive_tiling_search", + b"xla_gpu_exhaustive_tiling_search", + "xla_gpu_experimental_autotune_cache_mode", + b"xla_gpu_experimental_autotune_cache_mode", + "xla_gpu_experimental_disable_binary_libraries", + b"xla_gpu_experimental_disable_binary_libraries", + "xla_gpu_experimental_enable_triton_softmax_priority_fusion", + b"xla_gpu_experimental_enable_triton_softmax_priority_fusion", + "xla_gpu_filter_kernels_spilling_registers_on_autotuning", + b"xla_gpu_filter_kernels_spilling_registers_on_autotuning", + "xla_gpu_force_compilation_parallelism", + b"xla_gpu_force_compilation_parallelism", + "xla_gpu_force_conv_nchw", + b"xla_gpu_force_conv_nchw", + "xla_gpu_force_conv_nhwc", + b"xla_gpu_force_conv_nhwc", + "xla_gpu_ftz", + b"xla_gpu_ftz", + "xla_gpu_fused_attention_use_cudnn_rng", + b"xla_gpu_fused_attention_use_cudnn_rng", + "xla_gpu_gemm_rewrite_size_threshold", + b"xla_gpu_gemm_rewrite_size_threshold", + "xla_gpu_graph_enable_concurrent_region", + b"xla_gpu_graph_enable_concurrent_region", + "xla_gpu_graph_min_graph_size", + b"xla_gpu_graph_min_graph_size", + "xla_gpu_kernel_cache_file", + b"xla_gpu_kernel_cache_file", + "xla_gpu_lhs_enable_gpu_async_tracker", + b"xla_gpu_lhs_enable_gpu_async_tracker", + "xla_gpu_llvm_ir_file", + b"xla_gpu_llvm_ir_file", + "xla_gpu_llvm_verification_level", + b"xla_gpu_llvm_verification_level", + "xla_gpu_load_autotune_results_from", + b"xla_gpu_load_autotune_results_from", + "xla_gpu_memory_limit_slop_factor", + b"xla_gpu_memory_limit_slop_factor", + "xla_gpu_mlir_emitter_level", + 
b"xla_gpu_mlir_emitter_level", + "xla_gpu_mock_custom_calls", + b"xla_gpu_mock_custom_calls", + "xla_gpu_multi_streamed_windowed_einsum", + b"xla_gpu_multi_streamed_windowed_einsum", + "xla_gpu_nccl_collective_max_nchannels", + b"xla_gpu_nccl_collective_max_nchannels", + "xla_gpu_nccl_p2p_max_nchannels", + b"xla_gpu_nccl_p2p_max_nchannels", + "xla_gpu_nccl_terminate_on_error", + b"xla_gpu_nccl_terminate_on_error", + "xla_gpu_nccl_termination_timeout_seconds", + b"xla_gpu_nccl_termination_timeout_seconds", + "xla_gpu_override_gemm_autotuner", + b"xla_gpu_override_gemm_autotuner", + "xla_gpu_per_fusion_autotune_cache_dir", + b"xla_gpu_per_fusion_autotune_cache_dir", + "xla_gpu_pgle_profile_file_or_directory_path", + b"xla_gpu_pgle_profile_file_or_directory_path", + "xla_gpu_ptx_file", + b"xla_gpu_ptx_file", + "xla_gpu_reduce_scatter_combine_threshold_bytes", + b"xla_gpu_reduce_scatter_combine_threshold_bytes", + "xla_gpu_redzone_padding_bytes", + b"xla_gpu_redzone_padding_bytes", + "xla_gpu_redzone_scratch_max_megabytes", + b"xla_gpu_redzone_scratch_max_megabytes", + "xla_gpu_require_complete_aot_autotune_results", + b"xla_gpu_require_complete_aot_autotune_results", + "xla_gpu_run_post_layout_collective_pipeliner", + b"xla_gpu_run_post_layout_collective_pipeliner", + "xla_gpu_shape_checks", + b"xla_gpu_shape_checks", + "xla_gpu_shard_autotuning", + b"xla_gpu_shard_autotuning", + "xla_gpu_strict_conv_algorithm_picker", + b"xla_gpu_strict_conv_algorithm_picker", + "xla_gpu_target_config_filename", + b"xla_gpu_target_config_filename", + "xla_gpu_temp_buffer_use_separate_color", + b"xla_gpu_temp_buffer_use_separate_color", + "xla_gpu_threshold_for_windowed_einsum_mib", + b"xla_gpu_threshold_for_windowed_einsum_mib", + "xla_gpu_triton_fusion_level", + b"xla_gpu_triton_fusion_level", + "xla_gpu_triton_gemm_any", + b"xla_gpu_triton_gemm_any", + "xla_gpu_triton_gemm_disable_reduced_precision_reduction", + b"xla_gpu_triton_gemm_disable_reduced_precision_reduction", + 
"xla_gpu_unsafe_fallback_to_driver_on_ptxas_not_found", + b"xla_gpu_unsafe_fallback_to_driver_on_ptxas_not_found", + "xla_gpu_unsafe_pipelined_loop_annotator", + b"xla_gpu_unsafe_pipelined_loop_annotator", + "xla_gpu_unsupported_enable_triton_gemm", + b"xla_gpu_unsupported_enable_triton_gemm", + "xla_gpu_use_memcpy_local_p2p", + b"xla_gpu_use_memcpy_local_p2p", + "xla_gpu_use_runtime_fusion", + b"xla_gpu_use_runtime_fusion", + "xla_gpu_verify_triton_fusion_numerics", + b"xla_gpu_verify_triton_fusion_numerics", + "xla_hlo_evaluator_use_fast_path", + b"xla_hlo_evaluator_use_fast_path", + "xla_hlo_graph_addresses", + b"xla_hlo_graph_addresses", + "xla_hlo_graph_sharding_color", + b"xla_hlo_graph_sharding_color", + "xla_hlo_profile", + b"xla_hlo_profile", + "xla_llvm_disable_expensive_passes", + b"xla_llvm_disable_expensive_passes", + "xla_llvm_enable_alias_scope_metadata", + b"xla_llvm_enable_alias_scope_metadata", + "xla_llvm_enable_invariant_load_metadata", + b"xla_llvm_enable_invariant_load_metadata", + "xla_llvm_enable_noalias_metadata", + b"xla_llvm_enable_noalias_metadata", + "xla_llvm_force_inline_before_split", + b"xla_llvm_force_inline_before_split", + "xla_multiheap_size_constraint_per_heap", + b"xla_multiheap_size_constraint_per_heap", + "xla_partitioning_algorithm", + b"xla_partitioning_algorithm", + "xla_reduce_window_rewrite_base_length", + b"xla_reduce_window_rewrite_base_length", + "xla_step_marker_location", + b"xla_step_marker_location", + "xla_syntax_sugar_async_ops", + b"xla_syntax_sugar_async_ops", + "xla_test_all_input_layouts", + b"xla_test_all_input_layouts", + "xla_test_all_output_layouts", + b"xla_test_all_output_layouts", + "xla_tpu_detect_inf", + b"xla_tpu_detect_inf", + "xla_tpu_detect_nan", + b"xla_tpu_detect_nan", + ], + ) -> None: ... + +global___DebugOptions = DebugOptions + +@typing.final +class GpuCompilationEnvironment(google.protobuf.message.Message): + """Contains flags which affects the GPU compilation result. 
+ These flags are part of Debug Options as of now, and will be migrated to + this proto. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DUMMY_FLAG_FIELD_NUMBER: builtins.int + dummy_flag: builtins.int + """Temporary dummy flag is added to test the flow. + To be removed when we add flags here. + """ + def __init__(self, *, dummy_flag: builtins.int | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["dummy_flag", b"dummy_flag"]) -> None: ... + +global___GpuCompilationEnvironment = GpuCompilationEnvironment + +@typing.final +class ShardableValueUpdatePairProto(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INPUT_PARAMETER_NUMBER_FIELD_NUMBER: builtins.int + PARAMETER_SHAPE_INDEX_FIELD_NUMBER: builtins.int + OUTPUT_SHAPE_INDEX_FIELD_NUMBER: builtins.int + input_parameter_number: builtins.int + @property + def parameter_shape_index(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def output_shape_index(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__( + self, + *, + input_parameter_number: builtins.int | None = ..., + parameter_shape_index: collections.abc.Iterable[builtins.int] | None = ..., + output_shape_index: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "input_parameter_number", + b"input_parameter_number", + "output_shape_index", + b"output_shape_index", + "parameter_shape_index", + b"parameter_shape_index", + ], + ) -> None: ... + +global___ShardableValueUpdatePairProto = ShardableValueUpdatePairProto + +@typing.final +class ExecutionOptions(google.protobuf.message.Message): + """These settings control how XLA compiles and/or runs code. Not all settings + will have an effect on every platform. + + When adding new fields, keep in mind that boolean fields default to false. 
+ Next id: 25. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SHAPE_WITH_OUTPUT_LAYOUT_FIELD_NUMBER: builtins.int + SEED_FIELD_NUMBER: builtins.int + DEBUG_OPTIONS_FIELD_NUMBER: builtins.int + DEVICE_HANDLES_FIELD_NUMBER: builtins.int + NUM_REPLICAS_FIELD_NUMBER: builtins.int + DEVICE_ASSIGNMENT_FIELD_NUMBER: builtins.int + ALIAS_PASSTHROUGH_PARAMS_FIELD_NUMBER: builtins.int + NUM_PARTITIONS_FIELD_NUMBER: builtins.int + LAUNCH_ID_FIELD_NUMBER: builtins.int + USE_SPMD_PARTITIONING_FIELD_NUMBER: builtins.int + USE_AUTO_SPMD_PARTITIONING_FIELD_NUMBER: builtins.int + AUTO_SPMD_PARTITIONING_MESH_SHAPE_FIELD_NUMBER: builtins.int + AUTO_SPMD_PARTITIONING_MESH_IDS_FIELD_NUMBER: builtins.int + DEDUPLICATE_HLO_FIELD_NUMBER: builtins.int + ALLOW_SPMD_SHARDING_PROPAGATION_TO_PARAMETERS_FIELD_NUMBER: builtins.int + ALLOW_SPMD_SHARDING_PROPAGATION_TO_OUTPUT_FIELD_NUMBER: builtins.int + PARAM_REQUIRES_BROADCAST_VIA_COLLECTIVES_FIELD_NUMBER: builtins.int + ALLOW_SEPARATE_SHARDING_PROGRAMS_FIELD_NUMBER: builtins.int + SHARDABLE_VALUE_UPDATE_PAIRS_FIELD_NUMBER: builtins.int + FDO_PROFILE_FIELD_NUMBER: builtins.int + DEVICE_MEMORY_SIZE_FIELD_NUMBER: builtins.int + USE_SHARDY_PARTITIONER_FIELD_NUMBER: builtins.int + seed: builtins.int + """Used to seed random-number generators used in this computation. If this is + 0, we generate a seed ourselves. + + TODO(b/32083678): Changing the seed unnecessarily forces a recompilation. + """ + num_replicas: builtins.int + """Number of replicas of the computation to run. If zero, uses the default + number of replicas for the XLA service. + """ + alias_passthrough_params: builtins.bool + """Alias input and output buffers for parameters that are passed-through XLA + modules without being changed. + """ + num_partitions: builtins.int + """Number of partitions of the computation to run (model parallelism). + If zero, uses the default number of partitions for the XLA service. 
+ """ + launch_id: builtins.int + """Used to identify a set of programs that should be launch together.""" + use_spmd_partitioning: builtins.bool + """Indicates whether to use SPMD (true) or MPMD (false) partitioning when + num_partitions > 1 and XLA is requested to partition the input program. + """ + use_auto_spmd_partitioning: builtins.bool + """Whether to automatically generate XLA shardings for SPMD partitioner.""" + deduplicate_hlo: builtins.bool + """If set, deduplicate hlo into function calls to reduce binary size. Only + works on TPU. + """ + allow_separate_sharding_programs: builtins.bool + """If enabled, the compiler may generate sharding and unsharding programs as + separate HLO modules, and modify the main program's input and output to + be sharded. + """ + fdo_profile: builtins.bytes + """Profiling data for feedback directed optimizations. Note that this is not + the only way to feed FDO data into the compiler and individual backends + may choose to get FDO data by other means. + """ + device_memory_size: builtins.int + """Amount of device memory available for the executable to use.""" + use_shardy_partitioner: builtins.bool + """Use Shardy, a new partitioner, to replace the existing + ShardingPropagation and SpmdPartitioner. See go/xla-sdy-pipeline for + details. + """ + @property + def shape_with_output_layout(self) -> tensorflow.compiler.xla.xla_data_pb2.ShapeProto: + """This optional field's layout is used as a hint when storing the output of + this computation. Subsequent transfers of this output array to the client + may be faster when using this layout. + + We use a Shape here to accommodate computations that return a tuple. + """ + + @property + def debug_options(self) -> global___DebugOptions: ... 
+ @property + def device_handles( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.xla_data_pb2.DeviceHandle]: + """This optional field specifies a particular set of devices to run the + computation on. The computation will be partitioned across these devices. + If not provided, the default device will be chosen. + """ + + @property + def device_assignment(self) -> tensorflow.compiler.xla.xla_data_pb2.DeviceAssignmentProto: + """This optional field specifies the device assignment if known at compile + time. + """ + + @property + def auto_spmd_partitioning_mesh_shape(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Device mesh shape used to create the sharding search space when + use_auto_spmd_partitioning=true. + """ + + @property + def auto_spmd_partitioning_mesh_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Device mesh ids compatible with the above mesh_shape used when + use_auto_spmd_partitioning=true. + """ + + @property + def allow_spmd_sharding_propagation_to_parameters( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: + """Allows sharding propagation to propagate to the parameters. This changes + the input shape of the computation (which is undesirable), but it can be + used to allow to run partial compilation to determine what would be the + input sharding of a computation if XLA would be allowed to propagate the + sharding which can be used by higher level framework as a way to query + intermediate sharding of operations when multiple computation would be + chained and merged together. + This is a vector of bool, because the user can control which parameters can + have the sharding substituted. If only one boolean value is passed in the + vector that is interpreted as the value to be applied for every parameter. 
+ """ + + @property + def allow_spmd_sharding_propagation_to_output( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: + """Allows sharding propagation to propagate to the outputs. This changes the + output shape of the computation (which is undesirable), but it can be used + to allow to run partial compilation to determine what would be the output + sharding of a computation if XLA would be allowed to propagate the sharding + which can be used by higher level framework as a way to query intermediate + sharding of operations when multiple computation would be chained and + merged together. + This is a vector of bool, because the user can control (if the output of + the computation is a tuple) which elements of the tuple can have the + sharding substituted and which don't. If only one boolean value is passed + in the vector that's interpreted as the value to be applied for every + single element of the output tuple. One value per element of the tuple + means that each value is attached to one of the output elements. + """ + + @property + def param_requires_broadcast_via_collectives( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: + """Whether to broadcast args across all replicas. 
One entry per arg.""" + + @property + def shardable_value_update_pairs( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ShardableValueUpdatePairProto]: + """The list of input/output pairs in the main program that could be sharded.""" + + def __init__( + self, + *, + shape_with_output_layout: tensorflow.compiler.xla.xla_data_pb2.ShapeProto | None = ..., + seed: builtins.int | None = ..., + debug_options: global___DebugOptions | None = ..., + device_handles: collections.abc.Iterable[tensorflow.compiler.xla.xla_data_pb2.DeviceHandle] | None = ..., + num_replicas: builtins.int | None = ..., + device_assignment: tensorflow.compiler.xla.xla_data_pb2.DeviceAssignmentProto | None = ..., + alias_passthrough_params: builtins.bool | None = ..., + num_partitions: builtins.int | None = ..., + launch_id: builtins.int | None = ..., + use_spmd_partitioning: builtins.bool | None = ..., + use_auto_spmd_partitioning: builtins.bool | None = ..., + auto_spmd_partitioning_mesh_shape: collections.abc.Iterable[builtins.int] | None = ..., + auto_spmd_partitioning_mesh_ids: collections.abc.Iterable[builtins.int] | None = ..., + deduplicate_hlo: builtins.bool | None = ..., + allow_spmd_sharding_propagation_to_parameters: collections.abc.Iterable[builtins.bool] | None = ..., + allow_spmd_sharding_propagation_to_output: collections.abc.Iterable[builtins.bool] | None = ..., + param_requires_broadcast_via_collectives: collections.abc.Iterable[builtins.bool] | None = ..., + allow_separate_sharding_programs: builtins.bool | None = ..., + shardable_value_update_pairs: collections.abc.Iterable[global___ShardableValueUpdatePairProto] | None = ..., + fdo_profile: builtins.bytes | None = ..., + device_memory_size: builtins.int | None = ..., + use_shardy_partitioner: builtins.bool | None = ..., + ) -> None: ... 
+ def HasField( + self, + field_name: typing.Literal[ + "debug_options", + b"debug_options", + "device_assignment", + b"device_assignment", + "shape_with_output_layout", + b"shape_with_output_layout", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "alias_passthrough_params", + b"alias_passthrough_params", + "allow_separate_sharding_programs", + b"allow_separate_sharding_programs", + "allow_spmd_sharding_propagation_to_output", + b"allow_spmd_sharding_propagation_to_output", + "allow_spmd_sharding_propagation_to_parameters", + b"allow_spmd_sharding_propagation_to_parameters", + "auto_spmd_partitioning_mesh_ids", + b"auto_spmd_partitioning_mesh_ids", + "auto_spmd_partitioning_mesh_shape", + b"auto_spmd_partitioning_mesh_shape", + "debug_options", + b"debug_options", + "deduplicate_hlo", + b"deduplicate_hlo", + "device_assignment", + b"device_assignment", + "device_handles", + b"device_handles", + "device_memory_size", + b"device_memory_size", + "fdo_profile", + b"fdo_profile", + "launch_id", + b"launch_id", + "num_partitions", + b"num_partitions", + "num_replicas", + b"num_replicas", + "param_requires_broadcast_via_collectives", + b"param_requires_broadcast_via_collectives", + "seed", + b"seed", + "shape_with_output_layout", + b"shape_with_output_layout", + "shardable_value_update_pairs", + b"shardable_value_update_pairs", + "use_auto_spmd_partitioning", + b"use_auto_spmd_partitioning", + "use_shardy_partitioner", + b"use_shardy_partitioner", + "use_spmd_partitioning", + b"use_spmd_partitioning", + ], + ) -> None: ... + +global___ExecutionOptions = ExecutionOptions + +@typing.final +class HloModuleConfigProto(google.protobuf.message.Message): + """Serialization of HloModuleConfig. See the C++ class definition for + descriptions of each field. + There are no guarantees of backwards or forwards compatibility. + Next id: 36. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _FusionConfigCollection: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _FusionConfigCollectionEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[HloModuleConfigProto._FusionConfigCollection.ValueType], + builtins.type, + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + OFF: HloModuleConfigProto._FusionConfigCollection.ValueType # 0 + """Do not collect configuration.""" + PER_EDGE: HloModuleConfigProto._FusionConfigCollection.ValueType # 1 + """Collect per-edge configuration.""" + PER_NODE: HloModuleConfigProto._FusionConfigCollection.ValueType # 2 + """Collect per-node configuration.""" + + class FusionConfigCollection(_FusionConfigCollection, metaclass=_FusionConfigCollectionEnumTypeWrapper): ... + OFF: HloModuleConfigProto.FusionConfigCollection.ValueType # 0 + """Do not collect configuration.""" + PER_EDGE: HloModuleConfigProto.FusionConfigCollection.ValueType # 1 + """Collect per-edge configuration.""" + PER_NODE: HloModuleConfigProto.FusionConfigCollection.ValueType # 2 + """Collect per-node configuration.""" + + @typing.final + class BoolList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALS_FIELD_NUMBER: builtins.int + @property + def vals(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... + def __init__(self, *, vals: collections.abc.Iterable[builtins.bool] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["vals", b"vals"]) -> None: ... + + @typing.final + class Int64List(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALS_FIELD_NUMBER: builtins.int + @property + def vals(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... 
+ def __init__(self, *, vals: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["vals", b"vals"]) -> None: ... + + @typing.final + class Int64ListList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LISTS_FIELD_NUMBER: builtins.int + @property + def lists( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleConfigProto.Int64List]: ... + def __init__(self, *, lists: collections.abc.Iterable[global___HloModuleConfigProto.Int64List] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["lists", b"lists"]) -> None: ... + + @typing.final + class DotConfigEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> global___HloModuleConfigProto.Int64List: ... + def __init__( + self, *, key: builtins.str | None = ..., value: global___HloModuleConfigProto.Int64List | None = ... + ) -> None: ... + def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + @typing.final + class AnalysisAllowanceMapEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.int + def __init__(self, *, key: builtins.str | None = ..., value: builtins.int | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + ENTRY_COMPUTATION_LAYOUT_FIELD_NUMBER: builtins.int + SEED_FIELD_NUMBER: builtins.int + LAUNCH_ID_FIELD_NUMBER: builtins.int + REPLICA_COUNT_FIELD_NUMBER: builtins.int + NUM_PARTITIONS_FIELD_NUMBER: builtins.int + PARAM_REQUIRES_BROADCAST_VIA_COLLECTIVES_FIELD_NUMBER: builtins.int + USE_SPMD_PARTITIONING_FIELD_NUMBER: builtins.int + USE_AUTO_SPMD_PARTITIONING_FIELD_NUMBER: builtins.int + AUTO_SPMD_PARTITIONING_MESH_SHAPE_FIELD_NUMBER: builtins.int + AUTO_SPMD_PARTITIONING_MESH_IDS_FIELD_NUMBER: builtins.int + DEDUPLICATE_HLO_FIELD_NUMBER: builtins.int + INTRA_OP_PARALLELISM_THREADS_FIELD_NUMBER: builtins.int + DEVICE_TYPE_FIELD_NUMBER: builtins.int + DEBUG_OPTIONS_FIELD_NUMBER: builtins.int + STATIC_DEVICE_ASSIGNMENT_FIELD_NUMBER: builtins.int + PRE_SIMULATION_DEVICE_ASSIGNMENT_FIELD_NUMBER: builtins.int + ALLOW_SEPARATE_SHARDING_PROGRAMS_FIELD_NUMBER: builtins.int + SHARDABLE_VALUE_UPDATE_PAIRS_FIELD_NUMBER: builtins.int + ALIAS_PASSTHROUGH_PARAMS_FIELD_NUMBER: builtins.int + CONTENT_AWARE_COMPUTATION_SORTING_FIELD_NUMBER: builtins.int + FUSION_CONFIG_COLLECTION_FIELD_NUMBER: builtins.int + FUSION_CONFIG_FIELD_NUMBER: builtins.int + DOT_CONFIG_FIELD_NUMBER: builtins.int + LAYOUT_CONFIG_FIELD_NUMBER: builtins.int + MEMORY_SPACE_ASSIGNMENT_CONFIG_FIELD_NUMBER: builtins.int + PHASE_ORDERING_CONFIG_FIELD_NUMBER: builtins.int + PHASE_INDEX_FIELD_NUMBER: builtins.int + ALLOW_SPMD_SHARDING_PROPAGATION_TO_PARAMETERS_FIELD_NUMBER: builtins.int + ALLOW_SPMD_SHARDING_PROPAGATION_TO_OUTPUT_FIELD_NUMBER: builtins.int + ANALYSIS_ALLOWANCE_MAP_FIELD_NUMBER: builtins.int + MATRIX_UNIT_OPERAND_PRECISION_FIELD_NUMBER: builtins.int + FDO_PROFILE_FIELD_NUMBER: builtins.int + DEVICE_MEMORY_SIZE_FIELD_NUMBER: builtins.int + USE_SHARDY_PARTITIONER_FIELD_NUMBER: builtins.int + seed: builtins.int + launch_id: builtins.int + replica_count: builtins.int + num_partitions: builtins.int + use_spmd_partitioning: builtins.bool + use_auto_spmd_partitioning: builtins.bool + deduplicate_hlo: 
builtins.bool + intra_op_parallelism_threads: builtins.int + device_type: builtins.str + allow_separate_sharding_programs: builtins.bool + alias_passthrough_params: builtins.bool + content_aware_computation_sorting: builtins.bool + fusion_config_collection: global___HloModuleConfigProto.FusionConfigCollection.ValueType + phase_index: builtins.int + matrix_unit_operand_precision: tensorflow.compiler.xla.xla_data_pb2.PrecisionConfig.Precision.ValueType + fdo_profile: builtins.bytes + device_memory_size: builtins.int + use_shardy_partitioner: builtins.bool + @property + def entry_computation_layout(self) -> tensorflow.compiler.xla.xla_data_pb2.ProgramShapeProto: ... + @property + def param_requires_broadcast_via_collectives( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... + @property + def auto_spmd_partitioning_mesh_shape( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def auto_spmd_partitioning_mesh_ids( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def debug_options(self) -> global___DebugOptions: ... + @property + def static_device_assignment(self) -> tensorflow.compiler.xla.xla_data_pb2.DeviceAssignmentProto: ... + @property + def pre_simulation_device_assignment(self) -> tensorflow.compiler.xla.xla_data_pb2.DeviceAssignmentProto: + """The original device assignment before being changed by a simulator. + Simulators, like HybridSim, may change the device assignment to a smaller + topology, to make simulation easier. + """ + + @property + def shardable_value_update_pairs( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ShardableValueUpdatePairProto]: ... + @property + def fusion_config( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleConfigProto.BoolList]: ... 
+ @property + def dot_config( + self, + ) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___HloModuleConfigProto.Int64List]: ... + @property + def layout_config( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleConfigProto.Int64ListList]: ... + @property + def memory_space_assignment_config( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + @property + def phase_ordering_config( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HloModuleConfigProto.BoolList]: ... + @property + def allow_spmd_sharding_propagation_to_parameters( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... + @property + def allow_spmd_sharding_propagation_to_output( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... + @property + def analysis_allowance_map(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.int]: ... 
+ def __init__( + self, + *, + entry_computation_layout: tensorflow.compiler.xla.xla_data_pb2.ProgramShapeProto | None = ..., + seed: builtins.int | None = ..., + launch_id: builtins.int | None = ..., + replica_count: builtins.int | None = ..., + num_partitions: builtins.int | None = ..., + param_requires_broadcast_via_collectives: collections.abc.Iterable[builtins.bool] | None = ..., + use_spmd_partitioning: builtins.bool | None = ..., + use_auto_spmd_partitioning: builtins.bool | None = ..., + auto_spmd_partitioning_mesh_shape: collections.abc.Iterable[builtins.int] | None = ..., + auto_spmd_partitioning_mesh_ids: collections.abc.Iterable[builtins.int] | None = ..., + deduplicate_hlo: builtins.bool | None = ..., + intra_op_parallelism_threads: builtins.int | None = ..., + device_type: builtins.str | None = ..., + debug_options: global___DebugOptions | None = ..., + static_device_assignment: tensorflow.compiler.xla.xla_data_pb2.DeviceAssignmentProto | None = ..., + pre_simulation_device_assignment: tensorflow.compiler.xla.xla_data_pb2.DeviceAssignmentProto | None = ..., + allow_separate_sharding_programs: builtins.bool | None = ..., + shardable_value_update_pairs: collections.abc.Iterable[global___ShardableValueUpdatePairProto] | None = ..., + alias_passthrough_params: builtins.bool | None = ..., + content_aware_computation_sorting: builtins.bool | None = ..., + fusion_config_collection: global___HloModuleConfigProto.FusionConfigCollection.ValueType | None = ..., + fusion_config: collections.abc.Iterable[global___HloModuleConfigProto.BoolList] | None = ..., + dot_config: collections.abc.Mapping[builtins.str, global___HloModuleConfigProto.Int64List] | None = ..., + layout_config: collections.abc.Iterable[global___HloModuleConfigProto.Int64ListList] | None = ..., + memory_space_assignment_config: collections.abc.Iterable[builtins.int] | None = ..., + phase_ordering_config: collections.abc.Iterable[global___HloModuleConfigProto.BoolList] | None = ..., + phase_index: 
builtins.int | None = ..., + allow_spmd_sharding_propagation_to_parameters: collections.abc.Iterable[builtins.bool] | None = ..., + allow_spmd_sharding_propagation_to_output: collections.abc.Iterable[builtins.bool] | None = ..., + analysis_allowance_map: collections.abc.Mapping[builtins.str, builtins.int] | None = ..., + matrix_unit_operand_precision: tensorflow.compiler.xla.xla_data_pb2.PrecisionConfig.Precision.ValueType | None = ..., + fdo_profile: builtins.bytes | None = ..., + device_memory_size: builtins.int | None = ..., + use_shardy_partitioner: builtins.bool | None = ..., + ) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "debug_options", + b"debug_options", + "entry_computation_layout", + b"entry_computation_layout", + "pre_simulation_device_assignment", + b"pre_simulation_device_assignment", + "static_device_assignment", + b"static_device_assignment", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "alias_passthrough_params", + b"alias_passthrough_params", + "allow_separate_sharding_programs", + b"allow_separate_sharding_programs", + "allow_spmd_sharding_propagation_to_output", + b"allow_spmd_sharding_propagation_to_output", + "allow_spmd_sharding_propagation_to_parameters", + b"allow_spmd_sharding_propagation_to_parameters", + "analysis_allowance_map", + b"analysis_allowance_map", + "auto_spmd_partitioning_mesh_ids", + b"auto_spmd_partitioning_mesh_ids", + "auto_spmd_partitioning_mesh_shape", + b"auto_spmd_partitioning_mesh_shape", + "content_aware_computation_sorting", + b"content_aware_computation_sorting", + "debug_options", + b"debug_options", + "deduplicate_hlo", + b"deduplicate_hlo", + "device_memory_size", + b"device_memory_size", + "device_type", + b"device_type", + "dot_config", + b"dot_config", + "entry_computation_layout", + b"entry_computation_layout", + "fdo_profile", + b"fdo_profile", + "fusion_config", + b"fusion_config", + "fusion_config_collection", + 
b"fusion_config_collection", + "intra_op_parallelism_threads", + b"intra_op_parallelism_threads", + "launch_id", + b"launch_id", + "layout_config", + b"layout_config", + "matrix_unit_operand_precision", + b"matrix_unit_operand_precision", + "memory_space_assignment_config", + b"memory_space_assignment_config", + "num_partitions", + b"num_partitions", + "param_requires_broadcast_via_collectives", + b"param_requires_broadcast_via_collectives", + "phase_index", + b"phase_index", + "phase_ordering_config", + b"phase_ordering_config", + "pre_simulation_device_assignment", + b"pre_simulation_device_assignment", + "replica_count", + b"replica_count", + "seed", + b"seed", + "shardable_value_update_pairs", + b"shardable_value_update_pairs", + "static_device_assignment", + b"static_device_assignment", + "use_auto_spmd_partitioning", + b"use_auto_spmd_partitioning", + "use_shardy_partitioner", + b"use_shardy_partitioner", + "use_spmd_partitioning", + b"use_spmd_partitioning", + ], + ) -> None: ... + +global___HloModuleConfigProto = HloModuleConfigProto + +@typing.final +class HloModuleProtoWithConfig(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + HLO_MODULE_FIELD_NUMBER: builtins.int + CONFIG_FIELD_NUMBER: builtins.int + @property + def hlo_module(self) -> tensorflow.compiler.xla.service.hlo_pb2.HloModuleProto: ... + @property + def config(self) -> global___HloModuleConfigProto: ... + def __init__( + self, + *, + hlo_module: tensorflow.compiler.xla.service.hlo_pb2.HloModuleProto | None = ..., + config: global___HloModuleConfigProto | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["config", b"config", "hlo_module", b"hlo_module"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["config", b"config", "hlo_module", b"hlo_module"]) -> None: ... 
+ +global___HloModuleProtoWithConfig = HloModuleProtoWithConfig + +@typing.final +class ScheduleProto(google.protobuf.message.Message): + """A trace estimated by the Latency Hiding Scheduler.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class Instruction(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ID_FIELD_NUMBER: builtins.int + START_TIMESTAMP_CYCLES_FIELD_NUMBER: builtins.int + END_TIMESTAMP_CYCLES_FIELD_NUMBER: builtins.int + id: builtins.int + """Instruction id (matches the id in HloInstructionProto).""" + start_timestamp_cycles: builtins.float + """Start and end timestamps in cycles.""" + end_timestamp_cycles: builtins.float + def __init__( + self, + *, + id: builtins.int | None = ..., + start_timestamp_cycles: builtins.float | None = ..., + end_timestamp_cycles: builtins.float | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "end_timestamp_cycles", b"end_timestamp_cycles", "id", b"id", "start_timestamp_cycles", b"start_timestamp_cycles" + ], + ) -> None: ... + + INSTRUCTIONS_FIELD_NUMBER: builtins.int + COMPUTATION_ID_FIELD_NUMBER: builtins.int + HLO_MODULE_FIELD_NUMBER: builtins.int + CYCLES_PER_MICROSECOND_FIELD_NUMBER: builtins.int + computation_id: builtins.int + """Computation id (matches the id in HloComputationProto).""" + cycles_per_microsecond: builtins.int + @property + def instructions( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScheduleProto.Instruction]: ... + @property + def hlo_module(self) -> tensorflow.compiler.xla.service.hlo_pb2.HloModuleProto: ... + def __init__( + self, + *, + instructions: collections.abc.Iterable[global___ScheduleProto.Instruction] | None = ..., + computation_id: builtins.int | None = ..., + hlo_module: tensorflow.compiler.xla.service.hlo_pb2.HloModuleProto | None = ..., + cycles_per_microsecond: builtins.int | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing.Literal["hlo_module", b"hlo_module"]) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "computation_id", + b"computation_id", + "cycles_per_microsecond", + b"cycles_per_microsecond", + "hlo_module", + b"hlo_module", + "instructions", + b"instructions", + ], + ) -> None: ... + +global___ScheduleProto = ScheduleProto diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/config/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/config/__init__.pyi new file mode 100644 index 0000000000..1eb72d000f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/config/__init__.pyi @@ -0,0 +1,12 @@ +from typing import NamedTuple + +from tensorflow.config import experimental as experimental + +class PhysicalDevice(NamedTuple): + name: str + device_type: str + +def list_physical_devices(device_type: None | str = None) -> list[PhysicalDevice]: ... +def get_visible_devices(device_type: None | str = None) -> list[PhysicalDevice]: ... +def set_visible_devices(devices: list[PhysicalDevice] | PhysicalDevice, device_type: None | str = None) -> None: ... +def __getattr__(name: str): ... # incomplete module diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/config/experimental.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/config/experimental.pyi new file mode 100644 index 0000000000..1cb74feba7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/config/experimental.pyi @@ -0,0 +1,16 @@ +import typing_extensions +from typing import TypedDict + +from tensorflow.config import PhysicalDevice + +class _MemoryInfo(TypedDict): + current: int + peak: int + +def get_memory_info(device: str) -> _MemoryInfo: ... +def reset_memory_stats(device: str) -> None: ... 
+@typing_extensions.deprecated("This function is deprecated in favor of tf.config.experimental.get_memory_info") +def get_memory_usage(device: PhysicalDevice) -> int: ... +def get_memory_growth(device: PhysicalDevice) -> bool: ... +def set_memory_growth(device: PhysicalDevice, enable: bool) -> None: ... +def __getattr__(name: str): ... # incomplete module diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/example/example_parser_configuration_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/example/example_parser_configuration_pb2.pyi new file mode 100644 index 0000000000..9ffaef3d17 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/example/example_parser_configuration_pb2.pyi @@ -0,0 +1,153 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Protocol messages for describing the configuration of the ExampleParserOp.""" + +import builtins +import collections.abc +import typing + +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import tensorflow.core.framework.tensor_pb2 +import tensorflow.core.framework.tensor_shape_pb2 +import tensorflow.core.framework.types_pb2 + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class VarLenFeatureProto(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DTYPE_FIELD_NUMBER: builtins.int + VALUES_OUTPUT_TENSOR_NAME_FIELD_NUMBER: builtins.int + INDICES_OUTPUT_TENSOR_NAME_FIELD_NUMBER: builtins.int + SHAPES_OUTPUT_TENSOR_NAME_FIELD_NUMBER: builtins.int + dtype: tensorflow.core.framework.types_pb2.DataType.ValueType + values_output_tensor_name: builtins.str + indices_output_tensor_name: builtins.str + shapes_output_tensor_name: builtins.str + def __init__( + self, + *, + dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ..., + 
values_output_tensor_name: builtins.str | None = ..., + indices_output_tensor_name: builtins.str | None = ..., + shapes_output_tensor_name: builtins.str | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "dtype", + b"dtype", + "indices_output_tensor_name", + b"indices_output_tensor_name", + "shapes_output_tensor_name", + b"shapes_output_tensor_name", + "values_output_tensor_name", + b"values_output_tensor_name", + ], + ) -> None: ... + +global___VarLenFeatureProto = VarLenFeatureProto + +@typing.final +class FixedLenFeatureProto(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DTYPE_FIELD_NUMBER: builtins.int + SHAPE_FIELD_NUMBER: builtins.int + DEFAULT_VALUE_FIELD_NUMBER: builtins.int + VALUES_OUTPUT_TENSOR_NAME_FIELD_NUMBER: builtins.int + dtype: tensorflow.core.framework.types_pb2.DataType.ValueType + values_output_tensor_name: builtins.str + @property + def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ... + @property + def default_value(self) -> tensorflow.core.framework.tensor_pb2.TensorProto: ... + def __init__( + self, + *, + dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ..., + shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ..., + default_value: tensorflow.core.framework.tensor_pb2.TensorProto | None = ..., + values_output_tensor_name: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["default_value", b"default_value", "shape", b"shape"]) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "default_value", + b"default_value", + "dtype", + b"dtype", + "shape", + b"shape", + "values_output_tensor_name", + b"values_output_tensor_name", + ], + ) -> None: ... 
+ +global___FixedLenFeatureProto = FixedLenFeatureProto + +@typing.final +class FeatureConfiguration(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FIXED_LEN_FEATURE_FIELD_NUMBER: builtins.int + VAR_LEN_FEATURE_FIELD_NUMBER: builtins.int + @property + def fixed_len_feature(self) -> global___FixedLenFeatureProto: ... + @property + def var_len_feature(self) -> global___VarLenFeatureProto: ... + def __init__( + self, + *, + fixed_len_feature: global___FixedLenFeatureProto | None = ..., + var_len_feature: global___VarLenFeatureProto | None = ..., + ) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "config", b"config", "fixed_len_feature", b"fixed_len_feature", "var_len_feature", b"var_len_feature" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "config", b"config", "fixed_len_feature", b"fixed_len_feature", "var_len_feature", b"var_len_feature" + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["config", b"config"] + ) -> typing.Literal["fixed_len_feature", "var_len_feature"] | None: ... + +global___FeatureConfiguration = FeatureConfiguration + +@typing.final +class ExampleParserConfiguration(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class FeatureMapEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> global___FeatureConfiguration: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___FeatureConfiguration | None = ...) -> None: ... + def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + FEATURE_MAP_FIELD_NUMBER: builtins.int + @property + def feature_map(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FeatureConfiguration]: ... + def __init__( + self, *, feature_map: collections.abc.Mapping[builtins.str, global___FeatureConfiguration] | None = ... + ) -> None: ... + def ClearField(self, field_name: typing.Literal["feature_map", b"feature_map"]) -> None: ... + +global___ExampleParserConfiguration = ExampleParserConfiguration diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/example/example_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/example/example_pb2.pyi new file mode 100644 index 0000000000..63cf2c14f1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/example/example_pb2.pyi @@ -0,0 +1,330 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Protocol messages for describing input data Examples for machine learning +model training or inference. +""" + +import builtins +import typing + +import google.protobuf.descriptor +import google.protobuf.message +import tensorflow.core.example.feature_pb2 + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class Example(google.protobuf.message.Message): + """An Example is a mostly-normalized data format for storing data for + training and inference. It contains a key-value store (features); where + each key (string) maps to a Feature message (which is oneof packed BytesList, + FloatList, or Int64List). This flexible and compact format allows the + storage of large amounts of typed data, but requires that the data shape + and use be determined by the configuration files and parsers that are used to + read and write this format. That is, the Example is mostly *not* a + self-describing format. 
In TensorFlow, Examples are read in row-major + format, so any configuration that describes data with rank-2 or above + should keep this in mind. If you flatten a matrix into a FloatList it should + be stored as [ row 0 ... row 1 ... row M-1 ] + + An Example for a movie recommendation application: + features { + feature { + key: "age" + value { float_list { + value: 29.0 + }} + } + feature { + key: "movie" + value { bytes_list { + value: "The Shawshank Redemption" + value: "Fight Club" + }} + } + feature { + key: "movie_ratings" + value { float_list { + value: 9.0 + value: 9.7 + }} + } + feature { + key: "suggestion" + value { bytes_list { + value: "Inception" + }} + } + # Note that this feature exists to be used as a label in training. + # E.g., if training a logistic regression model to predict purchase + # probability in our learning tool we would set the label feature to + # "suggestion_purchased". + feature { + key: "suggestion_purchased" + value { float_list { + value: 1.0 + }} + } + # Similar to "suggestion_purchased" above this feature exists to be used + # as a label in training. + # E.g., if training a linear regression model to predict purchase + # price in our learning tool we would set the label feature to + # "purchase_price". + feature { + key: "purchase_price" + value { float_list { + value: 9.99 + }} + } + } + + A conformant Example data set obeys the following conventions: + - If a Feature K exists in one example with data type T, it must be of + type T in all other examples when present. It may be omitted. + - The number of instances of Feature K list data may vary across examples, + depending on the requirements of the model. + - If a Feature K doesn't exist in an example, a K-specific default will be + used, if configured. + - If a Feature K exists in an example but contains no items, the intent + is considered to be an empty tensor and no default will be used. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURES_FIELD_NUMBER: builtins.int + @property + def features(self) -> tensorflow.core.example.feature_pb2.Features: ... + def __init__(self, *, features: tensorflow.core.example.feature_pb2.Features | None = ...) -> None: ... + def HasField(self, field_name: typing.Literal["features", b"features"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["features", b"features"]) -> None: ... + +global___Example = Example + +@typing.final +class SequenceExample(google.protobuf.message.Message): + """A SequenceExample is an Example representing one or more sequences, and + some context. The context contains features which apply to the entire + example. The feature_lists contain a key, value map where each key is + associated with a repeated set of Features (a FeatureList). + A FeatureList thus represents the values of a feature identified by its key + over time / frames. + + Below is a SequenceExample for a movie recommendation application recording a + sequence of ratings by a user. The time-independent features ("locale", + "age", "favorites") describing the user are part of the context. The sequence + of movies the user rated are part of the feature_lists. For each movie in the + sequence we have information on its name and actors and the user's rating. + This information is recorded in three separate feature_list(s). + In the example below there are only two movies. All three feature_list(s), + namely "movie_ratings", "movie_names", and "actors" have a feature value for + both movies. Note, that "actors" is itself a bytes_list with multiple + strings per movie. 
+ + context: { + feature: { + key : "locale" + value: { + bytes_list: { + value: [ "pt_BR" ] + } + } + } + feature: { + key : "age" + value: { + float_list: { + value: [ 19.0 ] + } + } + } + feature: { + key : "favorites" + value: { + bytes_list: { + value: [ "Majesty Rose", "Savannah Outen", "One Direction" ] + } + } + } + } + feature_lists: { + feature_list: { + key : "movie_ratings" + value: { + feature: { + float_list: { + value: [ 4.5 ] + } + } + feature: { + float_list: { + value: [ 5.0 ] + } + } + } + } + feature_list: { + key : "movie_names" + value: { + feature: { + bytes_list: { + value: [ "The Shawshank Redemption" ] + } + } + feature: { + bytes_list: { + value: [ "Fight Club" ] + } + } + } + } + feature_list: { + key : "actors" + value: { + feature: { + bytes_list: { + value: [ "Tim Robbins", "Morgan Freeman" ] + } + } + feature: { + bytes_list: { + value: [ "Brad Pitt", "Edward Norton", "Helena Bonham Carter" ] + } + } + } + } + } + + A conformant SequenceExample data set obeys the following conventions: + + Context: + - All conformant context features K must obey the same conventions as + a conformant Example's features (see above). + Feature lists: + - A FeatureList L may be missing in an example; it is up to the + parser configuration to determine if this is allowed or considered + an empty list (zero length). + - If a FeatureList L exists, it may be empty (zero length). + - If a FeatureList L is non-empty, all features within the FeatureList + must have the same data type T. Even across SequenceExamples, the type T + of the FeatureList identified by the same key must be the same. An entry + without any values may serve as an empty feature. + - If a FeatureList L is non-empty, it is up to the parser configuration + to determine if all features within the FeatureList must + have the same size. The same holds for this FeatureList across multiple + examples. 
+ - For sequence modeling, e.g.: + http://colah.github.io/posts/2015-08-Understanding-LSTMs/ + https://github.com/tensorflow/nmt + the feature lists represent a sequence of frames. + In this scenario, all FeatureLists in a SequenceExample have the same + number of Feature messages, so that the ith element in each FeatureList + is part of the ith frame (or time step). + Examples of conformant and non-conformant examples' FeatureLists: + + Conformant FeatureLists: + feature_lists: { feature_list: { + key: "movie_ratings" + value: { feature: { float_list: { value: [ 4.5 ] } } + feature: { float_list: { value: [ 5.0 ] } } } + } } + + Non-conformant FeatureLists (mismatched types): + feature_lists: { feature_list: { + key: "movie_ratings" + value: { feature: { float_list: { value: [ 4.5 ] } } + feature: { int64_list: { value: [ 5 ] } } } + } } + + Conditionally conformant FeatureLists, the parser configuration determines + if the feature sizes must match: + feature_lists: { feature_list: { + key: "movie_ratings" + value: { feature: { float_list: { value: [ 4.5 ] } } + feature: { float_list: { value: [ 5.0, 6.0 ] } } } + } } + + Conformant pair of SequenceExample + feature_lists: { feature_list: { + key: "movie_ratings" + value: { feature: { float_list: { value: [ 4.5 ] } } + feature: { float_list: { value: [ 5.0 ] } } } + } } + and: + feature_lists: { feature_list: { + key: "movie_ratings" + value: { feature: { float_list: { value: [ 4.5 ] } } + feature: { float_list: { value: [ 5.0 ] } } + feature: { float_list: { value: [ 2.0 ] } } } + } } + + Conformant pair of SequenceExample + feature_lists: { feature_list: { + key: "movie_ratings" + value: { feature: { float_list: { value: [ 4.5 ] } } + feature: { float_list: { value: [ 5.0 ] } } } + } } + and: + feature_lists: { feature_list: { + key: "movie_ratings" + value: { } + } } + + Conditionally conformant pair of SequenceExample, the parser configuration + determines if the second feature_lists is consistent 
(zero-length) or + invalid (missing "movie_ratings"): + feature_lists: { feature_list: { + key: "movie_ratings" + value: { feature: { float_list: { value: [ 4.5 ] } } + feature: { float_list: { value: [ 5.0 ] } } } + } } + and: + feature_lists: { } + + Non-conformant pair of SequenceExample (mismatched types) + feature_lists: { feature_list: { + key: "movie_ratings" + value: { feature: { float_list: { value: [ 4.5 ] } } + feature: { float_list: { value: [ 5.0 ] } } } + } } + and: + feature_lists: { feature_list: { + key: "movie_ratings" + value: { feature: { int64_list: { value: [ 4 ] } } + feature: { int64_list: { value: [ 5 ] } } + feature: { int64_list: { value: [ 2 ] } } } + } } + + Conditionally conformant pair of SequenceExample; the parser configuration + determines if the feature sizes must match: + feature_lists: { feature_list: { + key: "movie_ratings" + value: { feature: { float_list: { value: [ 4.5 ] } } + feature: { float_list: { value: [ 5.0 ] } } } + } } + and: + feature_lists: { feature_list: { + key: "movie_ratings" + value: { feature: { float_list: { value: [ 4.0 ] } } + feature: { float_list: { value: [ 5.0, 3.0 ] } } + } } + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CONTEXT_FIELD_NUMBER: builtins.int + FEATURE_LISTS_FIELD_NUMBER: builtins.int + @property + def context(self) -> tensorflow.core.example.feature_pb2.Features: ... + @property + def feature_lists(self) -> tensorflow.core.example.feature_pb2.FeatureLists: ... + def __init__( + self, + *, + context: tensorflow.core.example.feature_pb2.Features | None = ..., + feature_lists: tensorflow.core.example.feature_pb2.FeatureLists | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["context", b"context", "feature_lists", b"feature_lists"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["context", b"context", "feature_lists", b"feature_lists"]) -> None: ... 
+ +global___SequenceExample = SequenceExample diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/example/feature_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/example/feature_pb2.pyi new file mode 100644 index 0000000000..9bec4dd165 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/example/feature_pb2.pyi @@ -0,0 +1,222 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Protocol messages for describing features for machine learning model +training or inference. + +There are three base Feature types: + - bytes + - float + - int64 + +A Feature contains Lists which may hold zero or more values. These +lists are the base values BytesList, FloatList, Int64List. + +Features are organized into categories by name. The Features message +contains the mapping from name to Feature. + +Example Features for a movie recommendation application: + feature { + key: "age" + value { float_list { + value: 29.0 + }} + } + feature { + key: "movie" + value { bytes_list { + value: "The Shawshank Redemption" + value: "Fight Club" + }} + } + feature { + key: "movie_ratings" + value { float_list { + value: 9.0 + value: 9.7 + }} + } + feature { + key: "suggestion" + value { bytes_list { + value: "Inception" + }} + } + feature { + key: "suggestion_purchased" + value { int64_list { + value: 1 + }} + } + feature { + key: "purchase_price" + value { float_list { + value: 9.99 + }} + } +""" + +import builtins +import collections.abc +import typing + +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class BytesList(google.protobuf.message.Message): + """LINT.IfChange + Containers to hold repeated fundamental values. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUE_FIELD_NUMBER: builtins.int + @property + def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: ... + def __init__(self, *, value: collections.abc.Iterable[builtins.bytes] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... + +global___BytesList = BytesList + +@typing.final +class FloatList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUE_FIELD_NUMBER: builtins.int + @property + def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ... + def __init__(self, *, value: collections.abc.Iterable[builtins.float] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... + +global___FloatList = FloatList + +@typing.final +class Int64List(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUE_FIELD_NUMBER: builtins.int + @property + def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__(self, *, value: collections.abc.Iterable[builtins.int] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... + +global___Int64List = Int64List + +@typing.final +class Feature(google.protobuf.message.Message): + """Containers for non-sequential data.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BYTES_LIST_FIELD_NUMBER: builtins.int + FLOAT_LIST_FIELD_NUMBER: builtins.int + INT64_LIST_FIELD_NUMBER: builtins.int + @property + def bytes_list(self) -> global___BytesList: ... + @property + def float_list(self) -> global___FloatList: ... + @property + def int64_list(self) -> global___Int64List: ... 
+ def __init__( + self, + *, + bytes_list: global___BytesList | None = ..., + float_list: global___FloatList | None = ..., + int64_list: global___Int64List | None = ..., + ) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "bytes_list", b"bytes_list", "float_list", b"float_list", "int64_list", b"int64_list", "kind", b"kind" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "bytes_list", b"bytes_list", "float_list", b"float_list", "int64_list", b"int64_list", "kind", b"kind" + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["kind", b"kind"] + ) -> typing.Literal["bytes_list", "float_list", "int64_list"] | None: ... + +global___Feature = Feature + +@typing.final +class Features(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class FeatureEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> global___Feature: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___Feature | None = ...) -> None: ... + def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + FEATURE_FIELD_NUMBER: builtins.int + @property + def feature(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___Feature]: + """Map from feature name to feature.""" + + def __init__(self, *, feature: collections.abc.Mapping[builtins.str, global___Feature] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["feature", b"feature"]) -> None: ... + +global___Features = Features + +@typing.final +class FeatureList(google.protobuf.message.Message): + """Containers for sequential data. 
+ + A FeatureList contains lists of Features. These may hold zero or more + Feature values. + + FeatureLists are organized into categories by name. The FeatureLists message + contains the mapping from name to FeatureList. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_FIELD_NUMBER: builtins.int + @property + def feature(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Feature]: ... + def __init__(self, *, feature: collections.abc.Iterable[global___Feature] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["feature", b"feature"]) -> None: ... + +global___FeatureList = FeatureList + +@typing.final +class FeatureLists(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class FeatureListEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> global___FeatureList: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___FeatureList | None = ...) -> None: ... + def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + FEATURE_LIST_FIELD_NUMBER: builtins.int + @property + def feature_list(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FeatureList]: + """Map from feature name to feature list.""" + + def __init__(self, *, feature_list: collections.abc.Mapping[builtins.str, global___FeatureList] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["feature_list", b"feature_list"]) -> None: ... 
+ +global___FeatureLists = FeatureLists diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/allocation_description_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/allocation_description_pb2.pyi new file mode 100644 index 0000000000..9f4e541f22 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/allocation_description_pb2.pyi @@ -0,0 +1,64 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" + +import builtins +import typing + +import google.protobuf.descriptor +import google.protobuf.message + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class AllocationDescription(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REQUESTED_BYTES_FIELD_NUMBER: builtins.int + ALLOCATED_BYTES_FIELD_NUMBER: builtins.int + ALLOCATOR_NAME_FIELD_NUMBER: builtins.int + ALLOCATION_ID_FIELD_NUMBER: builtins.int + HAS_SINGLE_REFERENCE_FIELD_NUMBER: builtins.int + PTR_FIELD_NUMBER: builtins.int + requested_bytes: builtins.int + """Total number of bytes requested""" + allocated_bytes: builtins.int + """Total number of bytes allocated if known""" + allocator_name: builtins.str + """Name of the allocator used""" + allocation_id: builtins.int + """Identifier of the allocated buffer if known""" + has_single_reference: builtins.bool + """Set if this tensor only has one remaining reference""" + ptr: builtins.int + """Address of the allocation.""" + def __init__( + self, + *, + requested_bytes: builtins.int | None = ..., + allocated_bytes: builtins.int | None = ..., + allocator_name: builtins.str | None = ..., + allocation_id: builtins.int | None = ..., + has_single_reference: builtins.bool | None = ..., + ptr: builtins.int | None = ..., + ) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "allocated_bytes", + b"allocated_bytes", + "allocation_id", + b"allocation_id", + "allocator_name", + b"allocator_name", + "has_single_reference", + b"has_single_reference", + "ptr", + b"ptr", + "requested_bytes", + b"requested_bytes", + ], + ) -> None: ... + +global___AllocationDescription = AllocationDescription diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/api_def_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/api_def_pb2.pyi new file mode 100644 index 0000000000..b3d8c42423 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/api_def_pb2.pyi @@ -0,0 +1,312 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Defines the text format for including per-op API definition and +overrides for client language op code generators. +""" + +import builtins +import collections.abc +import sys +import typing + +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import tensorflow.core.framework.attr_value_pb2 + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class ApiDef(google.protobuf.message.Message): + """Used to specify and override the default API & behavior in the + generated code for client languages, from what you would get from + the OpDef alone. There will be a set of ApiDefs that are common + to all client languages, and another set per client language. + The per-client-language ApiDefs will inherit values from the + common ApiDefs which it can either replace or modify. 
+ + We separate the API definition from the OpDef so we can evolve the + API while remaining backwards compatible when interpreting old + graphs. Overrides go in an "api_def.pbtxt" file with a text-format + ApiDefs message. + + WARNING: Be *very* careful changing the API for any existing op -- + you can change the semantics of existing code. These changes may + need to wait until a major release of TensorFlow to avoid breaking + our compatibility promises. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _Visibility: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _VisibilityEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ApiDef._Visibility.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + DEFAULT_VISIBILITY: ApiDef._Visibility.ValueType # 0 + """Normally this is "VISIBLE" unless you are inheriting a + different value from another ApiDef. + """ + VISIBLE: ApiDef._Visibility.ValueType # 1 + """Publicly visible in the API.""" + SKIP: ApiDef._Visibility.ValueType # 2 + """Do not include this op in the generated API. If visibility is + set to 'SKIP', other fields are ignored for this op. + """ + HIDDEN: ApiDef._Visibility.ValueType # 3 + """Hide this op by putting it into an internal namespace (or whatever + is appropriate in the target language). + """ + + class Visibility(_Visibility, metaclass=_VisibilityEnumTypeWrapper): ... + DEFAULT_VISIBILITY: ApiDef.Visibility.ValueType # 0 + """Normally this is "VISIBLE" unless you are inheriting a + different value from another ApiDef. + """ + VISIBLE: ApiDef.Visibility.ValueType # 1 + """Publicly visible in the API.""" + SKIP: ApiDef.Visibility.ValueType # 2 + """Do not include this op in the generated API. If visibility is + set to 'SKIP', other fields are ignored for this op. 
+ """ + HIDDEN: ApiDef.Visibility.ValueType # 3 + """Hide this op by putting it into an internal namespace (or whatever + is appropriate in the target language). + """ + + @typing.final + class Endpoint(google.protobuf.message.Message): + """If you specify any endpoint, this will replace all of the + inherited endpoints. The first endpoint should be the + "canonical" endpoint, and should not be deprecated (unless all + endpoints are deprecated). + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + DEPRECATED_FIELD_NUMBER: builtins.int + DEPRECATION_VERSION_FIELD_NUMBER: builtins.int + name: builtins.str + """Name should be either like "CamelCaseName" or + "Package.CamelCaseName". Client-language-specific ApiDefs may + use a snake_case convention instead of CamelCase. + """ + deprecated: builtins.bool + """Set if this endpoint is deprecated. If set to true, a message suggesting + to use a non-deprecated endpoint instead will be printed. If all + endpoints are deprecated, set deprecation_message in ApiDef instead. + """ + deprecation_version: builtins.int + """Major version when an endpoint will be deleted. For e.g. set this + value to 2 if endpoint should be removed in TensorFlow 2.0 and + deprecated in versions before that. + """ + def __init__( + self, + *, + name: builtins.str | None = ..., + deprecated: builtins.bool | None = ..., + deprecation_version: builtins.int | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "deprecated", b"deprecated", "deprecation_version", b"deprecation_version", "name", b"name" + ], + ) -> None: ... 
+ + @typing.final + class Arg(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + RENAME_TO_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + name: builtins.str + rename_to: builtins.str + """Change the name used to access this arg in the API from what + is used in the GraphDef. Note that these names in `backticks` + will also be replaced in the summary & description fields. + """ + description: builtins.str + """Note: this will replace any inherited arg doc. There is no + current way of modifying arg descriptions (other than replacing + them entirely) as can be done with op descriptions. + """ + def __init__( + self, *, name: builtins.str | None = ..., rename_to: builtins.str | None = ..., description: builtins.str | None = ... + ) -> None: ... + def ClearField( + self, field_name: typing.Literal["description", b"description", "name", b"name", "rename_to", b"rename_to"] + ) -> None: ... + + @typing.final + class Attr(google.protobuf.message.Message): + """Description of the graph-construction-time configuration of this + Op. That is to say, this describes the attr fields that will + be specified in the NodeDef. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + RENAME_TO_FIELD_NUMBER: builtins.int + DEFAULT_VALUE_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + name: builtins.str + rename_to: builtins.str + """Change the name used to access this attr in the API from what + is used in the GraphDef. Note that these names in `backticks` + will also be replaced in the summary & description fields. + """ + description: builtins.str + """Note: this will replace any inherited attr doc, there is no current + way of modifying attr descriptions as can be done with op descriptions. 
+ """ + @property + def default_value(self) -> tensorflow.core.framework.attr_value_pb2.AttrValue: + """Specify a new default value to use for this attr. This default + will be used when creating new graphs, as opposed to the + default in the OpDef, which will be used when interpreting old + GraphDefs. + """ + + def __init__( + self, + *, + name: builtins.str | None = ..., + rename_to: builtins.str | None = ..., + default_value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ..., + description: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["default_value", b"default_value"]) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "default_value", b"default_value", "description", b"description", "name", b"name", "rename_to", b"rename_to" + ], + ) -> None: ... + + GRAPH_OP_NAME_FIELD_NUMBER: builtins.int + DEPRECATION_MESSAGE_FIELD_NUMBER: builtins.int + DEPRECATION_VERSION_FIELD_NUMBER: builtins.int + VISIBILITY_FIELD_NUMBER: builtins.int + ENDPOINT_FIELD_NUMBER: builtins.int + IN_ARG_FIELD_NUMBER: builtins.int + OUT_ARG_FIELD_NUMBER: builtins.int + ARG_ORDER_FIELD_NUMBER: builtins.int + ATTR_FIELD_NUMBER: builtins.int + SUMMARY_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + DESCRIPTION_PREFIX_FIELD_NUMBER: builtins.int + DESCRIPTION_SUFFIX_FIELD_NUMBER: builtins.int + graph_op_name: builtins.str + """Name of the op (in the OpDef) to specify the API for.""" + deprecation_message: builtins.str + """If this op is deprecated, set deprecation message to the message + that should be logged when this op is used. + The message should indicate alternative op to use, if any. + """ + deprecation_version: builtins.int + """Major version when the op will be deleted. For e.g. set this + value to 2 if op API should be removed in TensorFlow 2.0 and + deprecated in versions before that. 
+ """ + visibility: global___ApiDef.Visibility.ValueType + summary: builtins.str + """One-line human-readable description of what the Op does.""" + description: builtins.str + """Additional, longer human-readable description of what the Op does.""" + description_prefix: builtins.str + """Modify an existing/inherited description by adding text to the beginning + or end. + """ + description_suffix: builtins.str + @property + def endpoint(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ApiDef.Endpoint]: ... + @property + def in_arg(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ApiDef.Arg]: ... + @property + def out_arg(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ApiDef.Arg]: ... + @property + def arg_order(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """List of original in_arg names to specify new argument order. + Length of arg_order should be either empty to keep current order + or match size of in_arg. + """ + + @property + def attr(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ApiDef.Attr]: ... 
+ def __init__( + self, + *, + graph_op_name: builtins.str | None = ..., + deprecation_message: builtins.str | None = ..., + deprecation_version: builtins.int | None = ..., + visibility: global___ApiDef.Visibility.ValueType | None = ..., + endpoint: collections.abc.Iterable[global___ApiDef.Endpoint] | None = ..., + in_arg: collections.abc.Iterable[global___ApiDef.Arg] | None = ..., + out_arg: collections.abc.Iterable[global___ApiDef.Arg] | None = ..., + arg_order: collections.abc.Iterable[builtins.str] | None = ..., + attr: collections.abc.Iterable[global___ApiDef.Attr] | None = ..., + summary: builtins.str | None = ..., + description: builtins.str | None = ..., + description_prefix: builtins.str | None = ..., + description_suffix: builtins.str | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "arg_order", + b"arg_order", + "attr", + b"attr", + "deprecation_message", + b"deprecation_message", + "deprecation_version", + b"deprecation_version", + "description", + b"description", + "description_prefix", + b"description_prefix", + "description_suffix", + b"description_suffix", + "endpoint", + b"endpoint", + "graph_op_name", + b"graph_op_name", + "in_arg", + b"in_arg", + "out_arg", + b"out_arg", + "summary", + b"summary", + "visibility", + b"visibility", + ], + ) -> None: ... + +global___ApiDef = ApiDef + +@typing.final +class ApiDefs(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + OP_FIELD_NUMBER: builtins.int + @property + def op(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ApiDef]: ... + def __init__(self, *, op: collections.abc.Iterable[global___ApiDef] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["op", b"op"]) -> None: ... 
+ +global___ApiDefs = ApiDefs diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/attr_value_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/attr_value_pb2.pyi new file mode 100644 index 0000000000..bc36030c51 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/attr_value_pb2.pyi @@ -0,0 +1,274 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" + +import builtins +import collections.abc +import typing + +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import tensorflow.core.framework.tensor_pb2 +import tensorflow.core.framework.tensor_shape_pb2 +import tensorflow.core.framework.types_pb2 + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class AttrValue(google.protobuf.message.Message): + """Protocol buffer representing the value for an attr used to configure an Op. + Comment indicates the corresponding attr type. Only the field matching the + attr type may be filled. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class ListValue(google.protobuf.message.Message): + """LINT.IfChange""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + S_FIELD_NUMBER: builtins.int + I_FIELD_NUMBER: builtins.int + F_FIELD_NUMBER: builtins.int + B_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + SHAPE_FIELD_NUMBER: builtins.int + TENSOR_FIELD_NUMBER: builtins.int + FUNC_FIELD_NUMBER: builtins.int + @property + def s(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: + """ "list(string)" """ + + @property + def i(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """ "list(int)" """ + + @property + def f(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: + """ "list(float)" """ + + @property + def b(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: + """ "list(bool)" """ + + @property + def type( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[ + tensorflow.core.framework.types_pb2.DataType.ValueType + ]: + """ "list(type)" """ + + @property + def shape( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto + ]: + """ "list(shape)" """ + + @property + def tensor( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.framework.tensor_pb2.TensorProto + ]: + """ "list(tensor)" """ + + @property + def func(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___NameAttrList]: + """ "list(attr)" """ + + def __init__( + self, + *, + s: collections.abc.Iterable[builtins.bytes] | None = ..., + i: collections.abc.Iterable[builtins.int] | None = ..., + f: collections.abc.Iterable[builtins.float] | None = ..., + b: 
collections.abc.Iterable[builtins.bool] | None = ..., + type: collections.abc.Iterable[tensorflow.core.framework.types_pb2.DataType.ValueType] | None = ..., + shape: collections.abc.Iterable[tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto] | None = ..., + tensor: collections.abc.Iterable[tensorflow.core.framework.tensor_pb2.TensorProto] | None = ..., + func: collections.abc.Iterable[global___NameAttrList] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "b", + b"b", + "f", + b"f", + "func", + b"func", + "i", + b"i", + "s", + b"s", + "shape", + b"shape", + "tensor", + b"tensor", + "type", + b"type", + ], + ) -> None: ... + + S_FIELD_NUMBER: builtins.int + I_FIELD_NUMBER: builtins.int + F_FIELD_NUMBER: builtins.int + B_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + SHAPE_FIELD_NUMBER: builtins.int + TENSOR_FIELD_NUMBER: builtins.int + LIST_FIELD_NUMBER: builtins.int + FUNC_FIELD_NUMBER: builtins.int + PLACEHOLDER_FIELD_NUMBER: builtins.int + s: builtins.bytes + """"string" """ + i: builtins.int + """"int" """ + f: builtins.float + """"float" """ + b: builtins.bool + """"bool" """ + type: tensorflow.core.framework.types_pb2.DataType.ValueType + """"type" """ + placeholder: builtins.str + """This is a placeholder only used in nodes defined inside a + function. It indicates the attr value will be supplied when + the function is instantiated. For example, let us suppose a + node "N" in function "FN". "N" has an attr "A" with value + placeholder = "foo". When FN is instantiated with attr "foo" + set to "bar", the instantiated node N's attr A will have been + given the value "bar". 
+ """ + @property + def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: + """ "shape" """ + + @property + def tensor(self) -> tensorflow.core.framework.tensor_pb2.TensorProto: + """ "tensor" """ + + @property + def list(self) -> global___AttrValue.ListValue: + """any "list(...)" """ + + @property + def func(self) -> global___NameAttrList: + """ "func" represents a function. func.name is a function's name or + a primitive op's name. func.attr.first is the name of an attr + defined for that function. func.attr.second is the value for + that attr in the instantiation. + """ + + def __init__( + self, + *, + s: builtins.bytes | None = ..., + i: builtins.int | None = ..., + f: builtins.float | None = ..., + b: builtins.bool | None = ..., + type: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ..., + shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ..., + tensor: tensorflow.core.framework.tensor_pb2.TensorProto | None = ..., + list: global___AttrValue.ListValue | None = ..., + func: global___NameAttrList | None = ..., + placeholder: builtins.str | None = ..., + ) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "b", + b"b", + "f", + b"f", + "func", + b"func", + "i", + b"i", + "list", + b"list", + "placeholder", + b"placeholder", + "s", + b"s", + "shape", + b"shape", + "tensor", + b"tensor", + "type", + b"type", + "value", + b"value", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "b", + b"b", + "f", + b"f", + "func", + b"func", + "i", + b"i", + "list", + b"list", + "placeholder", + b"placeholder", + "s", + b"s", + "shape", + b"shape", + "tensor", + b"tensor", + "type", + b"type", + "value", + b"value", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["value", b"value"] + ) -> typing.Literal["s", "i", "f", "b", "type", "shape", "tensor", "list", "func", "placeholder"] | None: ... 
+ +global___AttrValue = AttrValue + +@typing.final +class NameAttrList(google.protobuf.message.Message): + """A list of attr names and their values. The whole list is attached + with a string name. E.g., MatMul[T=float]. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class AttrEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> global___AttrValue: ... + def __init__(self, *, key: builtins.str | None = ..., value: global___AttrValue | None = ...) -> None: ... + def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + ATTR_FIELD_NUMBER: builtins.int + name: builtins.str + @property + def attr(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___AttrValue]: ... + def __init__( + self, *, name: builtins.str | None = ..., attr: collections.abc.Mapping[builtins.str, global___AttrValue] | None = ... + ) -> None: ... + def ClearField(self, field_name: typing.Literal["attr", b"attr", "name", b"name"]) -> None: ... + +global___NameAttrList = NameAttrList diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/cost_graph_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/cost_graph_pb2.pyi new file mode 100644 index 0000000000..3ba7d01628 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/cost_graph_pb2.pyi @@ -0,0 +1,229 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" + +import builtins +import collections.abc +import typing + +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import tensorflow.core.framework.tensor_shape_pb2 +import tensorflow.core.framework.types_pb2 + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class CostGraphDef(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class Node(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class InputInfo(google.protobuf.message.Message): + """Inputs of this node. They must be executed before this node can be + executed. An input is a particular output of another node, specified + by the node id and the output index. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PRECEDING_NODE_FIELD_NUMBER: builtins.int + PRECEDING_PORT_FIELD_NUMBER: builtins.int + preceding_node: builtins.int + preceding_port: builtins.int + def __init__( + self, *, preceding_node: builtins.int | None = ..., preceding_port: builtins.int | None = ... + ) -> None: ... + def ClearField( + self, field_name: typing.Literal["preceding_node", b"preceding_node", "preceding_port", b"preceding_port"] + ) -> None: ... + + @typing.final + class OutputInfo(google.protobuf.message.Message): + """Outputs of this node.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SIZE_FIELD_NUMBER: builtins.int + ALIAS_INPUT_PORT_FIELD_NUMBER: builtins.int + SHAPE_FIELD_NUMBER: builtins.int + DTYPE_FIELD_NUMBER: builtins.int + size: builtins.int + alias_input_port: builtins.int + """If >= 0, the output is an alias of an input. Note that an alias input + may itself be an alias. The algorithm will therefore need to follow + those pointers. 
+ """ + dtype: tensorflow.core.framework.types_pb2.DataType.ValueType + @property + def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ... + def __init__( + self, + *, + size: builtins.int | None = ..., + alias_input_port: builtins.int | None = ..., + shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ..., + dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["shape", b"shape"]) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "alias_input_port", b"alias_input_port", "dtype", b"dtype", "shape", b"shape", "size", b"size" + ], + ) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + DEVICE_FIELD_NUMBER: builtins.int + ID_FIELD_NUMBER: builtins.int + INPUT_INFO_FIELD_NUMBER: builtins.int + OUTPUT_INFO_FIELD_NUMBER: builtins.int + TEMPORARY_MEMORY_SIZE_FIELD_NUMBER: builtins.int + PERSISTENT_MEMORY_SIZE_FIELD_NUMBER: builtins.int + HOST_TEMP_MEMORY_SIZE_FIELD_NUMBER: builtins.int + DEVICE_TEMP_MEMORY_SIZE_FIELD_NUMBER: builtins.int + DEVICE_PERSISTENT_MEMORY_SIZE_FIELD_NUMBER: builtins.int + COMPUTE_COST_FIELD_NUMBER: builtins.int + COMPUTE_TIME_FIELD_NUMBER: builtins.int + MEMORY_TIME_FIELD_NUMBER: builtins.int + IS_FINAL_FIELD_NUMBER: builtins.int + CONTROL_INPUT_FIELD_NUMBER: builtins.int + INACCURATE_FIELD_NUMBER: builtins.int + name: builtins.str + """The name of the node. Names are globally unique.""" + device: builtins.str + """The device of the node. Can be empty if the node is mapped to the + default partition or partitioning hasn't been run yet. + """ + id: builtins.int + """The id of the node. 
Node ids are only unique inside a partition.""" + temporary_memory_size: builtins.int + """Temporary memory used by this node.""" + persistent_memory_size: builtins.int + """Persistent memory used by this node.""" + host_temp_memory_size: builtins.int + device_temp_memory_size: builtins.int + device_persistent_memory_size: builtins.int + compute_cost: builtins.int + """Estimate of the computational cost of this node, in microseconds.""" + compute_time: builtins.int + """Analytical estimate of the computational cost of this node, in + microseconds. + """ + memory_time: builtins.int + """Analytical estimate of the memory access cost of this node, in + microseconds. + """ + is_final: builtins.bool + """If true, the output is permanent: it can't be discarded, because this + node is part of the "final output". Nodes may depend on final nodes. + """ + inaccurate: builtins.bool + """Are the costs inaccurate?""" + @property + def input_info( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.Node.InputInfo]: ... + @property + def output_info( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.Node.OutputInfo]: ... 
+ @property + def control_input(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Ids of the control inputs for this node.""" + + def __init__( + self, + *, + name: builtins.str | None = ..., + device: builtins.str | None = ..., + id: builtins.int | None = ..., + input_info: collections.abc.Iterable[global___CostGraphDef.Node.InputInfo] | None = ..., + output_info: collections.abc.Iterable[global___CostGraphDef.Node.OutputInfo] | None = ..., + temporary_memory_size: builtins.int | None = ..., + persistent_memory_size: builtins.int | None = ..., + host_temp_memory_size: builtins.int | None = ..., + device_temp_memory_size: builtins.int | None = ..., + device_persistent_memory_size: builtins.int | None = ..., + compute_cost: builtins.int | None = ..., + compute_time: builtins.int | None = ..., + memory_time: builtins.int | None = ..., + is_final: builtins.bool | None = ..., + control_input: collections.abc.Iterable[builtins.int] | None = ..., + inaccurate: builtins.bool | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "compute_cost", + b"compute_cost", + "compute_time", + b"compute_time", + "control_input", + b"control_input", + "device", + b"device", + "device_persistent_memory_size", + b"device_persistent_memory_size", + "device_temp_memory_size", + b"device_temp_memory_size", + "host_temp_memory_size", + b"host_temp_memory_size", + "id", + b"id", + "inaccurate", + b"inaccurate", + "input_info", + b"input_info", + "is_final", + b"is_final", + "memory_time", + b"memory_time", + "name", + b"name", + "output_info", + b"output_info", + "persistent_memory_size", + b"persistent_memory_size", + "temporary_memory_size", + b"temporary_memory_size", + ], + ) -> None: ... 
+ + @typing.final + class AggregatedCost(google.protobuf.message.Message): + """Total cost of this graph, typically used for balancing decisions.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + COST_FIELD_NUMBER: builtins.int + DIMENSION_FIELD_NUMBER: builtins.int + cost: builtins.float + """Aggregated cost value.""" + dimension: builtins.str + """Aggregated cost dimension (e.g. 'memory', 'compute', 'network').""" + def __init__(self, *, cost: builtins.float | None = ..., dimension: builtins.str | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["cost", b"cost", "dimension", b"dimension"]) -> None: ... + + NODE_FIELD_NUMBER: builtins.int + COST_FIELD_NUMBER: builtins.int + @property + def node(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.Node]: ... + @property + def cost( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.AggregatedCost]: ... + def __init__( + self, + *, + node: collections.abc.Iterable[global___CostGraphDef.Node] | None = ..., + cost: collections.abc.Iterable[global___CostGraphDef.AggregatedCost] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["cost", b"cost", "node", b"node"]) -> None: ... + +global___CostGraphDef = CostGraphDef diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/cpp_shape_inference_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/cpp_shape_inference_pb2.pyi new file mode 100644 index 0000000000..1bf1c41641 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/cpp_shape_inference_pb2.pyi @@ -0,0 +1,110 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" + +import builtins +import collections.abc +import typing + +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import tensorflow.core.framework.full_type_pb2 +import tensorflow.core.framework.tensor_shape_pb2 +import tensorflow.core.framework.types_pb2 + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class CppShapeInferenceResult(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class HandleShapeAndType(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SHAPE_FIELD_NUMBER: builtins.int + DTYPE_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + dtype: tensorflow.core.framework.types_pb2.DataType.ValueType + @property + def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ... + @property + def type(self) -> tensorflow.core.framework.full_type_pb2.FullTypeDef: ... + def __init__( + self, + *, + shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ..., + dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ..., + type: tensorflow.core.framework.full_type_pb2.FullTypeDef | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["shape", b"shape", "type", b"type"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["dtype", b"dtype", "shape", b"shape", "type", b"type"]) -> None: ... 
+ + @typing.final + class HandleData(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + IS_SET_FIELD_NUMBER: builtins.int + SHAPE_AND_TYPE_FIELD_NUMBER: builtins.int + is_set: builtins.bool + @property + def shape_and_type( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___CppShapeInferenceResult.HandleShapeAndType + ]: + """Only valid if .""" + + def __init__( + self, + *, + is_set: builtins.bool | None = ..., + shape_and_type: collections.abc.Iterable[global___CppShapeInferenceResult.HandleShapeAndType] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["is_set", b"is_set", "shape_and_type", b"shape_and_type"]) -> None: ... + + SHAPE_FIELD_NUMBER: builtins.int + HANDLE_DATA_FIELD_NUMBER: builtins.int + @property + def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ... + @property + def handle_data(self) -> global___CppShapeInferenceResult.HandleData: ... + def __init__( + self, + *, + shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ..., + handle_data: global___CppShapeInferenceResult.HandleData | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["handle_data", b"handle_data", "shape", b"shape"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["handle_data", b"handle_data", "shape", b"shape"]) -> None: ... + +global___CppShapeInferenceResult = CppShapeInferenceResult + +@typing.final +class CppShapeInferenceInputsNeeded(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INPUT_TENSORS_NEEDED_FIELD_NUMBER: builtins.int + INPUT_TENSORS_AS_SHAPES_NEEDED_FIELD_NUMBER: builtins.int + @property + def input_tensors_needed(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... 
+ @property + def input_tensors_as_shapes_needed( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__( + self, + *, + input_tensors_needed: collections.abc.Iterable[builtins.int] | None = ..., + input_tensors_as_shapes_needed: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "input_tensors_as_shapes_needed", b"input_tensors_as_shapes_needed", "input_tensors_needed", b"input_tensors_needed" + ], + ) -> None: ... + +global___CppShapeInferenceInputsNeeded = CppShapeInferenceInputsNeeded diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/dataset_metadata_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/dataset_metadata_pb2.pyi new file mode 100644 index 0000000000..afde78f967 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/dataset_metadata_pb2.pyi @@ -0,0 +1,25 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" + +import builtins +import typing + +import google.protobuf.descriptor +import google.protobuf.message + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class Metadata(google.protobuf.message.Message): + """next: 2""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.bytes + def __init__(self, *, name: builtins.bytes | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... 
+ +global___Metadata = Metadata diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/dataset_options_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/dataset_options_pb2.pyi new file mode 100644 index 0000000000..724249f3c5 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/dataset_options_pb2.pyi @@ -0,0 +1,720 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" + +import builtins +import collections.abc +import sys +import typing + +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import tensorflow.core.framework.model_pb2 + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _AutoShardPolicy: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _AutoShardPolicyEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_AutoShardPolicy.ValueType], builtins.type +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + AUTO: _AutoShardPolicy.ValueType # 0 + """AUTO: Attempts FILE-based sharding, falling back to DATA-based sharding.""" + FILE: _AutoShardPolicy.ValueType # 1 + """FILE: Shards by input files (i.e. each worker will get a set of files to + process). When this option is selected, make sure that there is at least as + many files as workers. If there are fewer input files than workers, a + runtime error will be raised. + """ + DATA: _AutoShardPolicy.ValueType # 2 + """DATA: Shards by elements produced by the dataset. Each worker will process + the whole dataset and discard the portion that is not for itself. 
Note that + for this mode to correctly partitions the dataset elements, the dataset + needs to produce elements in a deterministic order. + """ + HINT: _AutoShardPolicy.ValueType # 3 + """HINT: Looks for the presence of `shard(SHARD_HINT, ...)` which is treated + as a placeholder to replace with `shard(num_workers, worker_index)`. + """ + OFF: _AutoShardPolicy.ValueType # -1 + """OFF: No sharding will be performed.""" + +class AutoShardPolicy(_AutoShardPolicy, metaclass=_AutoShardPolicyEnumTypeWrapper): + """Represents the type of auto-sharding we enable.""" + +AUTO: AutoShardPolicy.ValueType # 0 +"""AUTO: Attempts FILE-based sharding, falling back to DATA-based sharding.""" +FILE: AutoShardPolicy.ValueType # 1 +"""FILE: Shards by input files (i.e. each worker will get a set of files to +process). When this option is selected, make sure that there is at least as +many files as workers. If there are fewer input files than workers, a +runtime error will be raised. +""" +DATA: AutoShardPolicy.ValueType # 2 +"""DATA: Shards by elements produced by the dataset. Each worker will process +the whole dataset and discard the portion that is not for itself. Note that +for this mode to correctly partitions the dataset elements, the dataset +needs to produce elements in a deterministic order. +""" +HINT: AutoShardPolicy.ValueType # 3 +"""HINT: Looks for the presence of `shard(SHARD_HINT, ...)` which is treated +as a placeholder to replace with `shard(num_workers, worker_index)`. 
+""" +OFF: AutoShardPolicy.ValueType # -1 +"""OFF: No sharding will be performed.""" +global___AutoShardPolicy = AutoShardPolicy + +class _ExternalStatePolicy: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _ExternalStatePolicyEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ExternalStatePolicy.ValueType], builtins.type +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + POLICY_WARN: _ExternalStatePolicy.ValueType # 0 + POLICY_IGNORE: _ExternalStatePolicy.ValueType # 1 + POLICY_FAIL: _ExternalStatePolicy.ValueType # 2 + +class ExternalStatePolicy(_ExternalStatePolicy, metaclass=_ExternalStatePolicyEnumTypeWrapper): + """Represents how to handle external state during serialization.""" + +POLICY_WARN: ExternalStatePolicy.ValueType # 0 +POLICY_IGNORE: ExternalStatePolicy.ValueType # 1 +POLICY_FAIL: ExternalStatePolicy.ValueType # 2 +global___ExternalStatePolicy = ExternalStatePolicy + +@typing.final +class AutotuneOptions(google.protobuf.message.Message): + """next: 6""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENABLED_FIELD_NUMBER: builtins.int + CPU_BUDGET_FIELD_NUMBER: builtins.int + RAM_BUDGET_FIELD_NUMBER: builtins.int + AUTOTUNE_ALGORITHM_FIELD_NUMBER: builtins.int + INITIAL_PARALLELISM_FIELD_NUMBER: builtins.int + enabled: builtins.bool + cpu_budget: builtins.int + ram_budget: builtins.int + autotune_algorithm: tensorflow.core.framework.model_pb2.AutotuneAlgorithm.ValueType + initial_parallelism: builtins.int + def __init__( + self, + *, + enabled: builtins.bool | None = ..., + cpu_budget: builtins.int | None = ..., + ram_budget: builtins.int | None = ..., + autotune_algorithm: tensorflow.core.framework.model_pb2.AutotuneAlgorithm.ValueType | None = ..., + initial_parallelism: builtins.int | None = ..., + ) -> None: ... 
+ def HasField( + self, + field_name: typing.Literal[ + "autotune_algorithm", + b"autotune_algorithm", + "cpu_budget", + b"cpu_budget", + "enabled", + b"enabled", + "initial_parallelism", + b"initial_parallelism", + "optional_autotune_algorithm", + b"optional_autotune_algorithm", + "optional_cpu_budget", + b"optional_cpu_budget", + "optional_enabled", + b"optional_enabled", + "optional_initial_parallelism", + b"optional_initial_parallelism", + "optional_ram_budget", + b"optional_ram_budget", + "ram_budget", + b"ram_budget", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "autotune_algorithm", + b"autotune_algorithm", + "cpu_budget", + b"cpu_budget", + "enabled", + b"enabled", + "initial_parallelism", + b"initial_parallelism", + "optional_autotune_algorithm", + b"optional_autotune_algorithm", + "optional_cpu_budget", + b"optional_cpu_budget", + "optional_enabled", + b"optional_enabled", + "optional_initial_parallelism", + b"optional_initial_parallelism", + "optional_ram_budget", + b"optional_ram_budget", + "ram_budget", + b"ram_budget", + ], + ) -> None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_autotune_algorithm", b"optional_autotune_algorithm"] + ) -> typing.Literal["autotune_algorithm"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_cpu_budget", b"optional_cpu_budget"] + ) -> typing.Literal["cpu_budget"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_enabled", b"optional_enabled"] + ) -> typing.Literal["enabled"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_initial_parallelism", b"optional_initial_parallelism"] + ) -> typing.Literal["initial_parallelism"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_ram_budget", b"optional_ram_budget"] + ) -> typing.Literal["ram_budget"] | None: ... 
+ +global___AutotuneOptions = AutotuneOptions + +@typing.final +class CardinalityOptions(google.protobuf.message.Message): + """next: 2""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _ComputeLevel: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _ComputeLevelEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CardinalityOptions._ComputeLevel.ValueType], builtins.type + ): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + CARDINALITY_COMPUTE_UNSPECIFIED: CardinalityOptions._ComputeLevel.ValueType # 0 + CARDINALITY_COMPUTE_LOW: CardinalityOptions._ComputeLevel.ValueType # 1 + """Cardinality will only be computed if it can be determined in a cheap + manner (ie. without reading from file sources). If the cardinality would + be nontrivial to compute, Cardinality() will return UNKNOWN_CARDINALITY. + """ + CARDINALITY_COMPUTE_MODERATE: CardinalityOptions._ComputeLevel.ValueType # 2 + """Moderate effort will be made to determine cardinality, such as reading + index data from source files. If significant work is needed to compute + cardinality (e.g. reading entire source file contents or executing user + defined functions), Cardinality() will return UNKNOWN_CARDINALITY. + """ + + class ComputeLevel(_ComputeLevel, metaclass=_ComputeLevelEnumTypeWrapper): ... + CARDINALITY_COMPUTE_UNSPECIFIED: CardinalityOptions.ComputeLevel.ValueType # 0 + CARDINALITY_COMPUTE_LOW: CardinalityOptions.ComputeLevel.ValueType # 1 + """Cardinality will only be computed if it can be determined in a cheap + manner (ie. without reading from file sources). If the cardinality would + be nontrivial to compute, Cardinality() will return UNKNOWN_CARDINALITY. + """ + CARDINALITY_COMPUTE_MODERATE: CardinalityOptions.ComputeLevel.ValueType # 2 + """Moderate effort will be made to determine cardinality, such as reading + index data from source files. 
If significant work is needed to compute + cardinality (e.g. reading entire source file contents or executing user + defined functions), Cardinality() will return UNKNOWN_CARDINALITY. + """ + + COMPUTE_LEVEL_FIELD_NUMBER: builtins.int + compute_level: global___CardinalityOptions.ComputeLevel.ValueType + def __init__(self, *, compute_level: global___CardinalityOptions.ComputeLevel.ValueType | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["compute_level", b"compute_level"]) -> None: ... + +global___CardinalityOptions = CardinalityOptions + +@typing.final +class DistributeOptions(google.protobuf.message.Message): + """next: 3""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + AUTO_SHARD_POLICY_FIELD_NUMBER: builtins.int + NUM_DEVICES_FIELD_NUMBER: builtins.int + auto_shard_policy: global___AutoShardPolicy.ValueType + num_devices: builtins.int + def __init__( + self, *, auto_shard_policy: global___AutoShardPolicy.ValueType | None = ..., num_devices: builtins.int | None = ... + ) -> None: ... + def HasField( + self, field_name: typing.Literal["num_devices", b"num_devices", "optional_num_devices", b"optional_num_devices"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "auto_shard_policy", + b"auto_shard_policy", + "num_devices", + b"num_devices", + "optional_num_devices", + b"optional_num_devices", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_num_devices", b"optional_num_devices"] + ) -> typing.Literal["num_devices"] | None: ... 
+ +global___DistributeOptions = DistributeOptions + +@typing.final +class OptimizationOptions(google.protobuf.message.Message): + """next: 22""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + APPLY_DEFAULT_OPTIMIZATIONS_FIELD_NUMBER: builtins.int + FILTER_FUSION_FIELD_NUMBER: builtins.int + MAP_AND_BATCH_FUSION_FIELD_NUMBER: builtins.int + MAP_AND_FILTER_FUSION_FIELD_NUMBER: builtins.int + MAP_FUSION_FIELD_NUMBER: builtins.int + MAP_PARALLELIZATION_FIELD_NUMBER: builtins.int + NOOP_ELIMINATION_FIELD_NUMBER: builtins.int + PARALLEL_BATCH_FIELD_NUMBER: builtins.int + SHUFFLE_AND_REPEAT_FUSION_FIELD_NUMBER: builtins.int + FILTER_PARALLELIZATION_FIELD_NUMBER: builtins.int + INJECT_PREFETCH_FIELD_NUMBER: builtins.int + SEQ_INTERLEAVE_PREFETCH_FIELD_NUMBER: builtins.int + apply_default_optimizations: builtins.bool + filter_fusion: builtins.bool + map_and_batch_fusion: builtins.bool + map_and_filter_fusion: builtins.bool + map_fusion: builtins.bool + map_parallelization: builtins.bool + noop_elimination: builtins.bool + parallel_batch: builtins.bool + shuffle_and_repeat_fusion: builtins.bool + filter_parallelization: builtins.bool + inject_prefetch: builtins.bool + seq_interleave_prefetch: builtins.bool + def __init__( + self, + *, + apply_default_optimizations: builtins.bool | None = ..., + filter_fusion: builtins.bool | None = ..., + map_and_batch_fusion: builtins.bool | None = ..., + map_and_filter_fusion: builtins.bool | None = ..., + map_fusion: builtins.bool | None = ..., + map_parallelization: builtins.bool | None = ..., + noop_elimination: builtins.bool | None = ..., + parallel_batch: builtins.bool | None = ..., + shuffle_and_repeat_fusion: builtins.bool | None = ..., + filter_parallelization: builtins.bool | None = ..., + inject_prefetch: builtins.bool | None = ..., + seq_interleave_prefetch: builtins.bool | None = ..., + ) -> None: ... 
+ def HasField( + self, + field_name: typing.Literal[ + "apply_default_optimizations", + b"apply_default_optimizations", + "filter_fusion", + b"filter_fusion", + "filter_parallelization", + b"filter_parallelization", + "inject_prefetch", + b"inject_prefetch", + "map_and_batch_fusion", + b"map_and_batch_fusion", + "map_and_filter_fusion", + b"map_and_filter_fusion", + "map_fusion", + b"map_fusion", + "map_parallelization", + b"map_parallelization", + "noop_elimination", + b"noop_elimination", + "optional_apply_default_optimizations", + b"optional_apply_default_optimizations", + "optional_filter_fusion", + b"optional_filter_fusion", + "optional_filter_parallelization", + b"optional_filter_parallelization", + "optional_inject_prefetch", + b"optional_inject_prefetch", + "optional_map_and_batch_fusion", + b"optional_map_and_batch_fusion", + "optional_map_and_filter_fusion", + b"optional_map_and_filter_fusion", + "optional_map_fusion", + b"optional_map_fusion", + "optional_map_parallelization", + b"optional_map_parallelization", + "optional_noop_elimination", + b"optional_noop_elimination", + "optional_parallel_batch", + b"optional_parallel_batch", + "optional_seq_interleave_prefetch", + b"optional_seq_interleave_prefetch", + "optional_shuffle_and_repeat_fusion", + b"optional_shuffle_and_repeat_fusion", + "parallel_batch", + b"parallel_batch", + "seq_interleave_prefetch", + b"seq_interleave_prefetch", + "shuffle_and_repeat_fusion", + b"shuffle_and_repeat_fusion", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "apply_default_optimizations", + b"apply_default_optimizations", + "filter_fusion", + b"filter_fusion", + "filter_parallelization", + b"filter_parallelization", + "inject_prefetch", + b"inject_prefetch", + "map_and_batch_fusion", + b"map_and_batch_fusion", + "map_and_filter_fusion", + b"map_and_filter_fusion", + "map_fusion", + b"map_fusion", + "map_parallelization", + b"map_parallelization", + "noop_elimination", + b"noop_elimination", + "optional_apply_default_optimizations", + b"optional_apply_default_optimizations", + "optional_filter_fusion", + b"optional_filter_fusion", + "optional_filter_parallelization", + b"optional_filter_parallelization", + "optional_inject_prefetch", + b"optional_inject_prefetch", + "optional_map_and_batch_fusion", + b"optional_map_and_batch_fusion", + "optional_map_and_filter_fusion", + b"optional_map_and_filter_fusion", + "optional_map_fusion", + b"optional_map_fusion", + "optional_map_parallelization", + b"optional_map_parallelization", + "optional_noop_elimination", + b"optional_noop_elimination", + "optional_parallel_batch", + b"optional_parallel_batch", + "optional_seq_interleave_prefetch", + b"optional_seq_interleave_prefetch", + "optional_shuffle_and_repeat_fusion", + b"optional_shuffle_and_repeat_fusion", + "parallel_batch", + b"parallel_batch", + "seq_interleave_prefetch", + b"seq_interleave_prefetch", + "shuffle_and_repeat_fusion", + b"shuffle_and_repeat_fusion", + ], + ) -> None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_apply_default_optimizations", b"optional_apply_default_optimizations"] + ) -> typing.Literal["apply_default_optimizations"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_filter_fusion", b"optional_filter_fusion"] + ) -> typing.Literal["filter_fusion"] | None: ... 
+ @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_filter_parallelization", b"optional_filter_parallelization"] + ) -> typing.Literal["filter_parallelization"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_inject_prefetch", b"optional_inject_prefetch"] + ) -> typing.Literal["inject_prefetch"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_map_and_batch_fusion", b"optional_map_and_batch_fusion"] + ) -> typing.Literal["map_and_batch_fusion"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_map_and_filter_fusion", b"optional_map_and_filter_fusion"] + ) -> typing.Literal["map_and_filter_fusion"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_map_fusion", b"optional_map_fusion"] + ) -> typing.Literal["map_fusion"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_map_parallelization", b"optional_map_parallelization"] + ) -> typing.Literal["map_parallelization"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_noop_elimination", b"optional_noop_elimination"] + ) -> typing.Literal["noop_elimination"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_parallel_batch", b"optional_parallel_batch"] + ) -> typing.Literal["parallel_batch"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_seq_interleave_prefetch", b"optional_seq_interleave_prefetch"] + ) -> typing.Literal["seq_interleave_prefetch"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_shuffle_and_repeat_fusion", b"optional_shuffle_and_repeat_fusion"] + ) -> typing.Literal["shuffle_and_repeat_fusion"] | None: ... 
+ +global___OptimizationOptions = OptimizationOptions + +@typing.final +class ServiceOptions(google.protobuf.message.Message): + """next: 2""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PINNED_FIELD_NUMBER: builtins.int + pinned: builtins.bool + def __init__(self, *, pinned: builtins.bool | None = ...) -> None: ... + def HasField( + self, field_name: typing.Literal["optional_pinned", b"optional_pinned", "pinned", b"pinned"] + ) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["optional_pinned", b"optional_pinned", "pinned", b"pinned"]) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["optional_pinned", b"optional_pinned"] + ) -> typing.Literal["pinned"] | None: ... + +global___ServiceOptions = ServiceOptions + +@typing.final +class ThreadingOptions(google.protobuf.message.Message): + """next: 3""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + MAX_INTRA_OP_PARALLELISM_FIELD_NUMBER: builtins.int + PRIVATE_THREADPOOL_SIZE_FIELD_NUMBER: builtins.int + max_intra_op_parallelism: builtins.int + private_threadpool_size: builtins.int + def __init__( + self, *, max_intra_op_parallelism: builtins.int | None = ..., private_threadpool_size: builtins.int | None = ... + ) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "max_intra_op_parallelism", + b"max_intra_op_parallelism", + "optional_max_intra_op_parallelism", + b"optional_max_intra_op_parallelism", + "optional_private_threadpool_size", + b"optional_private_threadpool_size", + "private_threadpool_size", + b"private_threadpool_size", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "max_intra_op_parallelism", + b"max_intra_op_parallelism", + "optional_max_intra_op_parallelism", + b"optional_max_intra_op_parallelism", + "optional_private_threadpool_size", + b"optional_private_threadpool_size", + "private_threadpool_size", + b"private_threadpool_size", + ], + ) -> None: ... 
+ @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_max_intra_op_parallelism", b"optional_max_intra_op_parallelism"] + ) -> typing.Literal["max_intra_op_parallelism"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_private_threadpool_size", b"optional_private_threadpool_size"] + ) -> typing.Literal["private_threadpool_size"] | None: ... + +global___ThreadingOptions = ThreadingOptions + +@typing.final +class Options(google.protobuf.message.Message): + """Message stored with Dataset objects to control how datasets are processed and + optimized. + + next: 13 + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DATASET_NAME_FIELD_NUMBER: builtins.int + FRAMEWORK_TYPE_FIELD_NUMBER: builtins.int + DETERMINISTIC_FIELD_NUMBER: builtins.int + AUTOTUNE_OPTIONS_FIELD_NUMBER: builtins.int + DISTRIBUTE_OPTIONS_FIELD_NUMBER: builtins.int + OPTIMIZATION_OPTIONS_FIELD_NUMBER: builtins.int + SERVICE_OPTIONS_FIELD_NUMBER: builtins.int + SLACK_FIELD_NUMBER: builtins.int + THREADING_OPTIONS_FIELD_NUMBER: builtins.int + EXTERNAL_STATE_POLICY_FIELD_NUMBER: builtins.int + SYMBOLIC_CHECKPOINT_FIELD_NUMBER: builtins.int + WARM_START_FIELD_NUMBER: builtins.int + dataset_name: builtins.str + deterministic: builtins.bool + slack: builtins.bool + external_state_policy: global___ExternalStatePolicy.ValueType + symbolic_checkpoint: builtins.bool + warm_start: builtins.bool + @property + def framework_type(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """List of frameworks used to generate this dataset.""" + + @property + def autotune_options(self) -> global___AutotuneOptions: + """The autotune options associated with the dataset.""" + + @property + def distribute_options(self) -> global___DistributeOptions: + """The distribution strategy options associated with the dataset.""" + + @property + def optimization_options(self) -> global___OptimizationOptions: + """The 
optimization options associated with the dataset.""" + + @property + def service_options(self) -> global___ServiceOptions: + """The tf.data service options associated with the dataset.""" + + @property + def threading_options(self) -> global___ThreadingOptions: + """The threading options associated with the dataset.""" + + def __init__( + self, + *, + dataset_name: builtins.str | None = ..., + framework_type: collections.abc.Iterable[builtins.str] | None = ..., + deterministic: builtins.bool | None = ..., + autotune_options: global___AutotuneOptions | None = ..., + distribute_options: global___DistributeOptions | None = ..., + optimization_options: global___OptimizationOptions | None = ..., + service_options: global___ServiceOptions | None = ..., + slack: builtins.bool | None = ..., + threading_options: global___ThreadingOptions | None = ..., + external_state_policy: global___ExternalStatePolicy.ValueType | None = ..., + symbolic_checkpoint: builtins.bool | None = ..., + warm_start: builtins.bool | None = ..., + ) -> None: ... + def HasField( + self, + field_name: typing.Literal[ + "autotune_options", + b"autotune_options", + "dataset_name", + b"dataset_name", + "deterministic", + b"deterministic", + "distribute_options", + b"distribute_options", + "external_state_policy", + b"external_state_policy", + "optimization_options", + b"optimization_options", + "optional_dataset_name", + b"optional_dataset_name", + "optional_deterministic", + b"optional_deterministic", + "optional_external_state_policy", + b"optional_external_state_policy", + "optional_slack", + b"optional_slack", + "optional_symbolic_checkpoint", + b"optional_symbolic_checkpoint", + "optional_warm_start", + b"optional_warm_start", + "service_options", + b"service_options", + "slack", + b"slack", + "symbolic_checkpoint", + b"symbolic_checkpoint", + "threading_options", + b"threading_options", + "warm_start", + b"warm_start", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "autotune_options", + b"autotune_options", + "dataset_name", + b"dataset_name", + "deterministic", + b"deterministic", + "distribute_options", + b"distribute_options", + "external_state_policy", + b"external_state_policy", + "framework_type", + b"framework_type", + "optimization_options", + b"optimization_options", + "optional_dataset_name", + b"optional_dataset_name", + "optional_deterministic", + b"optional_deterministic", + "optional_external_state_policy", + b"optional_external_state_policy", + "optional_slack", + b"optional_slack", + "optional_symbolic_checkpoint", + b"optional_symbolic_checkpoint", + "optional_warm_start", + b"optional_warm_start", + "service_options", + b"service_options", + "slack", + b"slack", + "symbolic_checkpoint", + b"symbolic_checkpoint", + "threading_options", + b"threading_options", + "warm_start", + b"warm_start", + ], + ) -> None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_dataset_name", b"optional_dataset_name"] + ) -> typing.Literal["dataset_name"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_deterministic", b"optional_deterministic"] + ) -> typing.Literal["deterministic"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_external_state_policy", b"optional_external_state_policy"] + ) -> typing.Literal["external_state_policy"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["optional_slack", b"optional_slack"]) -> typing.Literal["slack"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_symbolic_checkpoint", b"optional_symbolic_checkpoint"] + ) -> typing.Literal["symbolic_checkpoint"] | None: ... 
+ @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["optional_warm_start", b"optional_warm_start"] + ) -> typing.Literal["warm_start"] | None: ... + +global___Options = Options diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/dataset_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/dataset_pb2.pyi new file mode 100644 index 0000000000..0cfb9bb859 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/dataset_pb2.pyi @@ -0,0 +1,116 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" + +import builtins +import collections.abc +import typing + +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import tensorflow.core.framework.tensor_pb2 +import tensorflow.core.framework.tensor_shape_pb2 +import tensorflow.core.framework.types_pb2 + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class CompressedComponentMetadata(google.protobuf.message.Message): + """This file contains protocol buffers for working with tf.data Datasets. + + Metadata describing a compressed component of a dataset element. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DTYPE_FIELD_NUMBER: builtins.int + TENSOR_SHAPE_FIELD_NUMBER: builtins.int + UNCOMPRESSED_BYTES_FIELD_NUMBER: builtins.int + dtype: tensorflow.core.framework.types_pb2.DataType.ValueType + """The dtype of the component tensor.""" + @property + def tensor_shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: + """The shape of the component tensor.""" + + @property + def uncompressed_bytes(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The amount of uncompressed tensor data. + - For string tensors, there is an element for each string indicating the + size of the string. 
+ - For all other tensors, there is a single element indicating the size of + the tensor. + """ + + def __init__( + self, + *, + dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ..., + tensor_shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ..., + uncompressed_bytes: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["tensor_shape", b"tensor_shape"]) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "dtype", b"dtype", "tensor_shape", b"tensor_shape", "uncompressed_bytes", b"uncompressed_bytes" + ], + ) -> None: ... + +global___CompressedComponentMetadata = CompressedComponentMetadata + +@typing.final +class CompressedElement(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DATA_FIELD_NUMBER: builtins.int + COMPONENT_METADATA_FIELD_NUMBER: builtins.int + VERSION_FIELD_NUMBER: builtins.int + data: builtins.bytes + """Compressed tensor bytes for all components of the element.""" + version: builtins.int + """Version of the CompressedElement. CompressedElements may be stored on disk + and read back by later versions of code, so we store a version number to + help readers understand which version they are reading. When you add a new + field to this proto, you need to increment kCompressedElementVersion in + tensorflow/core/data/compression_utils.cc. + """ + @property + def component_metadata( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CompressedComponentMetadata]: + """Metadata for the components of the element.""" + + def __init__( + self, + *, + data: builtins.bytes | None = ..., + component_metadata: collections.abc.Iterable[global___CompressedComponentMetadata] | None = ..., + version: builtins.int | None = ..., + ) -> None: ... 
+ def ClearField( + self, field_name: typing.Literal["component_metadata", b"component_metadata", "data", b"data", "version", b"version"] + ) -> None: ... + +global___CompressedElement = CompressedElement + +@typing.final +class UncompressedElement(google.protobuf.message.Message): + """An uncompressed dataset element.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + COMPONENTS_FIELD_NUMBER: builtins.int + @property + def components( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + tensorflow.core.framework.tensor_pb2.TensorProto + ]: ... + def __init__( + self, *, components: collections.abc.Iterable[tensorflow.core.framework.tensor_pb2.TensorProto] | None = ... + ) -> None: ... + def ClearField(self, field_name: typing.Literal["components", b"components"]) -> None: ... + +global___UncompressedElement = UncompressedElement diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/device_attributes_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/device_attributes_pb2.pyi new file mode 100644 index 0000000000..41abb3a43b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/device_attributes_pb2.pyi @@ -0,0 +1,140 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" + +import builtins +import collections.abc +import typing + +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class InterconnectLink(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DEVICE_ID_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + STRENGTH_FIELD_NUMBER: builtins.int + device_id: builtins.int + type: builtins.str + strength: builtins.int + def __init__( + self, *, device_id: builtins.int | None = ..., type: builtins.str | None = ..., strength: builtins.int | None = ... + ) -> None: ... + def ClearField( + self, field_name: typing.Literal["device_id", b"device_id", "strength", b"strength", "type", b"type"] + ) -> None: ... + +global___InterconnectLink = InterconnectLink + +@typing.final +class LocalLinks(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LINK_FIELD_NUMBER: builtins.int + @property + def link(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InterconnectLink]: ... + def __init__(self, *, link: collections.abc.Iterable[global___InterconnectLink] | None = ...) -> None: ... + def ClearField(self, field_name: typing.Literal["link", b"link"]) -> None: ... + +global___LocalLinks = LocalLinks + +@typing.final +class DeviceLocality(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BUS_ID_FIELD_NUMBER: builtins.int + NUMA_NODE_FIELD_NUMBER: builtins.int + LINKS_FIELD_NUMBER: builtins.int + bus_id: builtins.int + """Optional bus locality of device. Default value of 0 means + no specific locality. Specific localities are indexed from 1. 
+ """ + numa_node: builtins.int + """Optional NUMA locality of device.""" + @property + def links(self) -> global___LocalLinks: + """Optional local interconnect links to other devices.""" + + def __init__( + self, *, bus_id: builtins.int | None = ..., numa_node: builtins.int | None = ..., links: global___LocalLinks | None = ... + ) -> None: ... + def HasField(self, field_name: typing.Literal["links", b"links"]) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["bus_id", b"bus_id", "links", b"links", "numa_node", b"numa_node"] + ) -> None: ... + +global___DeviceLocality = DeviceLocality + +@typing.final +class DeviceAttributes(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + DEVICE_TYPE_FIELD_NUMBER: builtins.int + MEMORY_LIMIT_FIELD_NUMBER: builtins.int + LOCALITY_FIELD_NUMBER: builtins.int + INCARNATION_FIELD_NUMBER: builtins.int + PHYSICAL_DEVICE_DESC_FIELD_NUMBER: builtins.int + XLA_GLOBAL_ID_FIELD_NUMBER: builtins.int + name: builtins.str + """Fully specified name of the device within a cluster.""" + device_type: builtins.str + """String representation of device_type.""" + memory_limit: builtins.int + """Memory capacity of device in bytes.""" + incarnation: builtins.int + """A device is assigned a global unique number each time it is + initialized. "incarnation" should never be 0. + """ + physical_device_desc: builtins.str + """String representation of the physical device that this device maps to.""" + xla_global_id: builtins.int + """A physical device ID for use in XLA DeviceAssignments, unique across + clients in a multi-client setup. Set to -1 if unavailable, non-negative + otherwise. + """ + @property + def locality(self) -> global___DeviceLocality: + """Platform-specific data about device that may be useful + for supporting efficient data transfers. 
+ """ + + def __init__( + self, + *, + name: builtins.str | None = ..., + device_type: builtins.str | None = ..., + memory_limit: builtins.int | None = ..., + locality: global___DeviceLocality | None = ..., + incarnation: builtins.int | None = ..., + physical_device_desc: builtins.str | None = ..., + xla_global_id: builtins.int | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["locality", b"locality"]) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "device_type", + b"device_type", + "incarnation", + b"incarnation", + "locality", + b"locality", + "memory_limit", + b"memory_limit", + "name", + b"name", + "physical_device_desc", + b"physical_device_desc", + "xla_global_id", + b"xla_global_id", + ], + ) -> None: ... + +global___DeviceAttributes = DeviceAttributes diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/full_type_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/full_type_pb2.pyi new file mode 100644 index 0000000000..8025593122 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/tensorflow/tensorflow/core/framework/full_type_pb2.pyi @@ -0,0 +1,617 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" + +import builtins +import collections.abc +import sys +import typing + +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _FullTypeId: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _FullTypeIdEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FullTypeId.ValueType], builtins.type +): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + TFT_UNSET: _FullTypeId.ValueType # 0 + """The default represents an uninitialized values.""" + TFT_VAR: _FullTypeId.ValueType # 1 + """Type symbols. Used to construct more complex type expressions like + algebraic data types. + + Type variables may serve as placeholder for any other type ID in type + templates. + + Examples: + TFT_DATASET[TFT_VAR["T"]] is a Dataset returning a type indicated by "T". + TFT_TENSOR[TFT_VAR["T"]] is a Tensor of n element type indicated by "T". + TFT_TENSOR[TFT_VAR["T"]], TFT_TENSOR[TFT_VAR["T"]] are two tensors of + identical element types. + TFT_TENSOR[TFT_VAR["P"]], TFT_TENSOR[TFT_VAR["Q"]] are two tensors of + independent element types. + """ + TFT_ANY: _FullTypeId.ValueType # 2 + """Wildcard type. Describes a parameter of unknown type. In TensorFlow, that + can mean either a "Top" type (accepts any type), or a dynamically typed + object whose type is unknown in context. + Important: "unknown" does not necessarily mean undeterminable! + """ + TFT_PRODUCT: _FullTypeId.ValueType # 3 + """The algebraic product type. This is an algebraic type that may be used just + for logical grouping. Not to confused with TFT_TUPLE which describes a + concrete object of several elements. 
+ + Example: + TFT_DATASET[TFT_PRODUCT[TFT_TENSOR[TFT_INT32], TFT_TENSOR[TFT_FLOAT64]]] + is a Dataset producing two tensors, an integer one and a float one. + """ + TFT_NAMED: _FullTypeId.ValueType # 4 + """Represents a named field, with the name stored in the attribute. + + Parametrization: + TFT_NAMED[]{} + * is the type of the field + * is the field name, as string (thpugh can theoretically be an int + as well) + + Example: + TFT_RECORD[ + TFT_NAMED[TFT_TENSOR[TFT_INT32]]{'foo'}, + TFT_NAMED[TFT_TENSOR[TFT_FLOAT32]]{'bar'}, + ] + is a structure with two fields, an int tensor "foo" and a float tensor + "bar". + """ + TFT_FOR_EACH: _FullTypeId.ValueType # 20 + """Template definition. Expands the variables by repeating a template as + arguments of container. + + Parametrization: + TFT_FOR_EACH[,